Compare commits

..

3 Commits

Author: Tatyana Kostromskaya | SHA1: 6bde7fe413 | Date: 2023-07-11 11:27:57 +00:00
Update release version

Author: Tatyana Kostromskaya | SHA1: 2494aa2b0d | Date: 2023-07-11 11:13:53 +00:00
Add warning to notify about forcing node16 (#2678)
* add warning
* fix tests
* aggregate to one warning
* fix linter
* fix comm

Author: Tingluo Huang | SHA1: 9ef4121b5b | Date: 2023-04-26 16:04:26 -04:00
Create 2.304.0 runner release
374 changed files with 5203 additions and 12078 deletions


@@ -1,27 +1,27 @@
 // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
 {
 "name": "Actions Runner Devcontainer",
 "image": "mcr.microsoft.com/devcontainers/base:focal",
 "features": {
 "ghcr.io/devcontainers/features/docker-in-docker:1": {},
 "ghcr.io/devcontainers/features/dotnet": {
-"version": "6.0.421"
+"version": "6.0.405"
 },
 "ghcr.io/devcontainers/features/node:1": {
 "version": "16"
-},
-"ghcr.io/devcontainers/features/sshd:1": {
-"version": "latest"
 }
 },
 "customizations": {
 "vscode": {
 "extensions": [
 "ms-azuretools.vscode-docker",
 "ms-dotnettools.csharp",
 "eamodio.gitlens"
 ]
 }
 },
+// dotnet restore to install dependencies so OmniSharp works out of the box
+// src/Test restores all other projects it references, src/Runner.PluginHost is not one of them
 "postCreateCommand": "dotnet restore src/Test && dotnet restore src/Runner.PluginHost",
 "remoteUser": "vscode"
 }

.editorconfig (new file, 9 lines added)

@@ -0,0 +1,9 @@
# https://editorconfig.org/
[*]
charset = utf-8 # Set default charset to utf-8
insert_final_newline = true # ensure all files end with a single newline
trim_trailing_whitespace = true # attempt to remove trailing whitespace on save
[*.md]
trim_trailing_whitespace = false # in markdown, "two trailing spaces" is unfortunately meaningful; it means `<br>`


@@ -1,20 +0,0 @@
version: 2
updates:
- package-ecosystem: "docker"
directory: "/images"
schedule:
interval: "daily"
target-branch: "main"
- package-ecosystem: "nuget"
directory: "/src"
schedule:
interval: "daily"
target-branch: "main"
- package-ecosystem: "npm"
directory: "/src/Misc/expressionFunc/hashFiles"
schedule:
interval: "daily"
target-branch: "main"
allow:
- dependency-type: direct
- dependency-type: production # check only dependencies, which are going to the compiled app, not supporting tools like @vue-cli


@@ -58,6 +58,29 @@ jobs:
 ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
 working-directory: src
+# Check runtime/externals hash
+- name: Compute/Compare runtime and externals Hash
+shell: bash
+run: |
+echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
+echo "Current Externals hash result: $EXTERNALS_HASH"
+NeedUpdate=0
+if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
+echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
+NeedUpdate=1
+fi
+if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
+echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
+NeedUpdate=1
+fi
+exit $NeedUpdate
+env:
+DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
+EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
 # Run tests
 - name: L0
 run: |
@@ -75,8 +98,11 @@ jobs:
 # Upload runner package tar.gz/zip as artifact
 - name: Publish Artifact
 if: github.event_name != 'pull_request'
-uses: actions/upload-artifact@v4
+uses: actions/upload-artifact@v2
 with:
 name: runner-package-${{ matrix.runtime }}
 path: |
 _package
+_package_trims/trim_externals
+_package_trims/trim_runtime
+_package_trims/trim_runtime_externals
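The hash-check step added in this hunk gates the build on the checked-in content hashes: it recomputes `hashFiles()` over the trimmed runtime/externals layouts and exits non-zero when the files under `./src/Misc/contentHash/` are stale. A rough local approximation of that check, assuming the same directory layout (`hashFiles()` hashes each matched file and then the combined digests, so the value below is illustrative rather than byte-identical):

```
# Hypothetical local re-check of one externals hash (the runtime value is an example).
runtime="linux-x64"
computed=$(find _layout_trims/externals -type f -print0 | sort -z \
  | xargs -0 sha256sum | awk '{print $1}' | sha256sum | awk '{print $1}')
expected=$(cat "./src/Misc/contentHash/externals/${runtime}")
if [ "$computed" != "$expected" ]; then
  echo "Hash mismatch, update ./src/Misc/contentHash/externals/${runtime} to ${computed}"
  exit 1
fi
```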


@@ -1,17 +0,0 @@
name: Close Bugs Bot
on:
workflow_dispatch:
schedule:
- cron: '0 0 * * *' # every day at midnight
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v8
with:
close-issue-message: "This issue does not seem to be a problem with the runner application, it concerns the GitHub actions platform more generally. Could you please post your feedback on the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃"
exempt-issue-labels: "keep"
stale-issue-label: "actions-bug"
only-labels: "actions-bug"
days-before-stale: 0
days-before-close: 1


@@ -1,17 +0,0 @@
name: Close Features Bot
on:
workflow_dispatch:
schedule:
- cron: '0 0 * * *' # every day at midnight
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v8
with:
close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions-and-packages) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
exempt-issue-labels: "keep"
stale-issue-label: "actions-feature"
only-labels: "actions-feature"
days-before-stale: 0
days-before-close: 1


@@ -1,105 +0,0 @@
name: "DotNet SDK Upgrade"
on:
schedule:
- cron: '0 0 * * 1'
workflow_dispatch:
jobs:
dotnet-update:
runs-on: ubuntu-latest
outputs:
SHOULD_UPDATE: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE }}
BRANCH_EXISTS: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS }}
DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Get current major minor version
id: fetch_current_version
shell: bash
run: |
current_major_minor_patch_version=$(jq .sdk.version ./src/global.json | xargs)
current_major_minor_version=$(cut -d '.' -f 1,2 <<< "$current_major_minor_patch_version")
echo "DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION=${current_major_minor_patch_version}" >> $GITHUB_OUTPUT
echo "DOTNET_CURRENT_MAJOR_MINOR_VERSION=${current_major_minor_version}" >> $GITHUB_OUTPUT
- name: Check patch version
id: fetch_latest_version
shell: bash
run: |
latest_patch_version=$(curl -sb -H "Accept: application/json" "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version")
current_patch_version=${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}
should_update=0
[ "$current_patch_version" != "$latest_patch_version" ] && should_update=1
# check if git branch already exists for the upgrade
branch_already_exists=0
if git ls-remote --heads --exit-code origin refs/heads/feature/dotnetsdk-upgrade/${latest_patch_version};
then
branch_already_exists=1
should_update=0
fi
echo "DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION=${latest_patch_version}" >> $GITHUB_OUTPUT
echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
echo "BRANCH_EXISTS=${branch_already_exists}" >> $GITHUB_OUTPUT
- name: Create an error annotation if branch exists
if: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS == 1 }}
run: echo "::error links::feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} https://github.com/actions/runner/tree/feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}::Branch feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} already exists. Please take a look and delete that branch if you wish to recreate"
- name: Create a warning annotation if no need to update
if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 0 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
run: echo "::warning ::Latest DotNet SDK patch is ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}, and we are on ${{ steps.fetch_latest_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}. No need to update"
- name: Update patch version
if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
shell: bash
run: |
patch_version="${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
current_version="${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}"
# Update globals
echo Updating globals
globals_temp=$(mktemp)
jq --unbuffered --arg patch_version "$patch_version" '.sdk.version = $patch_version' ./src/global.json > "$globals_temp" && mv "$globals_temp" ./src/global.json
# Update devcontainer
echo Updating devcontainer
devcontainer_temp=$(mktemp)
jq --unbuffered --arg patch_version "$patch_version" '.features."ghcr.io/devcontainers/features/dotnet".version = $patch_version' ./.devcontainer/devcontainer.json > "$devcontainer_temp" && mv "$devcontainer_temp" ./.devcontainer/devcontainer.json
# Update dev.sh
echo Updating start script
sed -i "s/DOTNETSDK_VERSION=\"$current_version\"/DOTNETSDK_VERSION=\"$patch_version\"/g" ./src/dev.sh
- name: GIT commit and push all changed files
if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
id: create_branch
run: |
branch_name="feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
git config --global user.name "github-actions[bot]"
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
git checkout -b $branch_name
git commit -a -m "Upgrade dotnet sdk to v${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
git push --set-upstream origin $branch_name
create-pr:
needs: [dotnet-update]
if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
- name: Create Pull Request
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
---
Autogenerated by [DotNet SDK Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/dotnet-upgrade.yml)"
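The workflow above bumps the .NET SDK patch version by rewriting `sdk.version` in `src/global.json` with `jq` (and then the matching fields in `.devcontainer/devcontainer.json` and `src/dev.sh`). A standalone sketch of that `jq` edit, with an example version number:

```
# Example only: bump sdk.version in global.json the same way the workflow does.
patch_version="6.0.422"   # hypothetical new patch version
globals_temp=$(mktemp)
jq --arg patch_version "$patch_version" '.sdk.version = $patch_version' ./src/global.json \
  > "$globals_temp" && mv "$globals_temp" ./src/global.json
jq .sdk.version ./src/global.json   # prints "6.0.422"
```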


@@ -25,12 +25,10 @@ jobs:
 - name: Compute image version
 id: image
 uses: actions/github-script@v6
-env:
-RUNNER_VERSION: ${{ github.event.inputs.runnerVersion }}
 with:
 script: |
 const fs = require('fs');
-const inputRunnerVersion = process.env.RUNNER_VERSION;
+const inputRunnerVersion = "${{ github.event.inputs.runnerVersion }}"
 if (inputRunnerVersion) {
 console.log(`Using input runner version ${inputRunnerVersion}`)
 core.setOutput('version', inputRunnerVersion);
@@ -55,9 +53,6 @@ jobs:
 uses: docker/build-push-action@v3
 with:
 context: ./images
-platforms: |
-linux/amd64
-linux/arm64
 tags: |
 ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
 ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest


@@ -53,6 +53,27 @@ jobs:
 win-arm64-sha: ${{ steps.sha.outputs.win-arm64-sha256 }}
 osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
 osx-arm64-sha: ${{ steps.sha.outputs.osx-arm64-sha256 }}
+linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }}
+linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }}
+linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }}
+win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }}
+win-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-arm64-sha256 }}
+osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }}
+osx-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-arm64-sha256 }}
+linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }}
+linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }}
+linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }}
+win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }}
+win-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-arm64-sha256 }}
+osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }}
+osx-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-arm64-sha256 }}
+linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }}
+linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }}
+linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }}
+win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }}
+win-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-arm64-sha256 }}
+osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }}
+osx-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-arm64-sha256 }}
 strategy:
 matrix:
 runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64, win-arm64 ]
@@ -115,15 +136,90 @@ jobs:
 id: sha
 name: Compute SHA256
 working-directory: _package
- run: |
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha_noexternals
name: Compute SHA256
working-directory: _package_trims/trim_externals
- run: |
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha_noruntime
name: Compute SHA256
working-directory: _package_trims/trim_runtime
- run: |
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha_noruntime_noexternals
name: Compute SHA256
working-directory: _package_trims/trim_runtime_externals
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
if: matrix.runtime == 'win-x64' || matrix.runtime == 'win-arm64'
uses: actions/github-script@0.3.0
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const core = require('@actions/core')
const fs = require('fs');
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_zip.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
console.log(trimmedPackages)
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
if: matrix.runtime != 'win-x64' && matrix.runtime != 'win-arm64'
uses: actions/github-script@0.3.0
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const core = require('@actions/core')
const fs = require('fs');
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_targz.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
console.log(trimmedPackages)
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
 # Upload runner package tar.gz/zip as artifact.
+# Since each package name is unique, so we don't need to put ${{matrix}} info into artifact name
 - name: Publish Artifact
 if: github.event_name != 'pull_request'
-uses: actions/upload-artifact@v4
+uses: actions/upload-artifact@v2
 with:
-name: runner-packages-${{ matrix.runtime }}
+name: runner-packages
 path: |
 _package
+_package_trims/trim_externals
+_package_trims/trim_runtime
+_package_trims/trim_runtime_externals
+${{ matrix.runtime }}-trimmedpackages.json
 release:
 needs: build
@@ -133,40 +229,10 @@ jobs:
 - uses: actions/checkout@v3
 # Download runner package tar.gz/zip produced by 'build' job
-- name: Download Artifact (win-x64)
-uses: actions/download-artifact@v4
+- name: Download Artifact
+uses: actions/download-artifact@v1
 with:
-name: runner-packages-win-x64
+name: runner-packages
-path: ./
-- name: Download Artifact (win-arm64)
-uses: actions/download-artifact@v4
-with:
-name: runner-packages-win-arm64
-path: ./
-- name: Download Artifact (osx-x64)
-uses: actions/download-artifact@v4
-with:
-name: runner-packages-osx-x64
-path: ./
-- name: Download Artifact (osx-arm64)
-uses: actions/download-artifact@v4
-with:
-name: runner-packages-osx-arm64
-path: ./
-- name: Download Artifact (linux-x64)
-uses: actions/download-artifact@v4
-with:
-name: runner-packages-linux-x64
-path: ./
-- name: Download Artifact (linux-arm)
-uses: actions/download-artifact@v4
-with:
-name: runner-packages-linux-arm
-path: ./
-- name: Download Artifact (linux-arm64)
-uses: actions/download-artifact@v4
-with:
-name: runner-packages-linux-arm64
 path: ./
 # Create ReleaseNote file
@@ -187,11 +253,33 @@ jobs:
 releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
 releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
 releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
+releaseNote = releaseNote.replace(/<WIN_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noexternals}}')
+releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noexternals}}')
+releaseNote = releaseNote.replace(/<OSX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noexternals}}')
+releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noexternals}}')
+releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noexternals}}')
+releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noexternals}}')
+releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}')
+releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-x64-sha-noruntime}}')
+releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-arm64-sha-noruntime}}')
+releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-x64-sha-noruntime}}')
+releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime}}')
+releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-x64-sha-noruntime}}')
+releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm-sha-noruntime}}')
+releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}')
+releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}')
+releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noruntime-noexternals}}')
+releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}')
+releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime-noexternals}}')
+releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}')
+releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}')
+releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}')
 console.log(releaseNote)
 core.setOutput('version', runnerVersion);
 core.setOutput('note', releaseNote);
 - name: Validate Packages HASH
+working-directory: _package
 run: |
 ls -l
 echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
@@ -221,7 +309,7 @@ jobs:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 with:
 upload_url: ${{ steps.createRelease.outputs.upload_url }}
-asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
+asset_path: ${{ github.workspace }}/_package/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
 asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
 asset_content_type: application/octet-stream
@@ -231,7 +319,7 @@ jobs:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 with:
 upload_url: ${{ steps.createRelease.outputs.upload_url }}
-asset_path: ${{ github.workspace }}/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
+asset_path: ${{ github.workspace }}/_package/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
 asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
 asset_content_type: application/octet-stream
@@ -241,7 +329,7 @@ jobs:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 with:
 upload_url: ${{ steps.createRelease.outputs.upload_url }}
-asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
+asset_path: ${{ github.workspace }}/_package/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
 asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
 asset_content_type: application/octet-stream
@@ -251,7 +339,7 @@ jobs:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 with:
 upload_url: ${{ steps.createRelease.outputs.upload_url }}
-asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
+asset_path: ${{ github.workspace }}/_package/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
 asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
 asset_content_type: application/octet-stream
@@ -261,7 +349,7 @@ jobs:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 with:
 upload_url: ${{ steps.createRelease.outputs.upload_url }}
-asset_path: ${{ github.workspace }}/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
+asset_path: ${{ github.workspace }}/_package/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
 asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
 asset_content_type: application/octet-stream
@@ -271,7 +359,7 @@ jobs:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 with:
 upload_url: ${{ steps.createRelease.outputs.upload_url }}
-asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
+asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
 asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
 asset_content_type: application/octet-stream
@@ -281,10 +369,298 @@ jobs:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 with:
 upload_url: ${{ steps.createRelease.outputs.upload_url }}
-asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
+asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
 asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
 asset_content_type: application/octet-stream
# Upload release assets (trim externals)
- name: Upload Release Asset (win-x64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
asset_content_type: application/octet-stream
# Upload release assets (trim externals)
- name: Upload Release Asset (win-arm64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trim runtime)
- name: Upload Release Asset (win-x64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
asset_content_type: application/octet-stream
# Upload release assets (trim runtime)
- name: Upload Release Asset (win-arm64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trim runtime and externals)
- name: Upload Release Asset (win-x64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
asset_content_type: application/octet-stream
# Upload release assets (trim runtime and externals)
- name: Upload Release Asset (win-arm64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trimmedpackages.json)
- name: Upload Release Asset (win-x64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/win-x64-trimmedpackages.json
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
# Upload release assets (trimmedpackages.json)
- name: Upload Release Asset (win-arm64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/win-arm64-trimmedpackages.json
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-x64-trimmedpackages.json
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/osx-x64-trimmedpackages.json
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/osx-arm64-trimmedpackages.json
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-arm-trimmedpackages.json
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
 publish-image:
 needs: release
 runs-on: ubuntu-latest
@@ -323,9 +699,6 @@ jobs:
 uses: docker/build-push-action@v3
 with:
 context: ./images
-platforms: |
-linux/amd64
-linux/arm64
 tags: |
 ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
 ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
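The release job above embeds per-platform SHA-256 values into the release note and re-validates the packaged archives with `shasum -a 256 -c`. The same check works for a downloaded release asset; the values below are placeholders, not values from this compare:

```
# Placeholder values: verify a downloaded runner archive against the SHA
# published in the release note.
RUNNER_VERSION="2.305.0"
EXPECTED_SHA="<linux-x64 sha256 from the release note>"
echo "${EXPECTED_SHA}  actions-runner-linux-x64-${RUNNER_VERSION}.tar.gz" | shasum -a 256 -c
```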


@@ -1,16 +0,0 @@
name: Stale Bot
on:
workflow_dispatch:
schedule:
- cron: '0 0 * * 1' # every monday at midnight
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v8
with:
stale-issue-message: "This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days."
close-issue-message: "This issue was closed because it has been stalled for 15 days with no activity."
exempt-issue-labels: "keep"
days-before-stale: 365
days-before-close: 15


@@ -1,6 +0,0 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
cd src/Misc/expressionFunc/hashFiles
npx lint-staged


@@ -7,26 +7,19 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
 - For GitHub.com
 - The runner needs to access `https://api.github.com` for downloading actions.
-- The runner needs to access `https://codeload.github.com` for downloading actions tar.gz/zip.
 - The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
 - The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.
-- The runner needs to access `https://results-receiver.actions.githubusercontent.com/.../` for reporting progress and uploading logs during a workflow job execution.
----
-**NOTE:** for the full list of domains that are required to be in the firewall allow list refer to the [GitHub self-hosted runners requirements documentation](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github).
 These can by tested by running the following `curl` commands from your self-hosted runner machine:
 ```
-curl -v https://api.github.com/zen
+curl -v https://api.github.com/api/v3/zen
-curl -v https://codeload.github.com/_ping
 curl -v https://vstoken.actions.githubusercontent.com/_apis/health
 curl -v https://pipelines.actions.githubusercontent.com/_apis/health
-curl -v https://results-receiver.actions.githubusercontent.com/health
 ```
 - For GitHub Enterprise Server
 - The runner needs to access `https://[hostname]/api/v3` for downloading actions.
-- The runner needs to access `https://codeload.[hostname]/_ping` for downloading actions tar.gz/zip.
 - The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token.
 - The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs.
@@ -34,7 +27,6 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
 ```
 curl -v https://[hostname]/api/v3/zen
-curl -v https://codeload.[hostname]/_ping
 curl -v https://[hostname]/_services/vstoken/_apis/health
 curl -v https://[hostname]/_services/pipelines/_apis/health
 ```
@@ -50,10 +42,6 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
 - Ping api.github.com or myGHES.com using dotnet
 - Make HTTP GET to https://api.github.com or https://myGHES.com/api/v3 using dotnet, check response headers contains `X-GitHub-Request-Id`
 ---
-- DNS lookup for codeload.github.com or codeload.myGHES.com using dotnet
-- Ping codeload.github.com or codeload.myGHES.com using dotnet
-- Make HTTP GET to https://codeload.github.com/_ping or https://codeload.myGHES.com/_ping using dotnet, check response headers contains `X-GitHub-Request-Id`
----
 - DNS lookup for vstoken.actions.githubusercontent.com using dotnet
 - Ping vstoken.actions.githubusercontent.com using dotnet
 - Make HTTP GET to https://vstoken.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/vstoken/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
@@ -62,10 +50,6 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
 - Ping pipelines.actions.githubusercontent.com using dotnet
 - Make HTTP GET to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
 - Make HTTP POST to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, check response headers contains `x-vss-e2eid`
----
-- DNS lookup for results-receiver.actions.githubusercontent.com using dotnet
-- Ping results-receiver.actions.githubusercontent.com using dotnet
-- Make HTTP GET to https://results-receiver.actions.githubusercontent.com/health using dotnet, check response headers contains `X-GitHub-Request-Id`
 ## How to fix the issue?
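A small loop ties the per-endpoint `curl` checks above together; the endpoint list mirrors the GitHub.com URLs in this document (swap in the GHES hostnames as needed), and the header names come from the steps above:

```
# Sweep the GitHub.com endpoints and surface the status line plus the
# request-id / e2e-id headers the checks above look for.
for url in \
  https://api.github.com/zen \
  https://vstoken.actions.githubusercontent.com/_apis/health \
  https://pipelines.actions.githubusercontent.com/_apis/health; do
  echo "== $url"
  curl -sS -o /dev/null -D - "$url" | grep -i -E '^HTTP/|x-github-request-id|x-vss-e2eid' || echo "no matching headers (request may have failed)"
done
```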


@@ -14,7 +14,7 @@
 - A Proxy may try to modify the HTTPS request (like add or change some http headers) and causes the request become incompatible with the Actions Service (ASP.NetCore), Ex: [Nginx](https://github.com/dotnet/aspnetcore/issues/17081)
-- Firewall rules that block action runner from accessing [certain hosts](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github), ex: `*.github.com`, `*.actions.githubusercontent.com`, etc
+- Firewall rules that block action runner from accessing certain hosts, ex: `*.github.com`, `*.actions.githubusercontent.com`, etc
 ### Identify and solve these problems
@@ -42,7 +42,6 @@ If you are having trouble connecting, try these steps:
 - https://api.github.com/
 - https://vstoken.actions.githubusercontent.com/_apis/health
 - https://pipelines.actions.githubusercontent.com/_apis/health
-- https://results-receiver.actions.githubusercontent.com/health
 - For GHES/GHAE
 - https://myGHES.com/_services/vstoken/_apis/health
 - https://myGHES.com/_services/pipelines/_apis/health


@@ -157,7 +157,7 @@ cat (Runner/Worker)_TIMESTAMP.log # view your log file
 ## Styling
 We use the .NET Foundation and CoreCLR style guidelines [located here](
-https://github.com/dotnet/runtime/blob/main/docs/coding-guidelines/coding-style.md)
+https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md)
 ### Format C# Code
@@ -165,4 +165,4 @@ To format both staged and unstaged .cs files
 ```
 cd ./src
 ./dev.(cmd|sh) format
 ```


@@ -4,7 +4,16 @@
 ## Supported Distributions and Versions
-Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#linux)."
+x64
+- Red Hat Enterprise Linux 7
+- CentOS 7
+- Oracle Linux 7
+- Fedora 29+
+- Debian 9+
+- Ubuntu 16.04+
+- Linux Mint 18+
+- openSUSE 15+
+- SUSE Enterprise Linux (SLES) 12 SP2+
 ## Install .Net Core 3.x Linux Dependencies


@@ -4,6 +4,7 @@
 ## Supported Versions
-Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#macos)."
+- macOS High Sierra (10.13) and later versions
+- x64 and arm64 (Apple Silicon)
 ## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore30)


@@ -2,6 +2,11 @@
 ## Supported Versions
-Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#windows)."
+- Windows 7 64-bit
+- Windows 8.1 64-bit
+- Windows 10 64-bit
+- Windows Server 2012 R2 64-bit
+- Windows Server 2016 64-bit
+- Windows Server 2019 64-bit
 ## [More .NET Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30)


@@ -1,19 +1,14 @@
# Source: https://github.com/dotnet/dotnet-docker FROM mcr.microsoft.com/dotnet/runtime-deps:6.0 as build
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy as build
ARG TARGETOS
ARG TARGETARCH
ARG RUNNER_VERSION ARG RUNNER_VERSION
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.6.1 ARG RUNNER_ARCH="x64"
ARG DOCKER_VERSION=27.1.1 ARG RUNNER_CONTAINER_HOOKS_VERSION=0.3.1
ARG BUILDX_VERSION=0.16.2 ARG DOCKER_VERSION=20.10.23
RUN apt update -y && apt install curl unzip -y RUN apt update -y && apt install curl unzip -y
WORKDIR /actions-runner WORKDIR /actions-runner
RUN export RUNNER_ARCH=${TARGETARCH} \ RUN curl -f -L -o runner.tar.gz https://github.com/actions/runner/releases/download/v${RUNNER_VERSION}/actions-runner-linux-${RUNNER_ARCH}-${RUNNER_VERSION}.tar.gz \
&& if [ "$RUNNER_ARCH" = "amd64" ]; then export RUNNER_ARCH=x64 ; fi \
&& curl -f -L -o runner.tar.gz https://github.com/actions/runner/releases/download/v${RUNNER_VERSION}/actions-runner-${TARGETOS}-${RUNNER_ARCH}-${RUNNER_VERSION}.tar.gz \
&& tar xzf ./runner.tar.gz \ && tar xzf ./runner.tar.gz \
&& rm runner.tar.gz && rm runner.tar.gz
@@ -21,34 +16,23 @@ RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-c
&& unzip ./runner-container-hooks.zip -d ./k8s \ && unzip ./runner-container-hooks.zip -d ./k8s \
&& rm runner-container-hooks.zip && rm runner-container-hooks.zip
RUN export RUNNER_ARCH=${TARGETARCH} \ RUN export DOCKER_ARCH=x86_64 \
&& if [ "$RUNNER_ARCH" = "amd64" ]; then export DOCKER_ARCH=x86_64 ; fi \
&& if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \ && if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
&& curl -fLo docker.tgz https://download.docker.com/${TARGETOS}/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \ && curl -fLo docker.tgz https://download.docker.com/linux/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \
&& tar zxvf docker.tgz \ && tar zxvf docker.tgz \
&& rm -rf docker.tgz \ && rm -rf docker.tgz
&& mkdir -p /usr/local/lib/docker/cli-plugins \
&& curl -fLo /usr/local/lib/docker/cli-plugins/docker-buildx \
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
&& chmod +x /usr/local/lib/docker/cli-plugins/docker-buildx
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy FROM mcr.microsoft.com/dotnet/runtime-deps:6.0
ENV DEBIAN_FRONTEND=noninteractive ENV DEBIAN_FRONTEND=noninteractive
ENV RUNNER_MANUALLY_TRAP_SIG=1 ENV RUNNER_MANUALLY_TRAP_SIG=1
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1 ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
ENV ImageOS=ubuntu22
# 'gpg-agent' and 'software-properties-common' are needed for the 'add-apt-repository' command that follows RUN apt-get update -y \
RUN apt update -y \ && apt-get install -y --no-install-recommends \
&& apt install -y --no-install-recommends sudo lsb-release gpg-agent software-properties-common curl jq unzip \ sudo \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# Configure git-core/ppa based on guidance here: https://git-scm.com/download/linux
RUN add-apt-repository ppa:git-core/ppa \
&& apt update -y \
&& apt install -y --no-install-recommends git
RUN adduser --disabled-password --gecos "" --uid 1001 runner \ RUN adduser --disabled-password --gecos "" --uid 1001 runner \
&& groupadd docker --gid 123 \ && groupadd docker --gid 123 \
&& usermod -aG sudo runner \ && usermod -aG sudo runner \
@@ -59,7 +43,6 @@ RUN adduser --disabled-password --gecos "" --uid 1001 runner \
WORKDIR /home/runner WORKDIR /home/runner
COPY --chown=runner:docker --from=build /actions-runner . COPY --chown=runner:docker --from=build /actions-runner .
COPY --from=build /usr/local/lib/docker/cli-plugins/docker-buildx /usr/local/lib/docker/cli-plugins/docker-buildx
RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker
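The newer variant of this Dockerfile is parameterised by BuildKit's `TARGETOS`/`TARGETARCH`, downloads the runner tarball, the Docker static binaries and the buildx CLI plugin per architecture, and creates the unprivileged `runner` user. As a hedged sketch of how such an image might be built locally (the image tag and the `./images` build-context path are assumptions, not taken from this diff):
```bash
# Build the runner image for one architecture and load it into the local
# Docker engine. RUNNER_VERSION must match an existing release tag (without "v").
docker buildx build \
  --platform linux/arm64 \
  --build-arg RUNNER_VERSION=2.320.0 \
  -t example.local/actions-runner:2.320.0 \
  --load \
  ./images
```
For a true multi-architecture build you would pass `--platform linux/amd64,linux/arm64` and replace `--load` with `--push`, since `--load` can only import a single platform into the local engine.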

View File

@@ -1,29 +1,29 @@
## What's Changed
- Adding Snapshot additional mapping tokens https://github.com/actions/runner/pull/3468
- Create launch httpclient using the right handler and setting https://github.com/actions/runner/pull/3476
- Fix missing default user-agent for jitconfig runner https://github.com/actions/runner/pull/3473
- Cleanup back-compat code for interpreting Run Service status codes https://github.com/actions/runner/pull/3456
- Add runner or worker to the useragent https://github.com/actions/runner/pull/3457
- Handle Error Body in Responses from Broker https://github.com/actions/runner/pull/3454
- Fix issues for composite actions (Run Service flow) https://github.com/actions/runner/pull/3446
- Trace GitHub RequestId to log https://github.com/actions/runner/pull/3442
- Add `jq`, `git`, `unzip` and `curl` to default packages installed https://github.com/actions/runner/pull/3056
- Add pid to user-agent and session owner https://github.com/actions/runner/pull/3432
**Full Changelog**: https://github.com/actions/runner/compare/v2.319.1...v2.320.0
## Features
- Runner changes for communication with Results service (#2510, #2531, #2535, #2516)
- Add `*.ghe.localhost` domains to hosted server check (#2536)
- Add `OrchestrationId` to user-agent for better telemetry correlation. (#2568)
- Add warning to notify about forcing actions to run on node16 instead of node12 (#2678)
## Bugs
- Fix JIT configurations on Windows (#2497)
- Guard against NullReference while creating HostContext (#2343)
- Handles broken symlink in `Which` (#2150, #2196)
- Adding curl retry for external tool downloads (#2552, #2557)
- Limit the time we wait for waiting websocket to connect. (#2554)
## Misc
- Bump container hooks version to 0.3.1 in runner image (#2496)
- Runner changes to communicate with vNext services (#2487, #2500, #2505, #2541, #2547)
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
See https://docs.github.com/en/enterprise-cloud@latest/actions/hosting-your-own-runners/adding-self-hosted-runners_
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
The following snippet needs to be run in `powershell`:
```powershell
# Create a folder under the drive root
mkdir \actions-runner ; cd \actions-runner
# Download the latest runner package
@@ -34,14 +34,12 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
```
## [Pre-release] Windows arm64
**Warning:** Windows arm64 runners are currently in preview status and use [unofficial versions of nodejs](https://unofficial-builds.nodejs.org/). They are not intended for production workflows.
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
The following snippet needs to be run in `powershell`:
```powershell
# Create a folder under the drive root
mkdir \actions-runner ; cd \actions-runner
# Download the latest runner package
@@ -53,7 +51,7 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
## OSX x64
```bash
# Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
@@ -64,7 +62,7 @@ tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
## OSX arm64 (Apple silicon)
```bash
# Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
@@ -75,7 +73,7 @@ tar xzf ./actions-runner-osx-arm64-<RUNNER_VERSION>.tar.gz
## Linux x64
```bash
# Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
@@ -86,7 +84,7 @@ tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
## Linux arm64
```bash
# Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
@@ -97,7 +95,7 @@ tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
## Linux arm
```bash
# Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
@@ -107,7 +105,6 @@ tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
```
## Using your self-hosted runner
For additional details about configuring, running, or shutting down the runner, please check out our [product docs.](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/adding-self-hosted-runners)
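As a quick orientation (a minimal sketch, not a substitute for the linked docs): after downloading and extracting one of the packages above, the runner is registered with `config.sh` and started with `run.sh`. The URL and token below are placeholders; a registration token comes from your repository or organization settings.
```bash
# Register the runner against a repository (placeholder values),
# then start listening for jobs in the foreground.
./config.sh --url https://github.com/OWNER/REPO --token <REGISTRATION_TOKEN>
./run.sh
```
On Windows the equivalent scripts are `config.cmd` and `run.cmd`.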
## SHA-256 Checksums
@@ -121,3 +118,27 @@ The SHA-256 checksums for the packages included in this build are shown below:
- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->
- actions-runner-win-x64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-x64_noexternals --><WIN_X64_SHA_NOEXTERNALS><!-- END SHA win-x64_noexternals -->
- actions-runner-win-arm64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-arm64_noexternals --><WIN_ARM64_SHA_NOEXTERNALS><!-- END SHA win-arm64_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noexternals --><OSX_X64_SHA_NOEXTERNALS><!-- END SHA osx-x64_noexternals -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noexternals --><OSX_ARM64_SHA_NOEXTERNALS><!-- END SHA osx-arm64_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noexternals --><LINUX_X64_SHA_NOEXTERNALS><!-- END SHA linux-x64_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noexternals --><LINUX_ARM64_SHA_NOEXTERNALS><!-- END SHA linux-arm64_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noexternals --><LINUX_ARM_SHA_NOEXTERNALS><!-- END SHA linux-arm_noexternals -->
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-x64_noruntime --><WIN_X64_SHA_NORUNTIME><!-- END SHA win-x64_noruntime -->
- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-arm64_noruntime --><WIN_ARM64_SHA_NORUNTIME><!-- END SHA win-arm64_noruntime -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-x64_noruntime --><OSX_X64_SHA_NORUNTIME><!-- END SHA osx-x64_noruntime -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-arm64_noruntime --><OSX_ARM64_SHA_NORUNTIME><!-- END SHA osx-arm64_noruntime -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-x64_noruntime --><LINUX_X64_SHA_NORUNTIME><!-- END SHA linux-x64_noruntime -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm64_noruntime --><LINUX_ARM64_SHA_NORUNTIME><!-- END SHA linux-arm64_noruntime -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm_noruntime --><LINUX_ARM_SHA_NORUNTIME><!-- END SHA linux-arm_noruntime -->
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-x64_noruntime_noexternals --><WIN_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-x64_noruntime_noexternals -->
- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-arm64_noruntime_noexternals --><WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-arm64_noruntime_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noruntime_noexternals --><OSX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-x64_noruntime_noexternals -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noruntime_noexternals --><OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-arm64_noruntime_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noruntime_noexternals --><LINUX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-x64_noruntime_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noruntime_noexternals --><LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm64_noruntime_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noruntime_noexternals --><LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm_noruntime_noexternals -->
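The placeholders above are filled in at release time. A hedged example of how a downloaded package can be checked against the published value (the hash and file name here are placeholders):
```bash
# Verify a downloaded package against the checksum listed above.
# Note the two spaces between the hash and the file name.
echo "<LINUX_X64_SHA>  actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz" | shasum -a 256 -c
```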

View File

@@ -1 +1 @@
2.320.0 2.304.1

View File

@@ -8,7 +8,7 @@ set -e
# Configures it as a service more secure # Configures it as a service more secure
# Should be used on VMs and not containers # Should be used on VMs and not containers
# Works on OSX and Linux # Works on OSX and Linux
# Assumes x64 arch (support arm64) # Assumes x64 arch
# See EXAMPLES below # See EXAMPLES below
flags_found=false flags_found=false
@@ -87,9 +87,6 @@ sudo echo
runner_plat=linux runner_plat=linux
[ ! -z "$(which sw_vers)" ] && runner_plat=osx; [ ! -z "$(which sw_vers)" ] && runner_plat=osx;
runner_arch=x64
[ ! -z "$(arch | grep arm64)" ] && runner_arch=arm64
function fatal() function fatal()
{ {
echo "error: $1" >&2 echo "error: $1" >&2
@@ -142,7 +139,7 @@ echo "Downloading latest runner ..."
# For the GHES Alpha, download the runner from github.com # For the GHES Alpha, download the runner from github.com
latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name') latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name')
latest_version=$(echo ${latest_version_label:1}) latest_version=$(echo ${latest_version_label:1})
runner_file="actions-runner-${runner_plat}-${runner_arch}-${latest_version}.tar.gz" runner_file="actions-runner-${runner_plat}-x64-${latest_version}.tar.gz"
if [ -f "${runner_file}" ]; then if [ -f "${runner_file}" ]; then
echo "${runner_file} exists. skipping download." echo "${runner_file} exists. skipping download."

View File

@@ -1,5 +1,5 @@
[*.cs] [*.cs]
charset = utf-8-bom charset = utf-8
insert_final_newline = true insert_final_newline = true
csharp_new_line_before_else = true csharp_new_line_before_else = true

View File

@@ -0,0 +1 @@
39f2a931565d6a10e695ac8ed14bb9dcbb568151410349b32dbf9c27bae29602

View File

@@ -0,0 +1 @@
29ffb303537d8ba674fbebc7729292c21c4ebd17b3198f91ed593ef4cbbb67b5

View File

@@ -0,0 +1 @@
de6868a836fa3cb9e5ddddbc079da1c25e819aa2d2fc193cc9931c353687c57c

View File

@@ -0,0 +1 @@
339d3e1a5fd28450c0fe6cb820cc7aae291f0f9e2d153ac34e1f7b080e35d30e

View File

@@ -0,0 +1 @@
dcb7f606c1d7d290381e5020ee73e7f16dcbd2f20ac9b431362ccbb5120d449c

View File

@@ -0,0 +1 @@
1bbcb0e9a2cf4be4b1fce77458de139b70ac58efcbb415a6db028b9373ae1673

View File

@@ -0,0 +1 @@
44cd25f3c104d0abb44d262397a80e0b2c4f206465c5d899a22eec043dac0fb3

View File

@@ -0,0 +1 @@
3807dcbf947e840c33535fb466b096d76bf09e5c0254af8fc8cbbb24c6388222

View File

@@ -0,0 +1 @@
ee01eee80cd8a460a4b9780ee13fdd20f25c59e754b4ccd99df55fbba2a85634

View File

@@ -0,0 +1 @@
a9fb9c14e24e79aec97d4da197dd7bfc6364297d6fce573afb2df48cc9a931f8

View File

@@ -0,0 +1 @@
a4e0e8fc62eba0967a39c7d693dcd0aeb8b2bed0765f9c38df80d42884f65341

View File

@@ -0,0 +1 @@
17ac17fbe785b3d6fa2868d8d17185ebfe0c90b4b0ddf6b67eac70e42bcd989b

View File

@@ -0,0 +1 @@
89f24657a550f1e818b0e9975e5b80edcf4dd22b7d4bccbb9e48e37f45d30fb1

View File

@@ -0,0 +1 @@
24fd131b5dce33ef16038b771407bc0507da8682a72fb3b7780607235f76db0b

View File

@@ -1,19 +1,11 @@
{ {
"printWidth": 80, "printWidth": 80,
"tabWidth": 2, "tabWidth": 2,
"useTabs": false, "useTabs": false,
"semi": false, "semi": false,
"singleQuote": true, "singleQuote": true,
"trailingComma": "none", "trailingComma": "none",
"bracketSpacing": false, "bracketSpacing": false,
"arrowParens": "avoid", "arrowParens": "avoid",
"overrides": [ "parser": "typescript"
{ }
"files": "*.{js,ts,json}",
"options": {
"tabWidth": 2
}
}
]
}

View File

@@ -1,3 +1,4 @@
To compile this package (output will be stored in `Misc/layoutbin`) run `npm install && npm run prepare && npm run all`. To compile this package (output will be stored in `Misc/layoutbin`) run `npm install && npm run all`.
When you commit changes to the JSON or Typescript file, the javascript binary will be automatically re-compiled and added to the latest commit. > Note: this package also needs to be recompiled for dependabot PRs updating one of
> its dependencies.

File diff suppressed because it is too large

View File

@@ -9,9 +9,7 @@
"format-check": "prettier --check **/*.ts", "format-check": "prettier --check **/*.ts",
"lint": "eslint src/**/*.ts", "lint": "eslint src/**/*.ts",
"pack": "ncc build -o ../../layoutbin/hashFiles", "pack": "ncc build -o ../../layoutbin/hashFiles",
"all": "npm run format && npm run lint && npm run build && npm run pack", "all": "npm run build && npm run format && npm run lint && npm run pack"
"prepare": "cd ../../../../ && husky install"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
@@ -20,32 +18,18 @@
"keywords": [ "keywords": [
"actions" "actions"
], ],
"lint-staged": {
"*.md": [
"prettier --write",
"git add ."
],
"*.{ts,json}": [
"sh -c 'npm run all'",
"git add ."
]
},
"author": "GitHub Actions", "author": "GitHub Actions",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@actions/glob": "^0.4.0" "@actions/glob": "^0.1.0"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^20.6.2", "@types/node": "^12.7.12",
"@typescript-eslint/eslint-plugin": "^6.7.2", "@typescript-eslint/parser": "^5.15.0",
"@typescript-eslint/parser": "^6.7.2", "@vercel/ncc": "^0.36.0",
"@vercel/ncc": "^0.38.0", "eslint": "^8.11.0",
"eslint": "^8.47.0", "eslint-plugin-github": "^4.3.5",
"eslint-plugin-github": "^4.10.0", "prettier": "^1.19.1",
"eslint-plugin-prettier": "^5.0.0", "typescript": "^3.6.4"
"prettier": "^3.0.3",
"typescript": "^5.2.2",
"husky": "^8.0.3",
"lint-staged": "^14.0.0"
} }
} }

View File

@@ -52,13 +52,12 @@ async function run(): Promise<void> {
} }
} }
;(async () => { run()
try { .then(out => {
const out = await run()
console.log(out) console.log(out)
process.exit(0) process.exit(0)
} catch (err) { })
.catch(err => {
console.error(err) console.error(err)
process.exit(1) process.exit(1)
} })
})()

View File

@@ -4,12 +4,8 @@ PRECACHE=$2
NODE_URL=https://nodejs.org/dist NODE_URL=https://nodejs.org/dist
UNOFFICIAL_NODE_URL=https://unofficial-builds.nodejs.org/download/release UNOFFICIAL_NODE_URL=https://unofficial-builds.nodejs.org/download/release
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download NODE12_VERSION="12.22.7"
# When you update Node versions you must also create a new release of alpine_nodejs at that updated version. NODE16_VERSION="16.16.0"
# Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started
NODE16_VERSION="16.20.2"
NODE20_VERSION="20.13.1"
NODE16_UNOFFICIAL_VERSION="16.20.0" # used only for win-arm64, remove node16 unofficial version when official version is available
get_abs_path() { get_abs_path() {
# exploits the fact that pwd will print abs path when no args # exploits the fact that pwd will print abs path when no args
@@ -64,16 +60,17 @@ function acquireExternalTool() {
echo "Curl version: $CURL_VERSION" echo "Curl version: $CURL_VERSION"
# curl -f Fail silently (no output at all) on HTTP errors (H) # curl -f Fail silently (no output at all) on HTTP errors (H)
# -k Allow connections to SSL sites without certs (H)
# -S Show error. With -s, make curl show errors when they occur # -S Show error. With -s, make curl show errors when they occur
# -L Follow redirects (H) # -L Follow redirects (H)
# -o FILE Write to FILE instead of stdout # -o FILE Write to FILE instead of stdout
# --retry 3 Retries transient errors 3 times (timeouts, 5xx) # --retry 3 Retries transient errors 3 times (timeouts, 5xx)
if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then
# Curl version is less than or equal to 7.71.0, skipping retry-all-errors flag # Curl version is less than or equal to 7.71.0, skipping retry-all-errors flag
curl -fSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl' curl -fkSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
else else
# Curl version is greater than 7.71.0, running curl with --retry-all-errors flag # Curl version is greater than 7.71.0, running curl with --retry-all-errors flag
curl -fSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl' curl -fkSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
fi fi
# Move the partial file to the download target. # Move the partial file to the download target.
@@ -140,10 +137,10 @@ function acquireExternalTool() {
# Download the external tools only for Windows. # Download the external tools only for Windows.
if [[ "$PACKAGERUNTIME" == "win-x64" || "$PACKAGERUNTIME" == "win-x86" ]]; then if [[ "$PACKAGERUNTIME" == "win-x64" || "$PACKAGERUNTIME" == "win-x86" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/$PACKAGERUNTIME/node.exe" node12/bin
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/$PACKAGERUNTIME/node.lib" node12/bin
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
if [[ "$PRECACHE" != "" ]]; then if [[ "$PRECACHE" != "" ]]; then
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
fi fi
@@ -152,10 +149,8 @@ fi
# Download the external tools only for Windows. # Download the external tools only for Windows.
if [[ "$PACKAGERUNTIME" == "win-arm64" ]]; then if [[ "$PACKAGERUNTIME" == "win-arm64" ]]; then
# todo: replace these with official release when available # todo: replace these with official release when available
acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_UNOFFICIAL_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_UNOFFICIAL_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
if [[ "$PRECACHE" != "" ]]; then if [[ "$PRECACHE" != "" ]]; then
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
fi fi
@@ -163,30 +158,29 @@ fi
# Download the external tools only for OSX. # Download the external tools only for OSX.
if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-darwin-x64.tar.gz" node12 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-x64.tar.gz" node16 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-x64.tar.gz" node16 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-x64.tar.gz" node20 fix_nested_dir
fi fi
if [[ "$PACKAGERUNTIME" == "osx-arm64" ]]; then if [[ "$PACKAGERUNTIME" == "osx-arm64" ]]; then
# node.js v12 doesn't support macOS on arm64. # node.js v12 doesn't support macOS on arm64.
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-arm64.tar.gz" node16 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-arm64.tar.gz" node16 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-arm64.tar.gz" node20 fix_nested_dir
fi fi
# Download the external tools for Linux PACKAGERUNTIMEs. # Download the external tools for Linux PACKAGERUNTIMEs.
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-v${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
acquireExternalTool "$NODE_ALPINE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-x64.tar.gz" node20 fix_nested_dir
acquireExternalTool "$NODE_ALPINE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine
fi fi
if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-arm64.tar.gz" node12 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-arm64.tar.gz" node16 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-arm64.tar.gz" node16 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-arm64.tar.gz" node20 fix_nested_dir
fi fi
if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-armv7l.tar.gz" node12 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-armv7l.tar.gz" node16 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-armv7l.tar.gz" node16 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-armv7l.tar.gz" node20 fix_nested_dir
fi fi
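One non-obvious piece of this script is the curl capability check: `--retry-all-errors` only exists in curl 7.71.0 and newer, so the script compares versions with `sort -V` and looks at which value sorts first. A small self-contained illustration of that idiom (the version numbers are just examples):
```bash
# If the minimum required version is NOT the smallest of the pair,
# the installed curl is older than the minimum and the flag must be skipped.
min_version="7.71.0"
curl_version="7.68.0"
if [[ "$(printf '%s\n' "$min_version" "$curl_version" | sort -V | head -n1)" != "$min_version" ]]; then
  echo "curl $curl_version is older than $min_version: skip --retry-all-errors"
else
  echo "curl $curl_version supports --retry-all-errors"
fi
```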

View File

@@ -114,11 +114,6 @@ var runService = function () {
); );
stopping = true; stopping = true;
} }
} else if (code === 5) {
console.log(
"Runner listener exit with Session Conflict error, stop the service, no retry needed."
);
stopping = true;
} else { } else {
var messagePrefix = "Runner listener exit with undefined return code"; var messagePrefix = "Runner listener exit with undefined return code";
unknownFailureRetryCount++; unknownFailureRetryCount++;
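For reference, the exit-code policy this block implements (a clean exit stops, a session conflict stops without retry, anything unknown is retried up to a limit) can be mimicked when wrapping the listener yourself. The sketch below is a hypothetical shell wrapper, not code from this repository, and it assumes the launcher propagates the listener's exit code.
```bash
# Hypothetical wrapper reproducing the service's exit-code handling described above.
./run.sh
code=$?
case "$code" in
  0) echo "Listener exited cleanly; stopping." ;;
  5) echo "Session conflict; stop the service, no retry needed." ;;
  *) echo "Listener exited with code $code; a service would retry here." ;;
esac
```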

View File

@@ -6,29 +6,6 @@
"use strict"; "use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) { return new (P || (P = Promise))(function (resolve, reject) {
@@ -45,6 +22,13 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
}; };
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
const crypto = __importStar(__nccwpck_require__(6113)); const crypto = __importStar(__nccwpck_require__(6113));
const fs = __importStar(__nccwpck_require__(7147)); const fs = __importStar(__nccwpck_require__(7147));
@@ -53,7 +37,7 @@ const path = __importStar(__nccwpck_require__(1017));
const stream = __importStar(__nccwpck_require__(2781)); const stream = __importStar(__nccwpck_require__(2781));
const util = __importStar(__nccwpck_require__(3837)); const util = __importStar(__nccwpck_require__(3837));
function run() { function run() {
var _a, e_1, _b, _c; var e_1, _a;
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
// arg0 -> node // arg0 -> node
// arg1 -> hashFiles.js // arg1 -> hashFiles.js
@@ -72,10 +56,8 @@ function run() {
let count = 0; let count = 0;
const globber = yield glob.create(matchPatterns, { followSymbolicLinks }); const globber = yield glob.create(matchPatterns, { followSymbolicLinks });
try { try {
for (var _d = true, _e = __asyncValues(globber.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) { for (var _b = __asyncValues(globber.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
_c = _f.value; const file = _c.value;
_d = false;
const file = _c;
console.log(file); console.log(file);
if (!file.startsWith(`${githubWorkspace}${path.sep}`)) { if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
console.log(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); console.log(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
@@ -98,7 +80,7 @@ function run() {
catch (e_1_1) { e_1 = { error: e_1_1 }; } catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally { finally {
try { try {
if (!_d && !_a && (_b = _e.return)) yield _b.call(_e); if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
} }
finally { if (e_1) throw e_1.error; } finally { if (e_1) throw e_1.error; }
} }
@@ -112,18 +94,15 @@ function run() {
} }
}); });
} }
; run()
(() => __awaiter(void 0, void 0, void 0, function* () { .then(out => {
try { console.log(out);
const out = yield run(); process.exit(0);
console.log(out); })
process.exit(0); .catch(err => {
} console.error(err);
catch (err) { process.exit(1);
console.error(err); });
process.exit(1);
}
}))();
/***/ }), /***/ }),
@@ -267,6 +246,7 @@ const file_command_1 = __nccwpck_require__(717);
const utils_1 = __nccwpck_require__(5278); const utils_1 = __nccwpck_require__(5278);
const os = __importStar(__nccwpck_require__(2037)); const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017)); const path = __importStar(__nccwpck_require__(1017));
const uuid_1 = __nccwpck_require__(5840);
const oidc_utils_1 = __nccwpck_require__(8041); const oidc_utils_1 = __nccwpck_require__(8041);
/** /**
* The code to exit an action * The code to exit an action
@@ -296,9 +276,20 @@ function exportVariable(name, val) {
process.env[name] = convertedVal; process.env[name] = convertedVal;
const filePath = process.env['GITHUB_ENV'] || ''; const filePath = process.env['GITHUB_ENV'] || '';
if (filePath) { if (filePath) {
return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); const delimiter = `ghadelimiter_${uuid_1.v4()}`;
// These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter.
if (name.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
}
if (convertedVal.includes(delimiter)) {
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
}
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
file_command_1.issueCommand('ENV', commandValue);
}
else {
command_1.issueCommand('set-env', { name }, convertedVal);
} }
command_1.issueCommand('set-env', { name }, convertedVal);
} }
exports.exportVariable = exportVariable; exports.exportVariable = exportVariable;
/** /**
@@ -316,7 +307,7 @@ exports.setSecret = setSecret;
function addPath(inputPath) { function addPath(inputPath) {
const filePath = process.env['GITHUB_PATH'] || ''; const filePath = process.env['GITHUB_PATH'] || '';
if (filePath) { if (filePath) {
file_command_1.issueFileCommand('PATH', inputPath); file_command_1.issueCommand('PATH', inputPath);
} }
else { else {
command_1.issueCommand('add-path', {}, inputPath); command_1.issueCommand('add-path', {}, inputPath);
@@ -356,10 +347,7 @@ function getMultilineInput(name, options) {
const inputs = getInput(name, options) const inputs = getInput(name, options)
.split('\n') .split('\n')
.filter(x => x !== ''); .filter(x => x !== '');
if (options && options.trimWhitespace === false) { return inputs;
return inputs;
}
return inputs.map(input => input.trim());
} }
exports.getMultilineInput = getMultilineInput; exports.getMultilineInput = getMultilineInput;
/** /**
@@ -392,12 +380,8 @@ exports.getBooleanInput = getBooleanInput;
*/ */
// eslint-disable-next-line @typescript-eslint/no-explicit-any // eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) { function setOutput(name, value) {
const filePath = process.env['GITHUB_OUTPUT'] || '';
if (filePath) {
return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
}
process.stdout.write(os.EOL); process.stdout.write(os.EOL);
command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); command_1.issueCommand('set-output', { name }, value);
} }
exports.setOutput = setOutput; exports.setOutput = setOutput;
/** /**
@@ -526,11 +510,7 @@ exports.group = group;
*/ */
// eslint-disable-next-line @typescript-eslint/no-explicit-any // eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) { function saveState(name, value) {
const filePath = process.env['GITHUB_STATE'] || ''; command_1.issueCommand('save-state', { name }, value);
if (filePath) {
return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
}
command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
} }
exports.saveState = saveState; exports.saveState = saveState;
/** /**
@@ -596,14 +576,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result; return result;
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; exports.issueCommand = void 0;
// We use any as a valid input type // We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */ /* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__nccwpck_require__(7147)); const fs = __importStar(__nccwpck_require__(7147));
const os = __importStar(__nccwpck_require__(2037)); const os = __importStar(__nccwpck_require__(2037));
const uuid_1 = __nccwpck_require__(5840);
const utils_1 = __nccwpck_require__(5278); const utils_1 = __nccwpck_require__(5278);
function issueFileCommand(command, message) { function issueCommand(command, message) {
const filePath = process.env[`GITHUB_${command}`]; const filePath = process.env[`GITHUB_${command}`];
if (!filePath) { if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`); throw new Error(`Unable to find environment variable for file command ${command}`);
@@ -615,22 +594,7 @@ function issueFileCommand(command, message) {
encoding: 'utf8' encoding: 'utf8'
}); });
} }
exports.issueFileCommand = issueFileCommand; exports.issueCommand = issueCommand;
function prepareKeyValueMessage(key, value) {
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
const convertedValue = utils_1.toCommandValue(value);
// These should realistically never happen, but just in case someone finds a
// way to exploit uuid generation let's not allow keys or values that contain
// the delimiter.
if (key.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
}
if (convertedValue.includes(delimiter)) {
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
}
return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}
exports.prepareKeyValueMessage = prepareKeyValueMessage;
//# sourceMappingURL=file-command.js.map //# sourceMappingURL=file-command.js.map
/***/ }), /***/ }),
@@ -1136,9 +1100,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}); });
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.hashFiles = exports.create = void 0;
const internal_globber_1 = __nccwpck_require__(8298); const internal_globber_1 = __nccwpck_require__(8298);
const internal_hash_files_1 = __nccwpck_require__(2448);
/** /**
* Constructs a globber * Constructs a globber
* *
@@ -1151,56 +1113,17 @@ function create(patterns, options) {
}); });
} }
exports.create = create; exports.create = create;
/**
* Computes the sha256 hash of a glob
*
* @param patterns Patterns separated by newlines
* @param currentWorkspace Workspace used when matching files
* @param options Glob options
* @param verbose Enables verbose logging
*/
function hashFiles(patterns, currentWorkspace = '', options, verbose = false) {
return __awaiter(this, void 0, void 0, function* () {
let followSymbolicLinks = true;
if (options && typeof options.followSymbolicLinks === 'boolean') {
followSymbolicLinks = options.followSymbolicLinks;
}
const globber = yield create(patterns, { followSymbolicLinks });
return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose);
});
}
exports.hashFiles = hashFiles;
//# sourceMappingURL=glob.js.map //# sourceMappingURL=glob.js.map
/***/ }), /***/ }),
/***/ 1026: /***/ 1026:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict"; "use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getOptions = void 0; const core = __nccwpck_require__(2186);
const core = __importStar(__nccwpck_require__(2186));
/** /**
* Returns a copy with defaults filled in. * Returns a copy with defaults filled in.
*/ */
@@ -1208,7 +1131,6 @@ function getOptions(copy) {
const result = { const result = {
followSymbolicLinks: true, followSymbolicLinks: true,
implicitDescendants: true, implicitDescendants: true,
matchDirectories: true,
omitBrokenSymbolicLinks: true omitBrokenSymbolicLinks: true
}; };
if (copy) { if (copy) {
@@ -1220,10 +1142,6 @@ function getOptions(copy) {
result.implicitDescendants = copy.implicitDescendants; result.implicitDescendants = copy.implicitDescendants;
core.debug(`implicitDescendants '${result.implicitDescendants}'`); core.debug(`implicitDescendants '${result.implicitDescendants}'`);
} }
if (typeof copy.matchDirectories === 'boolean') {
result.matchDirectories = copy.matchDirectories;
core.debug(`matchDirectories '${result.matchDirectories}'`);
}
if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
@@ -1241,25 +1159,6 @@ exports.getOptions = getOptions;
"use strict"; "use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) { return new (P || (P = Promise))(function (resolve, reject) {
@@ -1289,12 +1188,11 @@ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _ar
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DefaultGlobber = void 0; const core = __nccwpck_require__(2186);
const core = __importStar(__nccwpck_require__(2186)); const fs = __nccwpck_require__(7147);
const fs = __importStar(__nccwpck_require__(7147)); const globOptionsHelper = __nccwpck_require__(1026);
const globOptionsHelper = __importStar(__nccwpck_require__(1026)); const path = __nccwpck_require__(1017);
const path = __importStar(__nccwpck_require__(1017)); const patternHelper = __nccwpck_require__(9005);
const patternHelper = __importStar(__nccwpck_require__(9005));
const internal_match_kind_1 = __nccwpck_require__(1063); const internal_match_kind_1 = __nccwpck_require__(1063);
const internal_pattern_1 = __nccwpck_require__(4536); const internal_pattern_1 = __nccwpck_require__(4536);
const internal_search_state_1 = __nccwpck_require__(9117); const internal_search_state_1 = __nccwpck_require__(9117);
@@ -1340,7 +1238,7 @@ class DefaultGlobber {
if (options.implicitDescendants && if (options.implicitDescendants &&
(pattern.trailingSeparator || (pattern.trailingSeparator ||
pattern.segments[pattern.segments.length - 1] !== '**')) { pattern.segments[pattern.segments.length - 1] !== '**')) {
patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));
} }
} }
// Push the search paths // Push the search paths
@@ -1383,7 +1281,7 @@ class DefaultGlobber {
// Directory // Directory
if (stats.isDirectory()) { if (stats.isDirectory()) {
// Matched // Matched
if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { if (match & internal_match_kind_1.MatchKind.Directory) {
yield yield __await(item.path); yield yield __await(item.path);
} }
// Descend? // Descend?
@@ -1478,117 +1376,12 @@ exports.DefaultGlobber = DefaultGlobber;
/***/ }), /***/ }),
/***/ 2448:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __asyncValues = (this && this.__asyncValues) || function (o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.hashFiles = void 0;
const crypto = __importStar(__nccwpck_require__(6113));
const core = __importStar(__nccwpck_require__(2186));
const fs = __importStar(__nccwpck_require__(7147));
const stream = __importStar(__nccwpck_require__(2781));
const util = __importStar(__nccwpck_require__(3837));
const path = __importStar(__nccwpck_require__(1017));
function hashFiles(globber, currentWorkspace, verbose = false) {
var e_1, _a;
var _b;
return __awaiter(this, void 0, void 0, function* () {
const writeDelegate = verbose ? core.info : core.debug;
let hasMatch = false;
const githubWorkspace = currentWorkspace
? currentWorkspace
: (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
const result = crypto.createHash('sha256');
let count = 0;
try {
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
const file = _d.value;
writeDelegate(file);
if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash = crypto.createHash('sha256');
const pipeline = util.promisify(stream.pipeline);
yield pipeline(fs.createReadStream(file), hash);
result.write(hash.digest());
count++;
if (!hasMatch) {
hasMatch = true;
}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
}
finally { if (e_1) throw e_1.error; }
}
result.end();
if (hasMatch) {
writeDelegate(`Found ${count} files to hash.`);
return result.digest('hex');
}
else {
writeDelegate(`No matches found for glob`);
return '';
}
});
}
exports.hashFiles = hashFiles;
//# sourceMappingURL=internal-hash-files.js.map
/***/ }),
/***/ 1063: /***/ 1063:
/***/ ((__unused_webpack_module, exports) => { /***/ ((__unused_webpack_module, exports) => {
"use strict"; "use strict";
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.MatchKind = void 0;
/** /**
* Indicates whether a pattern matches a path * Indicates whether a pattern matches a path
*/ */
@@ -1608,36 +1401,13 @@ var MatchKind;
/***/ }), /***/ }),
/***/ 1849: /***/ 1849:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict"; "use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; const assert = __nccwpck_require__(9491);
const path = __importStar(__nccwpck_require__(1017)); const path = __nccwpck_require__(1017);
const assert_1 = __importDefault(__nccwpck_require__(9491));
const IS_WINDOWS = process.platform === 'win32'; const IS_WINDOWS = process.platform === 'win32';
/** /**
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
@@ -1677,8 +1447,8 @@ exports.dirname = dirname;
* or `C:` are expanded based on the current working directory. * or `C:` are expanded based on the current working directory.
*/ */
function ensureAbsoluteRoot(root, itemPath) { function ensureAbsoluteRoot(root, itemPath) {
assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); assert(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); assert(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
// Already rooted // Already rooted
if (hasAbsoluteRoot(itemPath)) { if (hasAbsoluteRoot(itemPath)) {
return itemPath; return itemPath;
@@ -1688,7 +1458,7 @@ function ensureAbsoluteRoot(root, itemPath) {
// Check for itemPath like C: or C:foo // Check for itemPath like C: or C:foo
if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
let cwd = process.cwd(); let cwd = process.cwd();
assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
// Drive letter matches cwd? Expand to cwd // Drive letter matches cwd? Expand to cwd
if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
// Drive only, e.g. C: // Drive only, e.g. C:
@@ -1713,11 +1483,11 @@ function ensureAbsoluteRoot(root, itemPath) {
// Check for itemPath like \ or \foo // Check for itemPath like \ or \foo
else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
const cwd = process.cwd(); const cwd = process.cwd();
assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
return `${cwd[0]}:\\${itemPath.substr(1)}`; return `${cwd[0]}:\\${itemPath.substr(1)}`;
} }
} }
assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); assert(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
// Otherwise ensure root ends with a separator // Otherwise ensure root ends with a separator
if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
// Intentionally empty // Intentionally empty
@@ -1734,7 +1504,7 @@ exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
* `\\hello\share` and `C:\hello` (and using alternate separator). * `\\hello\share` and `C:\hello` (and using alternate separator).
*/ */
function hasAbsoluteRoot(itemPath) { function hasAbsoluteRoot(itemPath) {
assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); assert(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
// Normalize separators // Normalize separators
itemPath = normalizeSeparators(itemPath); itemPath = normalizeSeparators(itemPath);
// Windows // Windows
@@ -1751,7 +1521,7 @@ exports.hasAbsoluteRoot = hasAbsoluteRoot;
* `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
*/ */
function hasRoot(itemPath) { function hasRoot(itemPath) {
assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); assert(itemPath, `isRooted parameter 'itemPath' must not be empty`);
// Normalize separators // Normalize separators
itemPath = normalizeSeparators(itemPath); itemPath = normalizeSeparators(itemPath);
// Windows // Windows
@@ -1813,37 +1583,14 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
/***/ }), /***/ }),
/***/ 6836: /***/ 6836:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict"; "use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Path = void 0; const assert = __nccwpck_require__(9491);
const path = __importStar(__nccwpck_require__(1017)); const path = __nccwpck_require__(1017);
const pathHelper = __importStar(__nccwpck_require__(1849)); const pathHelper = __nccwpck_require__(1849);
const assert_1 = __importDefault(__nccwpck_require__(9491));
const IS_WINDOWS = process.platform === 'win32'; const IS_WINDOWS = process.platform === 'win32';
/** /**
* Helper class for parsing paths into segments * Helper class for parsing paths into segments
@@ -1857,7 +1604,7 @@ class Path {
this.segments = []; this.segments = [];
// String // String
if (typeof itemPath === 'string') { if (typeof itemPath === 'string') {
assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); assert(itemPath, `Parameter 'itemPath' must not be empty`);
// Normalize slashes and trim unnecessary trailing slash // Normalize slashes and trim unnecessary trailing slash
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
// Not rooted // Not rooted
@@ -1884,24 +1631,24 @@ class Path {
// Array // Array
else { else {
// Must not be empty // Must not be empty
assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
// Each segment // Each segment
for (let i = 0; i < itemPath.length; i++) { for (let i = 0; i < itemPath.length; i++) {
let segment = itemPath[i]; let segment = itemPath[i];
// Must not be empty // Must not be empty
assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); assert(segment, `Parameter 'itemPath' must not contain any empty segments`);
// Normalize slashes // Normalize slashes
segment = pathHelper.normalizeSeparators(itemPath[i]); segment = pathHelper.normalizeSeparators(itemPath[i]);
// Root segment // Root segment
if (i === 0 && pathHelper.hasRoot(segment)) { if (i === 0 && pathHelper.hasRoot(segment)) {
segment = pathHelper.safeTrimTrailingSeparator(segment); segment = pathHelper.safeTrimTrailingSeparator(segment);
assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
this.segments.push(segment); this.segments.push(segment);
} }
// All other segments // All other segments
else { else {
// Must not contain slash // Must not contain slash
assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
this.segments.push(segment); this.segments.push(segment);
} }
} }
@@ -1933,32 +1680,12 @@ exports.Path = Path;
/***/ }), /***/ }),
/***/ 9005: /***/ 9005:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict"; "use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.partialMatch = exports.match = exports.getSearchPaths = void 0; const pathHelper = __nccwpck_require__(1849);
const pathHelper = __importStar(__nccwpck_require__(1849));
const internal_match_kind_1 = __nccwpck_require__(1063); const internal_match_kind_1 = __nccwpck_require__(1063);
const IS_WINDOWS = process.platform === 'win32'; const IS_WINDOWS = process.platform === 'win32';
/** /**
@@ -2034,44 +1761,21 @@ exports.partialMatch = partialMatch;
/***/ }), /***/ }),
/***/ 4536: /***/ 4536:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict"; "use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Pattern = void 0; const assert = __nccwpck_require__(9491);
const os = __importStar(__nccwpck_require__(2037)); const os = __nccwpck_require__(2037);
const path = __importStar(__nccwpck_require__(1017)); const path = __nccwpck_require__(1017);
const pathHelper = __importStar(__nccwpck_require__(1849)); const pathHelper = __nccwpck_require__(1849);
const assert_1 = __importDefault(__nccwpck_require__(9491));
const minimatch_1 = __nccwpck_require__(3973); const minimatch_1 = __nccwpck_require__(3973);
const internal_match_kind_1 = __nccwpck_require__(1063); const internal_match_kind_1 = __nccwpck_require__(1063);
const internal_path_1 = __nccwpck_require__(6836); const internal_path_1 = __nccwpck_require__(6836);
const IS_WINDOWS = process.platform === 'win32'; const IS_WINDOWS = process.platform === 'win32';
class Pattern { class Pattern {
constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { constructor(patternOrNegate, segments) {
/** /**
* Indicates whether matches should be excluded from the result set * Indicates whether matches should be excluded from the result set
*/ */
@@ -2085,9 +1789,9 @@ class Pattern {
else { else {
// Convert to pattern // Convert to pattern
segments = segments || []; segments = segments || [];
assert_1.default(segments.length, `Parameter 'segments' must not empty`); assert(segments.length, `Parameter 'segments' must not empty`);
const root = Pattern.getLiteral(segments[0]); const root = Pattern.getLiteral(segments[0]);
assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); assert(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
pattern = new internal_path_1.Path(segments).toString().trim(); pattern = new internal_path_1.Path(segments).toString().trim();
if (patternOrNegate) { if (patternOrNegate) {
pattern = `!${pattern}`; pattern = `!${pattern}`;
@@ -2099,7 +1803,7 @@ class Pattern {
pattern = pattern.substr(1).trim(); pattern = pattern.substr(1).trim();
} }
// Normalize slashes and ensures absolute root // Normalize slashes and ensures absolute root
pattern = Pattern.fixupPattern(pattern, homedir); pattern = Pattern.fixupPattern(pattern);
// Segments // Segments
this.segments = new internal_path_1.Path(pattern).segments; this.segments = new internal_path_1.Path(pattern).segments;
// Trailing slash indicates the pattern should only match directories, not regular files // Trailing slash indicates the pattern should only match directories, not regular files
@@ -2115,7 +1819,6 @@ class Pattern {
this.searchPath = new internal_path_1.Path(searchSegments).toString(); this.searchPath = new internal_path_1.Path(searchSegments).toString();
// Root RegExp (required when determining partial match) // Root RegExp (required when determining partial match)
this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');
this.isImplicitPattern = isImplicitPattern;
// Create minimatch // Create minimatch
const minimatchOptions = { const minimatchOptions = {
dot: true, dot: true,
@@ -2137,11 +1840,11 @@ class Pattern {
// Normalize slashes // Normalize slashes
itemPath = pathHelper.normalizeSeparators(itemPath); itemPath = pathHelper.normalizeSeparators(itemPath);
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately // Append a trailing slash. Otherwise Minimatch will not match the directory immediately
// preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns // preceeding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { if (!itemPath.endsWith(path.sep)) {
// Note, this is safe because the constructor ensures the pattern has an absolute root. // Note, this is safe because the constructor ensures the pattern has an absolute root.
// For example, formats like C: and C:foo on Windows are resolved to an absolute root. // For example, formats like C: and C:foo on Windows are resolved to an aboslute root.
itemPath = `${itemPath}${path.sep}`; itemPath = `${itemPath}${path.sep}`;
} }
} }
@@ -2179,15 +1882,15 @@ class Pattern {
/** /**
* Normalizes slashes and ensures absolute root * Normalizes slashes and ensures absolute root
*/ */
static fixupPattern(pattern, homedir) { static fixupPattern(pattern) {
// Empty // Empty
assert_1.default(pattern, 'pattern cannot be empty'); assert(pattern, 'pattern cannot be empty');
// Must not contain `.` segment, unless first segment // Must not contain `.` segment, unless first segment
// Must not contain `..` segment // Must not contain `..` segment
const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); assert(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
// Must not contain globs in root, e.g. Windows UNC path \\foo\b*r // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); assert(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
// Normalize slashes // Normalize slashes
pattern = pathHelper.normalizeSeparators(pattern); pattern = pathHelper.normalizeSeparators(pattern);
// Replace leading `.` segment // Replace leading `.` segment
@@ -2196,9 +1899,9 @@ class Pattern {
} }
// Replace leading `~` segment // Replace leading `~` segment
else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
homedir = homedir || os.homedir(); const homedir = os.homedir();
assert_1.default(homedir, 'Unable to determine HOME directory'); assert(homedir, 'Unable to determine HOME directory');
assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); assert(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
pattern = Pattern.globEscape(homedir) + pattern.substr(1); pattern = Pattern.globEscape(homedir) + pattern.substr(1);
} }
// Replace relative drive root, e.g. pattern is C: or C:foo // Replace relative drive root, e.g. pattern is C: or C:foo
@@ -2301,7 +2004,6 @@ exports.Pattern = Pattern;
"use strict"; "use strict";
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.SearchState = void 0;
class SearchState { class SearchState {
constructor(path, level) { constructor(path, level) {
this.path = path; this.path = path;
@@ -2530,19 +2232,6 @@ class HttpClientResponse {
})); }));
}); });
} }
readBodyBuffer() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
const chunks = [];
this.message.on('data', (chunk) => {
chunks.push(chunk);
});
this.message.on('end', () => {
resolve(Buffer.concat(chunks));
});
}));
});
}
} }
exports.HttpClientResponse = HttpClientResponse; exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) { function isHttps(requestUrl) {
@@ -3047,13 +2736,7 @@ function getProxyUrl(reqUrl) {
} }
})(); })();
if (proxyVar) { if (proxyVar) {
try { return new URL(proxyVar);
return new URL(proxyVar);
}
catch (_a) {
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
return new URL(`http://${proxyVar}`);
}
} }
else { else {
return undefined; return undefined;
@@ -3064,10 +2747,6 @@ function checkBypass(reqUrl) {
if (!reqUrl.hostname) { if (!reqUrl.hostname) {
return false; return false;
} }
const reqHost = reqUrl.hostname;
if (isLoopbackAddress(reqHost)) {
return true;
}
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
if (!noProxy) { if (!noProxy) {
return false; return false;
@@ -3093,24 +2772,13 @@ function checkBypass(reqUrl) {
.split(',') .split(',')
.map(x => x.trim().toUpperCase()) .map(x => x.trim().toUpperCase())
.filter(x => x)) { .filter(x => x)) {
if (upperNoProxyItem === '*' || if (upperReqHosts.some(x => x === upperNoProxyItem)) {
upperReqHosts.some(x => x === upperNoProxyItem ||
x.endsWith(`.${upperNoProxyItem}`) ||
(upperNoProxyItem.startsWith('.') &&
x.endsWith(`${upperNoProxyItem}`)))) {
return true; return true;
} }
} }
return false; return false;
} }
exports.checkBypass = checkBypass; exports.checkBypass = checkBypass;
function isLoopbackAddress(host) {
const hostLower = host.toLowerCase();
return (hostLower === 'localhost' ||
hostLower.startsWith('127.') ||
hostLower.startsWith('[::1]') ||
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
}
//# sourceMappingURL=proxy.js.map //# sourceMappingURL=proxy.js.map
/***/ }), /***/ }),
@@ -3149,9 +2817,6 @@ function range(a, b, str) {
var i = ai; var i = ai;
if (ai >= 0 && bi > 0) { if (ai >= 0 && bi > 0) {
if(a===b) {
return [ai, bi];
}
begs = []; begs = [];
left = str.length; left = str.length;
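Note: one side of the proxy.js hunks above hardens proxy handling — getProxyUrl tolerates proxy variables without an http/https scheme, and checkBypass short-circuits loopback hosts and honours wildcard and domain-suffix entries in NO_PROXY. The following is a minimal TypeScript sketch of that bypass rule for illustration only; the function name and parameters are assumptions, not the @actions/http-client API.

```typescript
// Sketch only: approximates the bypass rule visible in the hunks above,
// not the library's actual exported surface.
function shouldBypassProxy(reqUrl: URL, noProxy: string): boolean {
  const host = reqUrl.hostname.toLowerCase();
  // Loopback addresses never go through the proxy in the fuller version.
  if (host === 'localhost' || host.startsWith('127.') ||
      host.startsWith('[::1]') || host.startsWith('[0:0:0:0:0:0:0:1]')) {
    return true;
  }
  // Compare the request host (with and without port) against each NO_PROXY entry.
  const upperReqHosts = [reqUrl.hostname.toUpperCase()];
  if (reqUrl.port) {
    upperReqHosts.push(`${upperReqHosts[0]}:${reqUrl.port}`);
  }
  for (const item of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) {
    if (item === '*' ||
        upperReqHosts.some(x => x === item ||
                                x.endsWith(`.${item}`) ||
                                (item.startsWith('.') && x.endsWith(item)))) {
      return true;
    }
  }
  return false;
}

// Example: both '.github.com' and 'github.com' bypass the proxy for api.github.com.
console.log(shouldBypassProxy(new URL('https://api.github.com'), '.github.com')); // true
```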
@@ -2,7 +2,7 @@
SET UPDATEFILE=update.finished SET UPDATEFILE=update.finished
"%~dp0\bin\Runner.Listener.exe" run %* "%~dp0\bin\Runner.Listener.exe" run %*
rem using `if %ERRORLEVEL% EQU N` instead of `if ERRORLEVEL N` rem using `if %ERRORLEVEL% EQU N` insterad of `if ERRORLEVEL N`
rem `if ERRORLEVEL N` means: error level is N or MORE rem `if ERRORLEVEL N` means: error level is N or MORE
if %ERRORLEVEL% EQU 0 ( if %ERRORLEVEL% EQU 0 (
@@ -49,10 +49,5 @@ if %ERRORLEVEL% EQU 4 (
exit /b 1 exit /b 1
) )
if %ERRORLEVEL% EQU 5 (
echo "Runner listener exit with Session Conflict error, stop the service, no retry needed."
exit /b 0
)
echo "Exiting after unknown error code: %ERRORLEVEL%" echo "Exiting after unknown error code: %ERRORLEVEL%"
exit /b 0 exit /b 0
@@ -70,9 +70,6 @@ elif [[ $returnCode == 4 ]]; then
"$DIR"/safe_sleep.sh 1 "$DIR"/safe_sleep.sh 1
done done
exit 2 exit 2
elif [[ $returnCode == 5 ]]; then
echo "Runner listener exit with Session Conflict error, stop the service, no retry needed."
exit 0
else else
echo "Exiting with unknown error code: ${returnCode}" echo "Exiting with unknown error code: ${returnCode}"
exit 0 exit 0
@@ -38,7 +38,7 @@ runWithManualTrap() {
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
"$DIR"/run-helper.sh $* & "$DIR"/run-helper.sh $* &
PID=$! PID=$!
wait $PID wait -f $PID
returnCode=$? returnCode=$?
if [[ $returnCode -eq 2 ]]; then if [[ $returnCode -eq 2 ]]; then
echo "Restarting runner..." echo "Restarting runner..."
@@ -84,4 +84,4 @@ if [[ -z "$RUNNER_MANUALLY_TRAP_SIG" ]]; then
run $* run $*
else else
runWithManualTrap $* runWithManualTrap $*
fi fi
src/Misc/runnercoreassets Normal file
@@ -0,0 +1,57 @@
actions.runner.plist.template
actions.runner.service.template
checkScripts/downloadCert.js
checkScripts/makeWebRequest.js
darwin.svc.sh.template
hashFiles/index.js
installdependencies.sh
macos-run-invoker.js
Microsoft.IdentityModel.Logging.dll
Microsoft.IdentityModel.Tokens.dll
Minimatch.dll
Newtonsoft.Json.Bson.dll
Newtonsoft.Json.dll
Runner.Common.deps.json
Runner.Common.dll
Runner.Common.pdb
Runner.Listener
Runner.Listener.deps.json
Runner.Listener.dll
Runner.Listener.exe
Runner.Listener.pdb
Runner.Listener.runtimeconfig.json
Runner.PluginHost
Runner.PluginHost.deps.json
Runner.PluginHost.dll
Runner.PluginHost.exe
Runner.PluginHost.pdb
Runner.PluginHost.runtimeconfig.json
Runner.Plugins.deps.json
Runner.Plugins.dll
Runner.Plugins.pdb
Runner.Sdk.deps.json
Runner.Sdk.dll
Runner.Sdk.pdb
Runner.Worker
Runner.Worker.deps.json
Runner.Worker.dll
Runner.Worker.exe
Runner.Worker.pdb
Runner.Worker.runtimeconfig.json
RunnerService.exe
RunnerService.exe.config
RunnerService.js
RunnerService.pdb
runsvc.sh
Sdk.deps.json
Sdk.dll
Sdk.pdb
System.IdentityModel.Tokens.Jwt.dll
System.Net.Http.Formatting.dll
System.Security.Cryptography.Pkcs.dll
System.Security.Cryptography.ProtectedData.dll
System.ServiceProcess.ServiceController.dll
systemd.svc.sh.template
update.cmd.template
update.sh.template
YamlDotNet.dll
@@ -0,0 +1,267 @@
api-ms-win-core-console-l1-1-0.dll
api-ms-win-core-console-l1-2-0.dll
api-ms-win-core-datetime-l1-1-0.dll
api-ms-win-core-debug-l1-1-0.dll
api-ms-win-core-errorhandling-l1-1-0.dll
api-ms-win-core-fibers-l1-1-0.dll
api-ms-win-core-file-l1-1-0.dll
api-ms-win-core-file-l1-2-0.dll
api-ms-win-core-file-l2-1-0.dll
api-ms-win-core-handle-l1-1-0.dll
api-ms-win-core-heap-l1-1-0.dll
api-ms-win-core-interlocked-l1-1-0.dll
api-ms-win-core-libraryloader-l1-1-0.dll
api-ms-win-core-localization-l1-2-0.dll
api-ms-win-core-memory-l1-1-0.dll
api-ms-win-core-namedpipe-l1-1-0.dll
api-ms-win-core-processenvironment-l1-1-0.dll
api-ms-win-core-processthreads-l1-1-0.dll
api-ms-win-core-processthreads-l1-1-1.dll
api-ms-win-core-profile-l1-1-0.dll
api-ms-win-core-rtlsupport-l1-1-0.dll
api-ms-win-core-string-l1-1-0.dll
api-ms-win-core-synch-l1-1-0.dll
api-ms-win-core-synch-l1-2-0.dll
api-ms-win-core-sysinfo-l1-1-0.dll
api-ms-win-core-timezone-l1-1-0.dll
api-ms-win-core-util-l1-1-0.dll
api-ms-win-crt-conio-l1-1-0.dll
api-ms-win-crt-convert-l1-1-0.dll
api-ms-win-crt-environment-l1-1-0.dll
api-ms-win-crt-filesystem-l1-1-0.dll
api-ms-win-crt-heap-l1-1-0.dll
api-ms-win-crt-locale-l1-1-0.dll
api-ms-win-crt-math-l1-1-0.dll
api-ms-win-crt-multibyte-l1-1-0.dll
api-ms-win-crt-private-l1-1-0.dll
api-ms-win-crt-process-l1-1-0.dll
api-ms-win-crt-runtime-l1-1-0.dll
api-ms-win-crt-stdio-l1-1-0.dll
api-ms-win-crt-string-l1-1-0.dll
api-ms-win-crt-time-l1-1-0.dll
api-ms-win-crt-utility-l1-1-0.dll
clrcompression.dll
clretwrc.dll
clrjit.dll
coreclr.dll
createdump
createdump.exe
dbgshim.dll
hostfxr.dll
hostpolicy.dll
libclrjit.dylib
libclrjit.so
libcoreclr.dylib
libcoreclr.so
libcoreclrtraceptprovider.so
libdbgshim.dylib
libdbgshim.so
libhostfxr.dylib
libhostfxr.so
libhostpolicy.dylib
libhostpolicy.so
libmscordaccore.dylib
libmscordaccore.so
libmscordbi.dylib
libmscordbi.so
Microsoft.CSharp.dll
Microsoft.DiaSymReader.Native.amd64.dll
Microsoft.DiaSymReader.Native.arm64.dll
Microsoft.VisualBasic.Core.dll
Microsoft.VisualBasic.dll
Microsoft.Win32.Primitives.dll
Microsoft.Win32.Registry.dll
mscordaccore.dll
mscordaccore_amd64_amd64_6.0.522.21309.dll
mscordaccore_arm64_arm64_6.0.522.21309.dll
mscordaccore_amd64_amd64_6.0.1322.58009.dll
mscordbi.dll
mscorlib.dll
mscorrc.debug.dll
mscorrc.dll
msquic.dll
netstandard.dll
SOS_README.md
System.AppContext.dll
System.Buffers.dll
System.Collections.Concurrent.dll
System.Collections.dll
System.Collections.Immutable.dll
System.Collections.NonGeneric.dll
System.Collections.Specialized.dll
System.ComponentModel.Annotations.dll
System.ComponentModel.DataAnnotations.dll
System.ComponentModel.dll
System.ComponentModel.EventBasedAsync.dll
System.ComponentModel.Primitives.dll
System.ComponentModel.TypeConverter.dll
System.Configuration.dll
System.Console.dll
System.Core.dll
System.Data.Common.dll
System.Data.DataSetExtensions.dll
System.Data.dll
System.Diagnostics.Contracts.dll
System.Diagnostics.Debug.dll
System.Diagnostics.DiagnosticSource.dll
System.Diagnostics.FileVersionInfo.dll
System.Diagnostics.Process.dll
System.Diagnostics.StackTrace.dll
System.Diagnostics.TextWriterTraceListener.dll
System.Diagnostics.Tools.dll
System.Diagnostics.TraceSource.dll
System.Diagnostics.Tracing.dll
System.dll
System.Drawing.dll
System.Drawing.Primitives.dll
System.Dynamic.Runtime.dll
System.Formats.Asn1.dll
System.Globalization.Calendars.dll
System.Globalization.dll
System.Globalization.Extensions.dll
System.Globalization.Native.dylib
System.Globalization.Native.so
System.IO.Compression.Brotli.dll
System.IO.Compression.dll
System.IO.Compression.FileSystem.dll
System.IO.Compression.Native.a
System.IO.Compression.Native.dll
System.IO.Compression.Native.dylib
System.IO.Compression.Native.so
System.IO.Compression.ZipFile.dll
System.IO.dll
System.IO.FileSystem.AccessControl.dll
System.IO.FileSystem.dll
System.IO.FileSystem.DriveInfo.dll
System.IO.FileSystem.Primitives.dll
System.IO.FileSystem.Watcher.dll
System.IO.IsolatedStorage.dll
System.IO.MemoryMappedFiles.dll
System.IO.Pipes.AccessControl.dll
System.IO.Pipes.dll
System.IO.UnmanagedMemoryStream.dll
System.Linq.dll
System.Linq.Expressions.dll
System.Linq.Parallel.dll
System.Linq.Queryable.dll
System.Memory.dll
System.Native.a
System.Native.dylib
System.Native.so
System.Net.dll
System.Net.Http.dll
System.Net.Http.Json.dll
System.Net.Http.Native.a
System.Net.Http.Native.dylib
System.Net.Http.Native.so
System.Net.HttpListener.dll
System.Net.Mail.dll
System.Net.NameResolution.dll
System.Net.NetworkInformation.dll
System.Net.Ping.dll
System.Net.Primitives.dll
System.Net.Quic.dll
System.Net.Requests.dll
System.Net.Security.dll
System.Net.Security.Native.a
System.Net.Security.Native.dylib
System.Net.Security.Native.so
System.Net.ServicePoint.dll
System.Net.Sockets.dll
System.Net.WebClient.dll
System.Net.WebHeaderCollection.dll
System.Net.WebProxy.dll
System.Net.WebSockets.Client.dll
System.Net.WebSockets.dll
System.Numerics.dll
System.Numerics.Vectors.dll
System.ObjectModel.dll
System.Private.CoreLib.dll
System.Private.DataContractSerialization.dll
System.Private.Uri.dll
System.Private.Xml.dll
System.Private.Xml.Linq.dll
System.Reflection.DispatchProxy.dll
System.Reflection.dll
System.Reflection.Emit.dll
System.Reflection.Emit.ILGeneration.dll
System.Reflection.Emit.Lightweight.dll
System.Reflection.Extensions.dll
System.Reflection.Metadata.dll
System.Reflection.Primitives.dll
System.Reflection.TypeExtensions.dll
System.Resources.Reader.dll
System.Resources.ResourceManager.dll
System.Resources.Writer.dll
System.Runtime.CompilerServices.Unsafe.dll
System.Runtime.CompilerServices.VisualC.dll
System.Runtime.dll
System.Runtime.Extensions.dll
System.Runtime.Handles.dll
System.Runtime.InteropServices.dll
System.Runtime.InteropServices.RuntimeInformation.dll
System.Runtime.InteropServices.WindowsRuntime.dll
System.Runtime.Intrinsics.dll
System.Runtime.Loader.dll
System.Runtime.Numerics.dll
System.Runtime.Serialization.dll
System.Runtime.Serialization.Formatters.dll
System.Runtime.Serialization.Json.dll
System.Runtime.Serialization.Primitives.dll
System.Runtime.Serialization.Xml.dll
System.Runtime.WindowsRuntime.dll
System.Runtime.WindowsRuntime.UI.Xaml.dll
System.Security.AccessControl.dll
System.Security.Claims.dll
System.Security.Cryptography.Algorithms.dll
System.Security.Cryptography.Cng.dll
System.Security.Cryptography.Csp.dll
System.Security.Cryptography.Encoding.dll
System.Security.Cryptography.Native.Apple.a
System.Security.Cryptography.Native.Apple.dylib
System.Security.Cryptography.Native.OpenSsl.a
System.Security.Cryptography.Native.OpenSsl.dylib
System.Security.Cryptography.Native.OpenSsl.so
System.Security.Cryptography.OpenSsl.dll
System.Security.Cryptography.Primitives.dll
System.Security.Cryptography.X509Certificates.dll
System.Security.Cryptography.XCertificates.dll
System.Security.dll
System.Security.Principal.dll
System.Security.Principal.Windows.dll
System.Security.SecureString.dll
System.ServiceModel.Web.dll
System.ServiceProcess.dll
System.Text.Encoding.CodePages.dll
System.Text.Encoding.dll
System.Text.Encoding.Extensions.dll
System.Text.Encodings.Web.dll
System.Text.Json.dll
System.Text.RegularExpressions.dll
System.Threading.Channels.dll
System.Threading.dll
System.Threading.Overlapped.dll
System.Threading.Tasks.Dataflow.dll
System.Threading.Tasks.dll
System.Threading.Tasks.Extensions.dll
System.Threading.Tasks.Parallel.dll
System.Threading.Thread.dll
System.Threading.ThreadPool.dll
System.Threading.Timer.dll
System.Transactions.dll
System.Transactions.Local.dll
System.ValueTuple.dll
System.Web.dll
System.Web.HttpUtility.dll
System.Windows.dll
System.Xml.dll
System.Xml.Linq.dll
System.Xml.ReaderWriter.dll
System.Xml.Serialization.dll
System.Xml.XDocument.dll
System.Xml.XmlDocument.dll
System.Xml.XmlSerializer.dll
System.Xml.XPath.dll
System.Xml.XPath.XDocument.dll
ucrtbase.dll
WindowsBase.dll
@@ -0,0 +1,24 @@
[
{
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.tar.gz",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>",
"externals": "<EXTERNALS_HASH>"
}
},
{
"HashValue": "<NO_RUNTIME_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.tar.gz",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>"
}
},
{
"HashValue": "<NO_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.tar.gz",
"TrimmedContents": {
"externals": "<EXTERNALS_HASH>"
}
}
]
@@ -0,0 +1,24 @@
[
{
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.zip",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>",
"externals": "<EXTERNALS_HASH>"
}
},
{
"HashValue": "<NO_RUNTIME_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.zip",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>"
}
},
{
"HashValue": "<NO_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.zip",
"TrimmedContents": {
"externals": "<EXTERNALS_HASH>"
}
}
]
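The two new template files above (one for .tar.gz packages, one for .zip packages) describe trimmed runner release packages: each entry pairs the hash of a package variant (-noruntime-noexternals, -noruntime, -noexternals) with the hashes of the content that was trimmed out, and the `<...>` placeholders are presumably substituted when the packages are produced. A hedged sketch of that kind of substitution follows, assuming SHA-256 hex digests; the file names, version, and platform values are illustrative and are not the repo's actual release tooling.

```typescript
import { createHash } from 'crypto';
import { readFileSync, writeFileSync } from 'fs';

// Compute a SHA-256 hex digest for a file on disk (hash algorithm assumed).
function sha256(file: string): string {
  return createHash('sha256').update(readFileSync(file)).digest('hex');
}

// Fill the placeholder tokens in a trimmedpackages template.
// All file names and token values below are assumptions for illustration only.
const replacements: Record<string, string> = {
  '<RUNNER_VERSION>': '2.305.0',
  '<RUNNER_PLATFORM>': 'linux-x64',
  '<RUNTIME_HASH>': sha256('dotnetRuntime.tar.gz'),
  '<EXTERNALS_HASH>': sha256('externals.tar.gz'),
  '<NO_RUNTIME_HASH>': sha256('actions-runner-linux-x64-2.305.0-noruntime.tar.gz'),
  '<NO_EXTERNALS_HASH>': sha256('actions-runner-linux-x64-2.305.0-noexternals.tar.gz'),
  '<NO_RUNTIME_EXTERNALS_HASH>': sha256('actions-runner-linux-x64-2.305.0-noruntime-noexternals.tar.gz'),
};

let template = readFileSync('trimmedpackages.json.template', 'utf8');
for (const [token, value] of Object.entries(replacements)) {
  template = template.split(token).join(value);
}
writeFileSync('trimmedpackages.json', template);
```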
@@ -1,4 +1,4 @@
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
@@ -1,4 +1,4 @@
namespace GitHub.Runner.Common namespace GitHub.Runner.Common
{ {
public enum ActionResult public enum ActionResult
{ {
@@ -10,4 +10,4 @@
Skipped = 3 Skipped = 3
} }
} }
@@ -20,12 +20,12 @@ namespace GitHub.Runner.Common
{ {
private bool _hasConnection; private bool _hasConnection;
private VssConnection _connection; private VssConnection _connection;
private ActionsRunServerHttpClient _actionsRunServerClient; private TaskAgentHttpClient _taskAgentClient;
public async Task ConnectAsync(Uri serverUrl, VssCredentials credentials) public async Task ConnectAsync(Uri serverUrl, VssCredentials credentials)
{ {
_connection = await EstablishVssConnection(serverUrl, credentials, TimeSpan.FromSeconds(100)); _connection = await EstablishVssConnection(serverUrl, credentials, TimeSpan.FromSeconds(100));
_actionsRunServerClient = _connection.GetClient<ActionsRunServerHttpClient>(); _taskAgentClient = _connection.GetClient<TaskAgentHttpClient>();
_hasConnection = true; _hasConnection = true;
} }
@@ -42,7 +42,7 @@ namespace GitHub.Runner.Common
CheckConnection(); CheckConnection();
var jobMessage = RetryRequest<AgentJobRequestMessage>(async () => var jobMessage = RetryRequest<AgentJobRequestMessage>(async () =>
{ {
return await _actionsRunServerClient.GetJobMessageAsync(id, cancellationToken); return await _taskAgentClient.GetJobMessageAsync(id, cancellationToken);
}, cancellationToken); }, cancellationToken);
return jobMessage; return jobMessage;
@@ -1,4 +1,4 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
@@ -17,14 +17,7 @@ namespace GitHub.Runner.Common
{ {
Task ConnectAsync(Uri serverUrl, VssCredentials credentials); Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
Task<TaskAgentSession> CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken); Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version);
Task DeleteSessionAsync(CancellationToken cancellationToken);
Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token);
Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials);
Task ForceRefreshConnection(VssCredentials credentials);
} }
public sealed class BrokerServer : RunnerService, IBrokerServer public sealed class BrokerServer : RunnerService, IBrokerServer
@@ -51,53 +44,13 @@ namespace GitHub.Runner.Common
} }
} }
public async Task<TaskAgentSession> CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken) public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version)
{ {
CheckConnection(); CheckConnection();
var jobMessage = await _brokerHttpClient.CreateSessionAsync(session, cancellationToken); var jobMessage = RetryRequest<TaskAgentMessage>(
async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, cancellationToken), cancellationToken);
return jobMessage; return jobMessage;
} }
public Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken)
{
CheckConnection();
var brokerSession = RetryRequest<TaskAgentMessage>(
async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken, shouldRetry: ShouldRetryException);
return brokerSession;
}
public async Task DeleteSessionAsync(CancellationToken cancellationToken)
{
CheckConnection();
await _brokerHttpClient.DeleteSessionAsync(cancellationToken);
}
public Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials)
{
if (_brokerUri != serverUri || !_hasConnection)
{
return ConnectAsync(serverUri, credentials);
}
return Task.CompletedTask;
}
public Task ForceRefreshConnection(VssCredentials credentials)
{
return ConnectAsync(_brokerUri, credentials);
}
public bool ShouldRetryException(Exception ex)
{
if (ex is AccessDeniedException ade)
{
return false;
}
return true;
}
} }
} }
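The fuller version of BrokerServer shown above threads a session through the broker calls and passes a shouldRetry predicate so that AccessDeniedException is not retried. A generic TypeScript sketch of that retry-with-filter pattern follows; the attempt count, backoff, and names are assumptions, not the runner's actual RetryRequest implementation.

```typescript
// Sketch of a retry helper that stops early when a predicate says an error is
// not retryable (mirroring ShouldRetryException returning false for
// access-denied errors). Attempt count and backoff are illustrative.
async function retryRequest<T>(
  makeRequest: () => Promise<T>,
  shouldRetry: (err: unknown) => boolean = () => true,
  maxAttempts = 5,
  delayMs = 1000
): Promise<T> {
  let lastError: unknown;
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      return await makeRequest();
    } catch (err) {
      lastError = err;
      if (!shouldRetry(err) || attempt === maxAttempts) {
        break; // non-retryable error or out of attempts
      }
      await new Promise(resolve => setTimeout(resolve, delayMs * attempt));
    }
  }
  throw lastError;
}

// Usage idea (types are hypothetical): do not retry access-denied failures.
// retryRequest(() => getRunnerMessage(), err => !(err instanceof AccessDeniedError));
```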
@@ -1,4 +1,4 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using GitHub.DistributedTask.Logging; using GitHub.DistributedTask.Logging;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
@@ -1,4 +1,4 @@
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using System; using System;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common
private bool? _isHostedServer; private bool? _isHostedServer;
[DataMember(EmitDefaultValue = false)] [DataMember(EmitDefaultValue = false)]
public ulong AgentId { get; set; } public int AgentId { get; set; }
[DataMember(EmitDefaultValue = false)] [DataMember(EmitDefaultValue = false)]
public string AgentName { get; set; } public string AgentName { get; set; }
@@ -1,4 +1,4 @@
using System; using System;
namespace GitHub.Runner.Common namespace GitHub.Runner.Common
{ {
@@ -69,8 +69,6 @@ namespace GitHub.Runner.Common
public static readonly OSPlatform Platform = OSPlatform.OSX; public static readonly OSPlatform Platform = OSPlatform.OSX;
#elif OS_WINDOWS #elif OS_WINDOWS
public static readonly OSPlatform Platform = OSPlatform.Windows; public static readonly OSPlatform Platform = OSPlatform.Windows;
#else
public static readonly OSPlatform Platform = OSPlatform.Linux;
#endif #endif
#if X86 #if X86
@@ -81,8 +79,6 @@ namespace GitHub.Runner.Common
public static readonly Architecture PlatformArchitecture = Architecture.Arm; public static readonly Architecture PlatformArchitecture = Architecture.Arm;
#elif ARM64 #elif ARM64
public static readonly Architecture PlatformArchitecture = Architecture.Arm64; public static readonly Architecture PlatformArchitecture = Architecture.Arm64;
#else
public static readonly Architecture PlatformArchitecture = Architecture.X64;
#endif #endif
public static readonly TimeSpan ExitOnUnloadTimeout = TimeSpan.FromSeconds(30); public static readonly TimeSpan ExitOnUnloadTimeout = TimeSpan.FromSeconds(30);
@@ -136,7 +132,6 @@ namespace GitHub.Runner.Common
public static readonly string GenerateServiceConfig = "generateServiceConfig"; public static readonly string GenerateServiceConfig = "generateServiceConfig";
public static readonly string Help = "help"; public static readonly string Help = "help";
public static readonly string Local = "local"; public static readonly string Local = "local";
public static readonly string NoDefaultLabels = "no-default-labels";
public static readonly string Replace = "replace"; public static readonly string Replace = "replace";
public static readonly string DisableUpdate = "disableupdate"; public static readonly string DisableUpdate = "disableupdate";
public static readonly string Once = "once"; // Keep this around since customers still relies on it public static readonly string Once = "once"; // Keep this around since customers still relies on it
@@ -153,14 +148,12 @@ namespace GitHub.Runner.Common
public const int RetryableError = 2; public const int RetryableError = 2;
public const int RunnerUpdating = 3; public const int RunnerUpdating = 3;
public const int RunOnceRunnerUpdating = 4; public const int RunOnceRunnerUpdating = 4;
public const int SessionConflict = 5;
} }
public static class Features public static class Features
{ {
public static readonly string DiskSpaceWarning = "runner.diskspace.warning"; public static readonly string DiskSpaceWarning = "runner.diskspace.warning";
public static readonly string Node16Warning = "DistributedTask.AddWarningToNode16Action"; public static readonly string Node12Warning = "DistributedTask.AddWarningToNode12Action";
public static readonly string LogTemplateErrorsAsDebugMessages = "DistributedTask.LogTemplateErrorsAsDebugMessages";
public static readonly string UseContainerPathForTemplate = "DistributedTask.UseContainerPathForTemplate"; public static readonly string UseContainerPathForTemplate = "DistributedTask.UseContainerPathForTemplate";
public static readonly string AllowRunnerContainerHooks = "DistributedTask.AllowRunnerContainerHooks"; public static readonly string AllowRunnerContainerHooks = "DistributedTask.AllowRunnerContainerHooks";
} }
@@ -176,14 +169,9 @@ namespace GitHub.Runner.Common
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`."; public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary"; public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary"; public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
public static readonly string DetectedNodeAfterEndOfLifeMessage = "Node.js 16 actions are deprecated. Please update the following actions to use Node.js 20: {0}. For more information see: https://github.blog/changelog/2023-09-22-github-actions-transitioning-from-node-16-to-node-20/."; public static readonly string Node12DetectedAfterEndOfLife = "Node.js 12 actions are deprecated. Please update the following actions to use Node.js 16: {0}. For more information see: https://github.blog/changelog/2022-09-22-github-actions-all-actions-will-begin-running-on-node16-instead-of-node12/.";
public static readonly string DeprecatedNodeDetectedAfterEndOfLifeActions = "DeprecatedNodeActionsMessageWarnings";
public static readonly string DeprecatedNodeVersion = "node16";
public static readonly string EnforcedNode12DetectedAfterEndOfLife = "The following actions uses node12 which is deprecated and will be forced to run on node16: {0}. For more info: https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/"; public static readonly string EnforcedNode12DetectedAfterEndOfLife = "The following actions uses node12 which is deprecated and will be forced to run on node16: {0}. For more info: https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/";
public static readonly string EnforcedNode12DetectedAfterEndOfLifeEnvVariable = "Node16ForceActionsWarnings"; public static readonly string EnforcedNode12DetectedAfterEndOfLifeEnvVariable = "Node16ForceActionsWarnings";
public static readonly string EnforcedNode16DetectedAfterEndOfLife = "The following actions use a deprecated Node.js version and will be forced to run on node20: {0}. For more info: https://github.blog/changelog/2024-03-07-github-actions-all-actions-will-run-on-node20-instead-of-node16-by-default/";
public static readonly string EnforcedNode16DetectedAfterEndOfLifeEnvVariable = "Node20ForceActionsWarnings";
} }
public static class RunnerEvent public static class RunnerEvent
@@ -255,7 +243,6 @@ namespace GitHub.Runner.Common
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG"; public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG"; public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION"; public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
public static readonly string ManualForceActionsToNode20 = "FORCE_JAVASCRIPT_ACTIONS_TO_NODE20";
} }
public static class Agent public static class Agent
@@ -266,8 +253,6 @@ namespace GitHub.Runner.Common
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION"; public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION"; public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT"; public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";
public static readonly string ActionArchiveCacheDirectory = "ACTIONS_RUNNER_ACTION_ARCHIVE_CACHE";
public static readonly string ManualForceActionsToNode20 = "FORCE_JAVASCRIPT_ACTIONS_TO_NODE20";
} }
public static class System public static class System
@@ -278,12 +263,7 @@ namespace GitHub.Runner.Common
public static readonly string AccessToken = "system.accessToken"; public static readonly string AccessToken = "system.accessToken";
public static readonly string Culture = "system.culture"; public static readonly string Culture = "system.culture";
public static readonly string PhaseDisplayName = "system.phaseDisplayName"; public static readonly string PhaseDisplayName = "system.phaseDisplayName";
public static readonly string JobRequestType = "system.jobRequestType";
public static readonly string OrchestrationId = "system.orchestrationId"; public static readonly string OrchestrationId = "system.orchestrationId";
public static readonly string TestDotNet8Compatibility = "system.testDotNet8Compatibility";
public static readonly string DotNet8CompatibilityOutputLength = "system.dotNet8CompatibilityOutputLength";
public static readonly string DotNet8CompatibilityOutputPattern = "system.dotNet8CompatibilityOutputPattern";
public static readonly string DotNet8CompatibilityWarning = "system.dotNet8CompatibilityWarning";
} }
} }
@@ -1,4 +1,4 @@
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using System; using System;
using System.Collections.Concurrent; using System.Collections.Concurrent;
using System.Collections.Generic; using System.Collections.Generic;
@@ -1,4 +1,4 @@
using System; using System;
using System.Collections.Concurrent; using System.Collections.Concurrent;
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics; using System.Diagnostics;
@@ -36,7 +36,6 @@ namespace GitHub.Runner.Common
event EventHandler Unloading; event EventHandler Unloading;
void ShutdownRunner(ShutdownReason reason); void ShutdownRunner(ShutdownReason reason);
void WritePerfCounter(string counter); void WritePerfCounter(string counter);
void LoadDefaultUserAgents();
} }
public enum StartupType public enum StartupType
@@ -68,7 +67,6 @@ namespace GitHub.Runner.Common
private StartupType _startupType; private StartupType _startupType;
private string _perfFile; private string _perfFile;
private RunnerWebProxy _webProxy = new(); private RunnerWebProxy _webProxy = new();
private string _hostType = string.Empty;
public event EventHandler Unloading; public event EventHandler Unloading;
public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token; public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token;
@@ -80,7 +78,6 @@ namespace GitHub.Runner.Common
{ {
// Validate args. // Validate args.
ArgUtil.NotNullOrEmpty(hostType, nameof(hostType)); ArgUtil.NotNullOrEmpty(hostType, nameof(hostType));
_hostType = hostType;
_loadContext = AssemblyLoadContext.GetLoadContext(typeof(HostContext).GetTypeInfo().Assembly); _loadContext = AssemblyLoadContext.GetLoadContext(typeof(HostContext).GetTypeInfo().Assembly);
_loadContext.Unloading += LoadContext_Unloading; _loadContext.Unloading += LoadContext_Unloading;
@@ -199,23 +196,14 @@ namespace GitHub.Runner.Common
} }
} }
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
{
_trace.Warning($"Runner is running under insecure mode: HTTPS server certificate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
}
LoadDefaultUserAgents();
}
public void LoadDefaultUserAgents()
{
if (string.IsNullOrEmpty(WebProxy.HttpProxyAddress) && string.IsNullOrEmpty(WebProxy.HttpsProxyAddress)) if (string.IsNullOrEmpty(WebProxy.HttpProxyAddress) && string.IsNullOrEmpty(WebProxy.HttpsProxyAddress))
{ {
_trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)"); _trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
} }
else
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
{ {
_userAgents.Add(new ProductInfoHeaderValue("HttpProxyConfigured", bool.TrueString)); _trace.Warning($"Runner is running under insecure mode: HTTPS server certifcate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
} }
var credFile = GetConfigFile(WellKnownConfigFile.Credentials); var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
@@ -252,11 +240,6 @@ namespace GitHub.Runner.Common
_trace.Info($"Adding extra user agent '{extraUserAgentHeader}' to all HTTP requests."); _trace.Info($"Adding extra user agent '{extraUserAgentHeader}' to all HTTP requests.");
_userAgents.Add(extraUserAgentHeader); _userAgents.Add(extraUserAgentHeader);
} }
var currentProcess = Process.GetCurrentProcess();
_userAgents.Add(new ProductInfoHeaderValue("Pid", currentProcess.Id.ToString()));
_userAgents.Add(new ProductInfoHeaderValue("CreationTime", Uri.EscapeDataString(DateTime.UtcNow.ToString("O"))));
_userAgents.Add(new ProductInfoHeaderValue($"({_hostType})"));
} }
public string GetDirectory(WellKnownDirectory directory) public string GetDirectory(WellKnownDirectory directory)
@@ -1,4 +1,4 @@
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using System; using System;
using System.Diagnostics; using System.Diagnostics;
using System.Globalization; using System.Globalization;
@@ -1,4 +1,4 @@
using System; using System;
using System.Net.Http; using System.Net.Http;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
@@ -24,4 +24,4 @@ namespace GitHub.Runner.Common
return client; return client;
} }
} }

@@ -1,4 +1,4 @@
using System; using System;
using System.Net; using System.Net;
using System.Net.Sockets; using System.Net.Sockets;
using System.Text; using System.Text;
@@ -37,10 +37,10 @@ namespace GitHub.Runner.Common
{ {
ConnectMonitor(monitorSocketAddress); ConnectMonitor(monitorSocketAddress);
} }
private void StartMonitor(Guid jobId, string accessToken, Uri serverUri) private void StartMonitor(Guid jobId, string accessToken, Uri serverUri)
{ {
if (String.IsNullOrEmpty(accessToken)) if(String.IsNullOrEmpty(accessToken))
{ {
Trace.Info("No access token could be retrieved to start the monitor."); Trace.Info("No access token could be retrieved to start the monitor.");
return; return;
@@ -82,7 +82,7 @@ namespace GitHub.Runner.Common
_monitorSocket.Send(Encoding.UTF8.GetBytes(message)); _monitorSocket.Send(Encoding.UTF8.GetBytes(message));
Trace.Info("Finished EndMonitor writing to socket"); Trace.Info("Finished EndMonitor writing to socket");
await Task.Delay(TimeSpan.FromSeconds(2)); await Task.Delay(TimeSpan.FromSeconds(2));
} }
} }
catch (SocketException e) catch (SocketException e)
@@ -1,10 +1,9 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Net.Http; using System.Net.Http;
using System.Net.Http.Headers; using System.Net.Http.Headers;
using System.Net.Security;
using System.Net.WebSockets; using System.Net.WebSockets;
using System.Text; using System.Text;
using System.Threading; using System.Threading;
@@ -12,10 +11,10 @@ using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Services.Common; using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.Results.Client;
using GitHub.Services.WebApi; using GitHub.Services.WebApi;
using GitHub.Services.WebApi.Utilities.Internal; using GitHub.Services.WebApi.Utilities.Internal;
using GitHub.Services.Results.Client;
using GitHub.Services.OAuth;
namespace GitHub.Runner.Common namespace GitHub.Runner.Common
{ {
@@ -180,10 +179,6 @@ namespace GitHub.Runner.Common
userAgentValues.AddRange(UserAgentUtility.GetDefaultRestUserAgent()); userAgentValues.AddRange(UserAgentUtility.GetDefaultRestUserAgent());
userAgentValues.AddRange(HostContext.UserAgents); userAgentValues.AddRange(HostContext.UserAgents);
this._websocketClient.Options.SetRequestHeader("User-Agent", string.Join(" ", userAgentValues.Select(x => x.ToString()))); this._websocketClient.Options.SetRequestHeader("User-Agent", string.Join(" ", userAgentValues.Select(x => x.ToString())));
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
{
this._websocketClient.Options.RemoteCertificateValidationCallback = (_, _, _, _) => true;
}
this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay); this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay);
} }
@@ -259,7 +254,7 @@ namespace GitHub.Runner.Common
{ {
failedAttemptsToPostBatchedLinesByWebsocket++; failedAttemptsToPostBatchedLinesByWebsocket++;
Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State})."); Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
Trace.Verbose(ex.ToString()); Trace.Error(ex);
if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider) if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
{ {
// let's consider failure percentage // let's consider failure percentage
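The hunk above falls back from the websocket live-console feed to plain HTTP calls once enough batches have been attempted and the observed failure rate is too high; the exact threshold is not visible in this excerpt. The following sketch shows that kind of decision, with the minimum sample size and failure percentage chosen arbitrarily for illustration.

```typescript
// Decide whether to abandon the websocket path based on observed failures.
// The 50% cut-off and minimum batch count are assumptions, not the runner's
// actual values.
function shouldAbandonWebsocket(
  totalBatches: number,
  failedBatches: number,
  minBatchesToConsider = 5,
  maxFailurePercentage = 50
): boolean {
  if (totalBatches <= minBatchesToConsider) {
    return false; // not enough data yet to judge the connection
  }
  return (failedBatches * 100) / totalBatches > maxFailurePercentage;
}

console.log(shouldAbandonWebsocket(10, 6)); // true: 60% of batches failed
```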
@@ -1,7 +1,6 @@
using System; using System;
using System.Collections.Concurrent; using System.Collections.Concurrent;
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Threading; using System.Threading;
@@ -15,11 +14,10 @@ namespace GitHub.Runner.Common
[ServiceLocator(Default = typeof(JobServerQueue))] [ServiceLocator(Default = typeof(JobServerQueue))]
public interface IJobServerQueue : IRunnerService, IThrottlingReporter public interface IJobServerQueue : IRunnerService, IThrottlingReporter
{ {
IList<JobTelemetry> JobTelemetries { get; }
TaskCompletionSource<int> JobRecordUpdated { get; } TaskCompletionSource<int> JobRecordUpdated { get; }
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling; event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
Task ShutdownAsync(); Task ShutdownAsync();
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false); void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false);
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null); void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource); void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines); void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
@@ -71,19 +69,13 @@ namespace GitHub.Runner.Common
private Task[] _allDequeueTasks; private Task[] _allDequeueTasks;
private readonly TaskCompletionSource<int> _jobCompletionSource = new(); private readonly TaskCompletionSource<int> _jobCompletionSource = new();
private readonly TaskCompletionSource<int> _jobRecordUpdated = new(); private readonly TaskCompletionSource<int> _jobRecordUpdated = new();
private readonly List<JobTelemetry> _jobTelemetries = new();
private bool _queueInProcess = false; private bool _queueInProcess = false;
private bool _resultsServiceOnly = false; private bool _resultsServiceOnly = false;
private int _resultsServiceExceptionsCount = 0;
private Stopwatch _resultsUploadTimer = new();
private Stopwatch _actionsUploadTimer = new();
public TaskCompletionSource<int> JobRecordUpdated => _jobRecordUpdated; public TaskCompletionSource<int> JobRecordUpdated => _jobRecordUpdated;
public event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling; public event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
public IList<JobTelemetry> JobTelemetries => _jobTelemetries;
// Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds). // Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
// Then the dequeue will happen every 500ms. // Then the dequeue will happen every 500ms.
// In this way, customer still can get instance live console output on job start, // In this way, customer still can get instance live console output on job start,
@@ -95,7 +87,6 @@ namespace GitHub.Runner.Common
private bool _firstConsoleOutputs = true; private bool _firstConsoleOutputs = true;
private bool _resultsClientInitiated = false; private bool _resultsClientInitiated = false;
private bool _enableTelemetry = false;
private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file); private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
public override void Initialize(IHostContext hostContext) public override void Initialize(IHostContext hostContext)
@@ -105,14 +96,14 @@ namespace GitHub.Runner.Common
_resultsServer = hostContext.GetService<IResultsServer>(); _resultsServer = hostContext.GetService<IResultsServer>();
} }
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false) public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false)
{ {
Trace.Entering(); Trace.Entering();
_resultsServiceOnly = resultsServiceOnly; _resultsServiceOnly = resultServiceOnly;
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (!resultsServiceOnly) if (!resultServiceOnly)
{ {
_jobServer.InitializeWebsocketClient(serviceEndPoint); _jobServer.InitializeWebsocketClient(serviceEndPoint);
} }
@@ -128,21 +119,15 @@ namespace GitHub.Runner.Common
{ {
string liveConsoleFeedUrl = null; string liveConsoleFeedUrl = null;
Trace.Info("Initializing results client"); Trace.Info("Initializing results client");
if (resultsServiceOnly if (resultServiceOnly
&& serviceEndPoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl) && serviceEndPoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl)
&& !string.IsNullOrEmpty(feedStreamUrl)) && !string.IsNullOrEmpty(feedStreamUrl))
{ {
liveConsoleFeedUrl = feedStreamUrl; liveConsoleFeedUrl = feedStreamUrl;
} }
jobRequest.Variables.TryGetValue("system.github.results_upload_with_sdk", out VariableValue resultsUseSdkVariable);
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken, StringUtil.ConvertToBoolean(resultsUseSdkVariable?.Value));
_resultsClientInitiated = true;
}
// Enable telemetry if we have both results service and actions service _resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken);
if (_resultsClientInitiated && !_resultsServiceOnly) _resultsClientInitiated = true;
{
_enableTelemetry = true;
} }
if (_queueInProcess) if (_queueInProcess)
@@ -226,12 +211,6 @@ namespace GitHub.Runner.Common
await _resultsServer.DisposeAsync(); await _resultsServer.DisposeAsync();
Trace.Info("All queue process tasks have been stopped, and all queues are drained."); Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
if (_enableTelemetry)
{
var uploadTimeComparison = $"Actions upload time: {_actionsUploadTimer.ElapsedMilliseconds} ms, Result upload time: {_resultsUploadTimer.ElapsedMilliseconds} ms";
Trace.Info(uploadTimeComparison);
_jobTelemetries.Add(new JobTelemetry() { Type = JobTelemetryType.General, Message = uploadTimeComparison });
}
} }
public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber) public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
@@ -477,10 +456,6 @@ namespace GitHub.Runner.Common
{ {
try try
{ {
if (_enableTelemetry)
{
_actionsUploadTimer.Start();
}
await UploadFile(file); await UploadFile(file);
} }
catch (Exception ex) catch (Exception ex)
@@ -496,13 +471,6 @@ namespace GitHub.Runner.Common
// _fileUploadQueue.Enqueue(file); // _fileUploadQueue.Enqueue(file);
//} //}
} }
finally
{
if (_enableTelemetry)
{
_actionsUploadTimer.Stop();
}
}
} }
Trace.Info("Try to upload {0} log files or attachments, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount); Trace.Info("Try to upload {0} log files or attachments, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
@@ -549,18 +517,10 @@ namespace GitHub.Runner.Common
{ {
try try
{ {
if (_enableTelemetry)
{
_resultsUploadTimer.Start();
}
if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase)) if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
{ {
await UploadSummaryFile(file); await UploadSummaryFile(file);
} }
if (string.Equals(file.Type, CoreAttachmentType.ResultsDiagnosticLog, StringComparison.OrdinalIgnoreCase))
{
await UploadResultsDiagnosticLogsFile(file);
}
else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase)) else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase))
{ {
if (file.RecordId != _jobTimelineRecordId) if (file.RecordId != _jobTimelineRecordId)
@@ -580,20 +540,11 @@ namespace GitHub.Runner.Common
Trace.Info("Catch exception during file upload to results, keep going since the process is best effort."); Trace.Info("Catch exception during file upload to results, keep going since the process is best effort.");
Trace.Error(ex); Trace.Error(ex);
errorCount++; errorCount++;
_resultsServiceExceptionsCount++;
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs // If we hit any exceptions uploading to Results, let's skip any additional uploads to Results
if (!_resultsServiceOnly && _resultsServiceExceptionsCount > 3) _resultsClientInitiated = false;
{
_resultsClientInitiated = false; SendResultsTelemetry(ex);
SendResultsTelemetry(ex);
}
}
finally
{
if (_enableTelemetry)
{
_resultsUploadTimer.Stop();
}
} }
} }
@@ -613,7 +564,7 @@ namespace GitHub.Runner.Common
private void SendResultsTelemetry(Exception ex) private void SendResultsTelemetry(Exception ex)
{ {
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception with results. {HostContext.SecretMasker.MaskSecrets(ex.Message)}" }; var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception with results. {ex.Message}" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure; issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure;
var telemetryRecord = new TimelineRecord() var telemetryRecord = new TimelineRecord()
@@ -709,13 +660,9 @@ namespace GitHub.Runner.Common
{ {
Trace.Info("Catch exception during update steps, skip update Results."); Trace.Info("Catch exception during update steps, skip update Results.");
Trace.Error(e); Trace.Error(e);
_resultsServiceExceptionsCount++; _resultsClientInitiated = false;
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs
if (!_resultsServiceOnly && _resultsServiceExceptionsCount > 3) SendResultsTelemetry(e);
{
_resultsClientInitiated = false;
SendResultsTelemetry(e);
}
} }
if (_bufferedRetryRecords.Remove(update.TimelineId)) if (_bufferedRetryRecords.Remove(update.TimelineId))
@@ -809,17 +756,17 @@ namespace GitHub.Runner.Common
timelineRecord.State = rec.State ?? timelineRecord.State; timelineRecord.State = rec.State ?? timelineRecord.State;
timelineRecord.WorkerName = rec.WorkerName ?? timelineRecord.WorkerName; timelineRecord.WorkerName = rec.WorkerName ?? timelineRecord.WorkerName;
if (rec.ErrorCount > 0) if (rec.ErrorCount != null && rec.ErrorCount > 0)
{ {
timelineRecord.ErrorCount = rec.ErrorCount; timelineRecord.ErrorCount = rec.ErrorCount;
} }
if (rec.WarningCount > 0) if (rec.WarningCount != null && rec.WarningCount > 0)
{ {
timelineRecord.WarningCount = rec.WarningCount; timelineRecord.WarningCount = rec.WarningCount;
} }
if (rec.NoticeCount > 0) if (rec.NoticeCount != null && rec.NoticeCount > 0)
{ {
timelineRecord.NoticeCount = rec.NoticeCount; timelineRecord.NoticeCount = rec.NoticeCount;
} }
@@ -850,7 +797,7 @@ namespace GitHub.Runner.Common
foreach (var record in mergedRecords) foreach (var record in mergedRecords)
{ {
Trace.Verbose($" Record: t={record.RecordType}, n={record.Name}, s={record.State}, st={record.StartTime}, {record.PercentComplete}%, ft={record.FinishTime}, r={record.Result}: {record.CurrentOperation}"); Trace.Verbose($" Record: t={record.RecordType}, n={record.Name}, s={record.State}, st={record.StartTime}, {record.PercentComplete}%, ft={record.FinishTime}, r={record.Result}: {record.CurrentOperation}");
if (record.Issues != null) if (record.Issues != null && record.Issues.Count > 0)
{ {
foreach (var issue in record.Issues) foreach (var issue in record.Issues)
{ {
@@ -860,7 +807,7 @@ namespace GitHub.Runner.Common
} }
} }
if (record.Variables != null) if (record.Variables != null && record.Variables.Count > 0)
{ {
foreach (var variable in record.Variables) foreach (var variable in record.Variables)
{ {
@@ -934,17 +881,6 @@ namespace GitHub.Runner.Common
await UploadResultsFile(file, summaryHandler); await UploadResultsFile(file, summaryHandler);
} }
private async Task UploadResultsDiagnosticLogsFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting to upload diagnostic logs file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler diagnosticLogsHandler = async (file) =>
{
await _resultsServer.CreateResultsDiagnosticLogsAsync(file.PlanId, file.JobId, file.Path, CancellationToken.None);
};
await UploadResultsFile(file, diagnosticLogsHandler);
}
private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file) private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
{ {
Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}"); Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");

View File

@@ -1,4 +1,4 @@
using System; using System;
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Common namespace GitHub.Runner.Common

View File

@@ -1,56 +0,0 @@
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.Launch.Client;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(LaunchServer))]
public interface ILaunchServer : IRunnerService
{
void InitializeLaunchClient(Uri uri, string token);
Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken);
}
public sealed class LaunchServer : RunnerService, ILaunchServer
{
private LaunchHttpClient _launchClient;
public void InitializeLaunchClient(Uri uri, string token)
{
// Using default 100 timeout
RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null);
// Create retry handler
IEnumerable<DelegatingHandler> delegatingHandlers = new List<DelegatingHandler>();
if (settings.MaxRetryRequest > 0)
{
delegatingHandlers = new DelegatingHandler[] { new VssHttpRetryMessageHandler(settings.MaxRetryRequest) };
}
// Setup RawHttpMessageHandler without credentials
var httpMessageHandler = new RawHttpMessageHandler(new NoOpCredentials(null), settings);
var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers);
this._launchClient = new LaunchHttpClient(uri, pipeline, token, disposeHandler: true);
}
public Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList,
CancellationToken cancellationToken)
{
if (_launchClient != null)
{
return _launchClient.GetResolveActionsDownloadInfoAsync(planId, jobId, actionReferenceList,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Launch client is not initialized.");
}
}
}
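
InitializeLaunchClient above composes a transport handler with an optional retry DelegatingHandler before constructing LaunchHttpClient. The same layering with only standard .NET types looks roughly like this; LoggingHandler is a made-up stand-in for VssHttpRetryMessageHandler:

    using System;
    using System.Net.Http;
    using System.Threading;
    using System.Threading.Tasks;

    // Outermost delegating handler wraps the innermost transport handler, the same shape CreatePipeline builds above.
    var client = new HttpClient(new LoggingHandler { InnerHandler = new HttpClientHandler() }, disposeHandler: true);

    // Hypothetical handler; any DelegatingHandler slots into the pipeline the same way.
    sealed class LoggingHandler : DelegatingHandler
    {
        protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
        {
            Console.WriteLine($"{request.Method} {request.RequestUri}");
            return base.SendAsync(request, cancellationToken);
        }
    }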

View File

@@ -1,4 +1,4 @@
using System; using System;
using System.IO; using System.IO;
namespace GitHub.Runner.Common namespace GitHub.Runner.Common

View File

@@ -1,4 +1,4 @@
using GitHub.Runner.Common.Util; using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;

View File

@@ -1,73 +0,0 @@
using System;
using System.ComponentModel;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
namespace GitHub.Runner.Common
{
/// <summary>
/// Handles redirects for Http requests
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public class RedirectMessageHandler : DelegatingHandler
{
public RedirectMessageHandler(ITraceWriter trace)
{
Trace = trace;
}
protected override async Task<HttpResponseMessage> SendAsync(
HttpRequestMessage request,
CancellationToken cancellationToken)
{
HttpResponseMessage response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);
if (response != null &&
IsRedirect(response.StatusCode) &&
response.Headers.Location != null)
{
Trace.Info($"Redirecting to '{response.Headers.Location}'.");
request = await CloneAsync(request, response.Headers.Location).ConfigureAwait(false);
response.Dispose();
response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);
}
return response;
}
private static bool IsRedirect(HttpStatusCode statusCode)
{
return (int)statusCode >= 300 && (int)statusCode < 400;
}
private static async Task<HttpRequestMessage> CloneAsync(HttpRequestMessage request, Uri requestUri)
{
var clone = new HttpRequestMessage(request.Method, requestUri)
{
Version = request.Version
};
request.Headers.ForEach(header => clone.Headers.TryAddWithoutValidation(header.Key, header.Value));
request.Options.ForEach(option => clone.Options.Set(new HttpRequestOptionsKey<object>(option.Key), option.Value));
if (request.Content != null)
{
clone.Content = new ByteArrayContent(await request.Content.ReadAsByteArrayAsync().ConfigureAwait(false));
request.Content.Headers.ForEach(header => clone.Content.Headers.TryAddWithoutValidation(header.Key, header.Value));
}
return clone;
}
private readonly ITraceWriter Trace;
}
}
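
A hedged sketch of how a caller might wire the handler above into an HttpClient; trace is whatever ITraceWriter the host supplies, and auto-redirects are disabled on the inner handler so the 3xx actually reaches RedirectMessageHandler instead of being followed by the transport (which would drop headers such as Authorization):

    // trace: an ITraceWriter implementation provided by the host (assumed here, not shown in the diff).
    var redirectHandler = new RedirectMessageHandler(trace)
    {
        InnerHandler = new HttpClientHandler { AllowAutoRedirect = false } // let this handler see the 3xx and re-send with headers intact
    };
    using var client = new HttpClient(redirectHandler, disposeHandler: true);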

View File

@@ -1,7 +1,6 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers; using System.Net.Http.Headers;
using System.Net.WebSockets; using System.Net.WebSockets;
using System.Security; using System.Security;
@@ -19,7 +18,7 @@ namespace GitHub.Runner.Common
[ServiceLocator(Default = typeof(ResultServer))] [ServiceLocator(Default = typeof(ResultServer))]
public interface IResultsServer : IRunnerService, IAsyncDisposable public interface IResultsServer : IRunnerService, IAsyncDisposable
{ {
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk); void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token);
Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken); Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
@@ -35,8 +34,6 @@ namespace GitHub.Runner.Common
Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken); IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file, CancellationToken cancellationToken);
} }
public sealed class ResultServer : RunnerService, IResultsServer public sealed class ResultServer : RunnerService, IResultsServer
@@ -53,10 +50,10 @@ namespace GitHub.Runner.Common
private String _liveConsoleFeedUrl; private String _liveConsoleFeedUrl;
private string _token; private string _token;
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk) public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token)
{ {
this._resultsClient = CreateHttpClient(uri, token, useSdk); var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
_token = token; _token = token;
if (!string.IsNullOrEmpty(liveConsoleFeedUrl)) if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
{ {
@@ -65,26 +62,6 @@ namespace GitHub.Runner.Common
} }
} }
public ResultsHttpClient CreateHttpClient(Uri uri, string token, bool useSdk)
{
// Using default 100 timeout
RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null);
// Create retry handler
IEnumerable<DelegatingHandler> delegatingHandlers = new List<DelegatingHandler>();
if (settings.MaxRetryRequest > 0)
{
delegatingHandlers = new DelegatingHandler[] { new VssHttpRetryMessageHandler(settings.MaxRetryRequest) };
}
// Setup RawHttpMessageHandler without credentials
var httpMessageHandler = new RawHttpMessageHandler(new NoOpCredentials(null), settings);
var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers);
return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true, useSdk: useSdk);
}
public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file, public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
CancellationToken cancellationToken) CancellationToken cancellationToken)
{ {
@@ -143,18 +120,6 @@ namespace GitHub.Runner.Common
throw new InvalidOperationException("Results client is not initialized."); throw new InvalidOperationException("Results client is not initialized.");
} }
public Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file,
CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsDiagnosticLogsAsync(planId, jobId, file,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public ValueTask DisposeAsync() public ValueTask DisposeAsync()
{ {
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None); CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
@@ -166,13 +131,13 @@ namespace GitHub.Runner.Common
private void InitializeWebsocketClient(string liveConsoleFeedUrl, string accessToken, TimeSpan delay, bool retryConnection = false) private void InitializeWebsocketClient(string liveConsoleFeedUrl, string accessToken, TimeSpan delay, bool retryConnection = false)
{ {
if (string.IsNullOrEmpty(accessToken)) if (!string.IsNullOrEmpty(accessToken))
{ {
Trace.Info($"No access token from server"); Trace.Info($"No access token from server");
return; return;
} }
if (string.IsNullOrEmpty(liveConsoleFeedUrl)) if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
{ {
Trace.Info($"No live console feed url from server"); Trace.Info($"No live console feed url from server");
return; return;
@@ -257,7 +222,7 @@ namespace GitHub.Runner.Common
{ {
var delay = BackoffTimerHelper.GetRandomBackoff(MinDelayForWebsocketReconnect, MaxDelayForWebsocketReconnect); var delay = BackoffTimerHelper.GetRandomBackoff(MinDelayForWebsocketReconnect, MaxDelayForWebsocketReconnect);
Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms."); Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms.");
Trace.Verbose(ex.ToString()); Trace.Error(ex);
retries++; retries++;
InitializeWebsocketClient(_liveConsoleFeedUrl, _token, delay); InitializeWebsocketClient(_liveConsoleFeedUrl, _token, delay);
} }
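
The reconnect path above picks a random delay between the minimum and maximum reconnect intervals via BackoffTimerHelper.GetRandomBackoff. A rough standalone equivalent (the real helper lives in GitHub.Runner.Sdk and may differ in detail):

    using System;

    Console.WriteLine(RandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(30)));

    // Uniformly pick a delay in [min, max] so reconnecting runners don't all retry in lockstep.
    static TimeSpan RandomBackoff(TimeSpan min, TimeSpan max) =>
        min + TimeSpan.FromMilliseconds((max - min).TotalMilliseconds * Random.Shared.NextDouble());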

View File

@@ -1,11 +1,10 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi; using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Pipelines; using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Services.Common; using GitHub.Services.Common;
using Sdk.RSWebApi.Contracts; using Sdk.RSWebApi.Contracts;
@@ -20,15 +19,7 @@ namespace GitHub.Runner.Common
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token); Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);
Task CompleteJobAsync( Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken token);
Guid planId,
Guid jobId,
TaskResult result,
Dictionary<String, VariableValue> outputs,
IList<StepResult> stepResults,
IList<Annotation> jobAnnotations,
string environmentUrl,
CancellationToken token);
Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token); Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
} }
@@ -61,26 +52,14 @@ namespace GitHub.Runner.Common
{ {
CheckConnection(); CheckConnection();
return RetryRequest<AgentJobRequestMessage>( return RetryRequest<AgentJobRequestMessage>(
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, VarUtil.OS, cancellationToken), cancellationToken, async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken);
shouldRetry: ex =>
ex is not TaskOrchestrationJobNotFoundException && // HTTP status 404
ex is not TaskOrchestrationJobAlreadyAcquiredException && // HTTP status 409
ex is not TaskOrchestrationJobUnprocessableException); // HTTP status 422
} }
public Task CompleteJobAsync( public Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken cancellationToken)
Guid planId,
Guid jobId,
TaskResult result,
Dictionary<String, VariableValue> outputs,
IList<StepResult> stepResults,
IList<Annotation> jobAnnotations,
string environmentUrl,
CancellationToken cancellationToken)
{ {
CheckConnection(); CheckConnection();
return RetryRequest( return RetryRequest(
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, cancellationToken), cancellationToken); async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, cancellationToken), cancellationToken);
} }
public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken) public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)

View File

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk"> <Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup> <PropertyGroup>
<TargetFramework>net6.0</TargetFramework> <TargetFramework>net6.0</TargetFramework>
@@ -16,7 +16,7 @@
<ItemGroup> <ItemGroup>
<PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" /> <PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" /> <PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" /> <PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="4.4.0" /> <PackageReference Include="System.Text.Encoding.CodePages" Version="4.4.0" />
<PackageReference Include="System.Threading.Channels" Version="4.4.0" /> <PackageReference Include="System.Threading.Channels" Version="4.4.0" />

View File

@@ -1,4 +1,4 @@
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.WebApi;
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Threading; using System.Threading;
@@ -15,11 +15,12 @@ namespace GitHub.Runner.Common
[ServiceLocator(Default = typeof(RunnerDotcomServer))] [ServiceLocator(Default = typeof(RunnerDotcomServer))]
public interface IRunnerDotcomServer : IRunnerService public interface IRunnerDotcomServer : IRunnerService
{ {
Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName); Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName);
Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey); Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken); Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);
string GetGitHubRequestId(HttpResponseHeaders headers);
} }
public enum RequestType public enum RequestType
@@ -41,7 +42,7 @@ namespace GitHub.Runner.Common
} }
public async Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName) public async Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName = null)
{ {
var githubApiUrl = ""; var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl); var gitHubUrlBuilder = new UriBuilder(githubUrl);
@@ -51,11 +52,11 @@ namespace GitHub.Runner.Common
// org runner // org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder)) if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{ {
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}"; githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
} }
else else
{ {
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}"; githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
} }
} }
else if (path.Length == 2) else if (path.Length == 2)
@@ -68,11 +69,11 @@ namespace GitHub.Runner.Common
if (UrlUtil.IsHostedServer(gitHubUrlBuilder)) if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{ {
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}"; githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
} }
else else
{ {
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}"; githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
} }
} }
else else
@@ -81,8 +82,14 @@ namespace GitHub.Runner.Common
} }
var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools"); var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
var agents = runnersList.ToTaskAgents();
return runnersList.ToTaskAgents(); if (string.IsNullOrEmpty(agentName))
{
return agents;
}
return agents.Where(x => string.Equals(x.Name, agentName, StringComparison.OrdinalIgnoreCase)).ToList();
} }
public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken) public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
@@ -130,16 +137,6 @@ namespace GitHub.Runner.Common
} }
public async Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey) public async Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
{
return await AddOrReplaceRunner(runnerGroupId, agent, githubUrl, githubToken, publicKey, false);
}
public async Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
{
return await AddOrReplaceRunner(runnerGroupId, agent, githubUrl, githubToken, publicKey, true);
}
private async Task<DistributedTask.WebApi.Runner> AddOrReplaceRunner(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey, bool replace)
{ {
var gitHubUrlBuilder = new UriBuilder(githubUrl); var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries); var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
@@ -162,15 +159,9 @@ namespace GitHub.Runner.Common
{"updates_disabled", agent.DisableUpdate}, {"updates_disabled", agent.DisableUpdate},
{"ephemeral", agent.Ephemeral}, {"ephemeral", agent.Ephemeral},
{"labels", agent.Labels}, {"labels", agent.Labels},
{"public_key", publicKey}, {"public_key", publicKey}
}; };
if (replace)
{
bodyObject.Add("runner_id", agent.Id);
bodyObject.Add("replace", replace);
}
var body = new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"); var body = new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json");
return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body); return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
@@ -204,7 +195,7 @@ namespace GitHub.Runner.Common
if (response != null) if (response != null)
{ {
responseStatus = response.StatusCode; responseStatus = response.StatusCode;
var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers); var githubRequestId = GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode) if (response.IsSuccessStatusCode)
{ {
@@ -224,7 +215,7 @@ namespace GitHub.Runner.Common
} }
catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound) catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound)
{ {
Trace.Error($"{errorMessage} -- Attempt: {retry}"); Trace.Error($"{errorMessage} -- Atempt: {retry}");
Trace.Error(ex); Trace.Error(ex);
} }
} }
@@ -233,5 +224,14 @@ namespace GitHub.Runner.Common
await Task.Delay(backOff); await Task.Delay(backOff);
} }
} }
public string GetGitHubRequestId(HttpResponseHeaders headers)
{
if (headers.TryGetValues("x-github-request-id", out var headerValues))
{
return headerValues.FirstOrDefault();
}
return string.Empty;
}
} }
} }

View File

@@ -1,4 +1,4 @@
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.WebApi;
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Threading; using System.Threading;
@@ -27,8 +27,8 @@ namespace GitHub.Runner.Common
// Configuration // Configuration
Task<TaskAgent> AddAgentAsync(Int32 agentPoolId, TaskAgent agent); Task<TaskAgent> AddAgentAsync(Int32 agentPoolId, TaskAgent agent);
Task DeleteAgentAsync(int agentPoolId, ulong agentId); Task DeleteAgentAsync(int agentPoolId, int agentId);
Task DeleteAgentAsync(ulong agentId); Task DeleteAgentAsync(int agentId);
Task<List<TaskAgentPool>> GetAgentPoolsAsync(string agentPoolName = null, TaskAgentPoolType poolType = TaskAgentPoolType.Automation); Task<List<TaskAgentPool>> GetAgentPoolsAsync(string agentPoolName = null, TaskAgentPoolType poolType = TaskAgentPoolType.Automation);
Task<List<TaskAgent>> GetAgentsAsync(int agentPoolId, string agentName = null); Task<List<TaskAgent>> GetAgentsAsync(int agentPoolId, string agentName = null);
Task<List<TaskAgent>> GetAgentsAsync(string agentName); Task<List<TaskAgent>> GetAgentsAsync(string agentName);
@@ -38,7 +38,7 @@ namespace GitHub.Runner.Common
Task<TaskAgentSession> CreateAgentSessionAsync(Int32 poolId, TaskAgentSession session, CancellationToken cancellationToken); Task<TaskAgentSession> CreateAgentSessionAsync(Int32 poolId, TaskAgentSession session, CancellationToken cancellationToken);
Task DeleteAgentMessageAsync(Int32 poolId, Int64 messageId, Guid sessionId, CancellationToken cancellationToken); Task DeleteAgentMessageAsync(Int32 poolId, Int64 messageId, Guid sessionId, CancellationToken cancellationToken);
Task DeleteAgentSessionAsync(Int32 poolId, Guid sessionId, CancellationToken cancellationToken); Task DeleteAgentSessionAsync(Int32 poolId, Guid sessionId, CancellationToken cancellationToken);
Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken); Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken);
// job request // job request
Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken); Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
@@ -50,7 +50,7 @@ namespace GitHub.Runner.Common
Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken); Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken);
// agent update // agent update
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace); Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState, string trace);
} }
public sealed class RunnerServer : RunnerService, IRunnerServer public sealed class RunnerServer : RunnerService, IRunnerServer
@@ -239,13 +239,13 @@ namespace GitHub.Runner.Common
return _genericTaskAgentClient.ReplaceAgentAsync(agentPoolId, agent); return _genericTaskAgentClient.ReplaceAgentAsync(agentPoolId, agent);
} }
public Task DeleteAgentAsync(int agentPoolId, ulong agentId) public Task DeleteAgentAsync(int agentPoolId, int agentId)
{ {
CheckConnection(RunnerConnectionType.Generic); CheckConnection(RunnerConnectionType.Generic);
return _genericTaskAgentClient.DeleteAgentAsync(agentPoolId, agentId); return _genericTaskAgentClient.DeleteAgentAsync(agentPoolId, agentId);
} }
public Task DeleteAgentAsync(ulong agentId) public Task DeleteAgentAsync(int agentId)
{ {
return DeleteAgentAsync(0, agentId); // agentPool is ignored server side return DeleteAgentAsync(0, agentId); // agentPool is ignored server side
} }
@@ -272,10 +272,10 @@ namespace GitHub.Runner.Common
return _messageTaskAgentClient.DeleteAgentSessionAsync(poolId, sessionId, cancellationToken: cancellationToken); return _messageTaskAgentClient.DeleteAgentSessionAsync(poolId, sessionId, cancellationToken: cancellationToken);
} }
public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken)
{ {
CheckConnection(RunnerConnectionType.MessageQueue); CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, os, architecture, disableUpdate, cancellationToken: cancellationToken); return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, cancellationToken: cancellationToken);
} }
//----------------------------------------------------------------- //-----------------------------------------------------------------
@@ -315,7 +315,7 @@ namespace GitHub.Runner.Common
return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken); return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken);
} }
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace) public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState, string trace)
{ {
CheckConnection(RunnerConnectionType.Generic); CheckConnection(RunnerConnectionType.Generic);
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace); return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace);

View File

@@ -80,11 +80,10 @@ namespace GitHub.Runner.Common
} }
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxRetryAttemptsCount); await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxRetryAttemptsCount);
} }
protected async Task<T> RetryRequest<T>(Func<Task<T>> func, protected async Task<T> RetryRequest<T>(Func<Task<T>> func,
CancellationToken cancellationToken, CancellationToken cancellationToken,
int maxRetryAttemptsCount = 5, int maxRetryAttemptsCount = 5
Func<Exception, bool> shouldRetry = null
) )
{ {
var retryCount = 0; var retryCount = 0;
@@ -97,7 +96,7 @@ namespace GitHub.Runner.Common
return await func(); return await func();
} }
// TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122 // TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
catch (Exception ex) when (retryCount < maxRetryAttemptsCount && (shouldRetry == null || shouldRetry(ex))) catch (Exception ex) when (retryCount < maxRetryAttemptsCount)
{ {
Trace.Error("Catch exception during request"); Trace.Error("Catch exception during request");
Trace.Error(ex); Trace.Error(ex);
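
The optional shouldRetry predicate added above lets callers mark certain exceptions as permanent, as the RunServer change earlier in this compare does for its 404/409/422-style exceptions. A hypothetical caller, with FetchSomethingAsync assumed:

    // Retry transient failures only; treat argument-style errors as permanent and rethrow immediately.
    var result = await RetryRequest<string>(
        async () => await FetchSomethingAsync(),        // assumed operation
        cancellationToken,
        maxRetryAttemptsCount: 5,
        shouldRetry: ex => ex is not ArgumentException);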

View File

@@ -1,4 +1,4 @@
using System; using System;
using System.Diagnostics; using System.Diagnostics;
using System.Globalization; using System.Globalization;
using System.IO; using System.IO;
@@ -93,4 +93,4 @@ namespace GitHub.Runner.Common
IndentLevel--; IndentLevel--;
} }
} }
} }

View File

@@ -1,4 +1,4 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Net.Http; using System.Net.Http;

View File

@@ -1,4 +1,4 @@
using System; using System;
using System.Collections.Concurrent; using System.Collections.Concurrent;
using System.Diagnostics; using System.Diagnostics;
using GitHub.DistributedTask.Logging; using GitHub.DistributedTask.Logging;

View File

@@ -1,4 +1,4 @@
using Newtonsoft.Json; using Newtonsoft.Json;
using System; using System;
using System.Diagnostics; using System.Diagnostics;
using System.Runtime.CompilerServices; using System.Runtime.CompilerServices;

View File

@@ -1,8 +1,8 @@
// Represents absence of value. // Represents absence of value.
namespace GitHub.Runner.Common namespace GitHub.Runner.Common
{ {
public readonly struct Unit public readonly struct Unit
{ {
public static readonly Unit Value = default; public static readonly Unit Value = default;
} }
} }

View File

@@ -1,4 +1,4 @@
using System; using System;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;

View File

@@ -4,7 +4,7 @@
public static class EnumUtil public static class EnumUtil
{ {
public static T? TryParse<T>(string value) where T : struct public static T? TryParse<T>(string value) where T: struct
{ {
T val; T val;
if (Enum.TryParse(value ?? string.Empty, ignoreCase: true, result: out val)) if (Enum.TryParse(value ?? string.Empty, ignoreCase: true, result: out val))

View File

@@ -1,4 +1,4 @@
namespace GitHub.Runner.Common.Util namespace GitHub.Runner.Common.Util
{ {
using System; using System;
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.WebApi;

View File

@@ -6,7 +6,13 @@ namespace GitHub.Runner.Common.Util
public static class NodeUtil public static class NodeUtil
{ {
private const string _defaultNodeVersion = "node16"; private const string _defaultNodeVersion = "node16";
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node16", "node20" });
#if (OS_OSX || OS_WINDOWS) && ARM64
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node16" });
#else
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node12", "node16" });
#endif
public static string GetInternalNodeVersion() public static string GetInternalNodeVersion()
{ {
var forcedInternalNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion); var forcedInternalNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion);
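
The hunk ends inside GetInternalNodeVersion, but the members above imply the remaining shape: honor the forced version only when it is one of the built-in versions, otherwise fall back to the default. A speculative sketch of that tail, not taken from the diff:

    // Speculative continuation (the real method body is truncated in this hunk and may differ).
    if (!string.IsNullOrEmpty(forcedInternalNodeVersion) && BuiltInNodeVersions.Contains(forcedInternalNodeVersion))
    {
        return forcedInternalNodeVersion;
    }
    return _defaultNodeVersion;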

View File

@@ -1,4 +1,4 @@
using System; using System;
namespace GitHub.Runner.Common.Util namespace GitHub.Runner.Common.Util
{ {

View File

@@ -1,4 +1,4 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics; using System.Diagnostics;
using System.IO; using System.IO;
@@ -24,15 +24,7 @@ namespace GitHub.Runner.Listener
private TimeSpan _getNextMessageRetryInterval; private TimeSpan _getNextMessageRetryInterval;
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online; private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource; private CancellationTokenSource _getMessagesTokenSource;
private VssCredentials _creds;
private TaskAgentSession _session;
private IBrokerServer _brokerServer; private IBrokerServer _brokerServer;
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
private bool _accessTokenRevoked = false;
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
public override void Initialize(IHostContext hostContext) public override void Initialize(IHostContext hostContext)
{ {
@@ -42,141 +34,15 @@ namespace GitHub.Runner.Listener
_brokerServer = HostContext.GetService<IBrokerServer>(); _brokerServer = HostContext.GetService<IBrokerServer>();
} }
public async Task<CreateSessionResult> CreateSessionAsync(CancellationToken token) public async Task<Boolean> CreateSessionAsync(CancellationToken token)
{ {
Trace.Entering(); await RefreshBrokerConnection();
return await Task.FromResult(true);
// Settings
var configManager = HostContext.GetService<IConfigurationManager>();
_settings = configManager.LoadSettings();
var serverUrl = _settings.ServerUrlV2;
Trace.Info(_settings);
if (string.IsNullOrEmpty(_settings.ServerUrlV2))
{
throw new InvalidOperationException("ServerUrlV2 is not set");
}
// Create connection.
Trace.Info("Loading Credentials");
var credMgr = HostContext.GetService<ICredentialManager>();
_creds = credMgr.LoadCredentials();
var agent = new TaskAgentReference
{
Id = _settings.AgentId,
Name = _settings.AgentName,
Version = BuildConstants.RunnerPackage.Version,
OSDescription = RuntimeInformation.OSDescription,
};
var currentProcess = Process.GetCurrentProcess();
string sessionName = $"{Environment.MachineName ?? "RUNNER"} (PID: {currentProcess.Id})";
var taskAgentSession = new TaskAgentSession(sessionName, agent);
string errorMessage = string.Empty;
bool encounteringError = false;
while (true)
{
token.ThrowIfCancellationRequested();
Trace.Info($"Attempt to create session.");
try
{
Trace.Info("Connecting to the Broker Server...");
await _brokerServer.ConnectAsync(new Uri(serverUrl), _creds);
Trace.Info("VssConnection created");
_term.WriteLine();
_term.WriteSuccessMessage("Connected to GitHub");
_term.WriteLine();
_session = await _brokerServer.CreateSessionAsync(taskAgentSession, token);
Trace.Info($"Session created.");
if (encounteringError)
{
_term.WriteLine($"{DateTime.UtcNow:u}: Runner reconnected.");
_sessionCreationExceptionTracker.Clear();
encounteringError = false;
}
return CreateSessionResult.Success;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info("Session creation has been cancelled.");
throw;
}
catch (TaskAgentAccessTokenExpiredException)
{
Trace.Info("Runner OAuth token has been revoked. Session creation failed.");
_accessTokenRevoked = true;
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during create session.");
Trace.Error(ex);
if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred)
{
// "invalid_client" means the runner registration has been deleted from the server.
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
return CreateSessionResult.Failure;
}
// Check whether we get 401 because the runner registration already removed by the service.
// If the runner registration get deleted, we can't exchange oauth token.
Trace.Error("Test oauth app registration.");
var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl));
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
return CreateSessionResult.Failure;
}
}
if (!IsSessionCreationExceptionRetriable(ex))
{
_term.WriteError($"Failed to create session. {ex.Message}");
if (ex is TaskAgentSessionConflictException)
{
return CreateSessionResult.SessionConflict;
}
return CreateSessionResult.Failure;
}
if (!encounteringError) //print the message only on the first error
{
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
encounteringError = true;
}
Trace.Info("Sleeping for {0} seconds before retrying.", _sessionCreationRetryInterval.TotalSeconds);
await HostContext.Delay(_sessionCreationRetryInterval, token);
}
}
} }
public async Task DeleteSessionAsync() public async Task DeleteSessionAsync()
{ {
if (_session != null && _session.SessionId != Guid.Empty) await Task.CompletedTask;
{
if (!_accessTokenRevoked)
{
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
{
await _brokerServer.DeleteSessionAsync(ts.Token);
}
}
else
{
Trace.Warning("Runner OAuth token has been revoked. Skip deleting session.");
}
}
} }
public void OnJobStatus(object sender, JobStatusEventArgs e) public void OnJobStatus(object sender, JobStatusEventArgs e)
@@ -207,13 +73,7 @@ namespace GitHub.Runner.Listener
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token); _getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
try try
{ {
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId, message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token, runnerStatus, BuildConstants.RunnerPackage.Version);
runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
_settings.DisableUpdate,
_getMessagesTokenSource.Token);
if (message == null) if (message == null)
{ {
@@ -237,7 +97,7 @@ namespace GitHub.Runner.Listener
Trace.Info("Runner OAuth token has been revoked. Unable to pull message."); Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
throw; throw;
} }
catch (AccessDeniedException e) when (e.ErrorCode == 1) catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{ {
throw; throw;
} }
@@ -248,7 +108,7 @@ namespace GitHub.Runner.Listener
if (!IsGetNextMessageExceptionRetriable(ex)) if (!IsGetNextMessageExceptionRetriable(ex))
{ {
throw new NonRetryableException("Get next message failed with non-retryable error.", ex); throw;
} }
else else
{ {
@@ -278,7 +138,7 @@ namespace GitHub.Runner.Listener
} }
// re-create VssConnection before next retry // re-create VssConnection before next retry
await RefreshBrokerConnectionAsync(); await RefreshBrokerConnection();
Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds); Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
await HostContext.Delay(_getNextMessageRetryInterval, token); await HostContext.Delay(_getNextMessageRetryInterval, token);
@@ -308,11 +168,6 @@ namespace GitHub.Runner.Listener
} }
} }
public async Task RefreshListenerTokenAsync(CancellationToken cancellationToken)
{
await RefreshBrokerConnectionAsync();
}
public async Task DeleteMessageAsync(TaskAgentMessage message) public async Task DeleteMessageAsync(TaskAgentMessage message)
{ {
await Task.CompletedTask; await Task.CompletedTask;
@@ -336,84 +191,12 @@ namespace GitHub.Runner.Listener
} }
} }
private bool IsSessionCreationExceptionRetriable(Exception ex) private async Task RefreshBrokerConnection()
{
if (ex is TaskAgentNotFoundException)
{
Trace.Info("The runner no longer exists on the server. Stopping the runner.");
_term.WriteError("The runner no longer exists on the server. Please reconfigure the runner.");
return false;
}
else if (ex is TaskAgentSessionConflictException)
{
Trace.Info("The session for this runner already exists.");
_term.WriteError("A session for this runner already exists.");
if (_sessionCreationExceptionTracker.ContainsKey(nameof(TaskAgentSessionConflictException)))
{
_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)]++;
if (_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] * _sessionCreationRetryInterval.TotalSeconds >= _sessionConflictRetryLimit.TotalSeconds)
{
Trace.Info("The session conflict exception have reached retry limit.");
_term.WriteError($"Stop retry on SessionConflictException after retried for {_sessionConflictRetryLimit.TotalSeconds} seconds.");
return false;
}
}
else
{
_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] = 1;
}
Trace.Info("The session conflict exception haven't reached retry limit.");
return true;
}
else if (ex is VssOAuthTokenRequestException && ex.Message.Contains("Current server time is"))
{
Trace.Info("Local clock might be skewed.");
_term.WriteError("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
if (_sessionCreationExceptionTracker.ContainsKey(nameof(VssOAuthTokenRequestException)))
{
_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)]++;
if (_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] * _sessionCreationRetryInterval.TotalSeconds >= _clockSkewRetryLimit.TotalSeconds)
{
Trace.Info("The OAuth token request exception have reached retry limit.");
_term.WriteError($"Stopped retrying OAuth token request exception after {_clockSkewRetryLimit.TotalSeconds} seconds.");
return false;
}
}
else
{
_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] = 1;
}
Trace.Info("The OAuth token request exception haven't reached retry limit.");
return true;
}
else if (ex is TaskAgentPoolNotFoundException ||
ex is AccessDeniedException ||
ex is VssUnauthorizedException)
{
Trace.Info($"Non-retriable exception: {ex.Message}");
return false;
}
else if (ex is InvalidOperationException)
{
Trace.Info($"Non-retriable exception: {ex.Message}");
return false;
}
else
{
Trace.Info($"Retriable exception: {ex.Message}");
return true;
}
}
private async Task RefreshBrokerConnectionAsync()
{ {
var configManager = HostContext.GetService<IConfigurationManager>(); var configManager = HostContext.GetService<IConfigurationManager>();
_settings = configManager.LoadSettings(); _settings = configManager.LoadSettings();
if (string.IsNullOrEmpty(_settings.ServerUrlV2)) if (_settings.ServerUrlV2 == null)
{ {
throw new InvalidOperationException("ServerUrlV2 is not set"); throw new InvalidOperationException("ServerUrlV2 is not set");
} }
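
The retry limits kept in IsSessionCreationExceptionRetriable above translate into a concrete attempt budget; spelled out with this file's constants (30-second retry interval, 4-minute conflict limit, 30-minute clock-skew limit):

    using System;

    var interval = TimeSpan.FromSeconds(30);                                                // _sessionCreationRetryInterval
    var conflictAttempts  = TimeSpan.FromMinutes(4).TotalSeconds / interval.TotalSeconds;   // 8 attempts before session conflicts become fatal
    var clockSkewAttempts = TimeSpan.FromMinutes(30).TotalSeconds / interval.TotalSeconds;  // 60 attempts before clock-skew errors become fatal
    Console.WriteLine($"{conflictAttempts} conflict retries, {clockSkewAttempts} clock-skew retries");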

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
@@ -39,7 +39,6 @@ namespace GitHub.Runner.Listener.Check
            string githubApiUrl = null;
            string actionsTokenServiceUrl = null;
            string actionsPipelinesServiceUrl = null;
-           string resultsReceiverServiceUrl = null;
            var urlBuilder = new UriBuilder(url);
            if (UrlUtil.IsHostedServer(urlBuilder))
            {
@@ -48,7 +47,6 @@ namespace GitHub.Runner.Listener.Check
                githubApiUrl = urlBuilder.Uri.AbsoluteUri;
                actionsTokenServiceUrl = "https://vstoken.actions.githubusercontent.com/_apis/health";
                actionsPipelinesServiceUrl = "https://pipelines.actions.githubusercontent.com/_apis/health";
-               resultsReceiverServiceUrl = "https://results-receiver.actions.githubusercontent.com/health";
            }
            else
            {
@@ -58,31 +56,13 @@ namespace GitHub.Runner.Listener.Check
                actionsTokenServiceUrl = urlBuilder.Uri.AbsoluteUri;
                urlBuilder.Path = "_services/pipelines/_apis/health";
                actionsPipelinesServiceUrl = urlBuilder.Uri.AbsoluteUri;
-               resultsReceiverServiceUrl = string.Empty; // we don't have Results service in GHES yet.
            }
-           var codeLoadUrlBuilder = new UriBuilder(url);
-           codeLoadUrlBuilder.Host = $"codeload.{codeLoadUrlBuilder.Host}";
-           codeLoadUrlBuilder.Path = "_ping";
            // check github api
            checkTasks.Add(CheckUtil.CheckDns(githubApiUrl));
            checkTasks.Add(CheckUtil.CheckPing(githubApiUrl));
            checkTasks.Add(HostContext.CheckHttpsGetRequests(githubApiUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
-           // check github codeload
-           checkTasks.Add(CheckUtil.CheckDns(codeLoadUrlBuilder.Uri.AbsoluteUri));
-           checkTasks.Add(CheckUtil.CheckPing(codeLoadUrlBuilder.Uri.AbsoluteUri));
-           checkTasks.Add(HostContext.CheckHttpsGetRequests(codeLoadUrlBuilder.Uri.AbsoluteUri, pat, expectedHeader: "X-GitHub-Request-Id"));
-           // check results-receiver service
-           if (!string.IsNullOrEmpty(resultsReceiverServiceUrl))
-           {
-               checkTasks.Add(CheckUtil.CheckDns(resultsReceiverServiceUrl));
-               checkTasks.Add(CheckUtil.CheckPing(resultsReceiverServiceUrl));
-               checkTasks.Add(HostContext.CheckHttpsGetRequests(resultsReceiverServiceUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
-           }
            // check actions token service
            checkTasks.Add(CheckUtil.CheckDns(actionsTokenServiceUrl));
            checkTasks.Add(CheckUtil.CheckPing(actionsTokenServiceUrl));
@@ -110,4 +90,4 @@ namespace GitHub.Runner.Listener.Check
            return result;
        }
    }
}


@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Tracing;
using System.IO;
@@ -351,39 +351,21 @@ namespace GitHub.Runner.Listener.Check
        private readonly Dictionary<string, HashSet<string>> _ignoredEvent = new()
        {
            {
-               "System.Net.Http",
+               "Microsoft-System-Net-Http",
                new HashSet<string>
                {
                    "Info",
                    "Associate",
-                   "Enter",
-                   "Exit"
                }
            },
            {
-               "System.Net.Security",
-               new HashSet<string>
-               {
-                   "Info",
-                   "DumpBuffer",
-                   "SslStreamCtor",
-                   "SecureChannelCtor",
-                   "NoDelegateNoClientCert",
-                   "CertsAfterFiltering",
-                   "UsingCachedCredential",
-                   "SspiSelectedCipherSuite"
-               }
-           },
-           {
-               "Private.InternalDiagnostics.System.Net.Http",
-               new HashSet<string>
-               {
-                   "Info",
-                   "Associate",
-               }
-           },
-           {
-               "Private.InternalDiagnostics.System.Net.Security",
+               "Microsoft-System-Net-Security",
                new HashSet<string>
                {
-                   "Enter",
-                   "Exit",
                    "Info",
                    "DumpBuffer",
                    "SslStreamCtor",
@@ -409,8 +391,8 @@ namespace GitHub.Runner.Listener.Check
        {
            base.OnEventSourceCreated(eventSource);
-           if (eventSource.Name.Contains("System.Net.Http") ||
-               eventSource.Name.Contains("System.Net.Security"))
+           if (eventSource.Name == "Microsoft-System-Net-Http" ||
+               eventSource.Name == "Microsoft-System-Net-Security")
            {
                EnableEvents(eventSource, EventLevel.Verbose, EventKeywords.All);
            }
@@ -433,4 +415,4 @@
            }
        }
    }
}


@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;


@@ -1,4 +1,4 @@
using System.Collections.Generic;
using System.Threading.Tasks;
using GitHub.Runner.Common;
@@ -27,4 +27,4 @@ namespace GitHub.Runner.Listener.Check
        public List<string> Logs { get; set; }
    }
}


@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
@@ -56,4 +56,4 @@ namespace GitHub.Runner.Listener.Check
            return result;
        }
    }
}


@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;


@@ -1,4 +1,4 @@
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Common.Util;
using System;
using System.Collections;
@@ -29,8 +29,8 @@ namespace GitHub.Runner.Listener
        private readonly Dictionary<string, string[]> validOptions = new()
        {
            // Valid configure flags and args
            [Constants.Runner.CommandLine.Commands.Configure] =
                new string[]
                {
                    Constants.Runner.CommandLine.Flags.DisableUpdate,
                    Constants.Runner.CommandLine.Flags.Ephemeral,
@@ -38,7 +38,6 @@ namespace GitHub.Runner.Listener
                    Constants.Runner.CommandLine.Flags.Replace,
                    Constants.Runner.CommandLine.Flags.RunAsService,
                    Constants.Runner.CommandLine.Flags.Unattended,
-                   Constants.Runner.CommandLine.Flags.NoDefaultLabels,
                    Constants.Runner.CommandLine.Args.Auth,
                    Constants.Runner.CommandLine.Args.Labels,
                    Constants.Runner.CommandLine.Args.MonitorSocketAddress,
@@ -86,7 +85,6 @@ namespace GitHub.Runner.Listener
        public bool Ephemeral => TestFlag(Constants.Runner.CommandLine.Flags.Ephemeral);
        public bool GenerateServiceConfig => TestFlag(Constants.Runner.CommandLine.Flags.GenerateServiceConfig);
        public bool Help => TestFlag(Constants.Runner.CommandLine.Flags.Help);
-       public bool NoDefaultLabels => TestFlag(Constants.Runner.CommandLine.Flags.NoDefaultLabels);
        public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
        public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version);
        public bool RemoveLocalConfig => TestFlag(Constants.Runner.CommandLine.Flags.Local);
@@ -184,7 +182,7 @@ namespace GitHub.Runner.Listener
            {
                command = Constants.Runner.CommandLine.Commands.Warmup;
            }
            return command;
        }


@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
@@ -137,7 +137,7 @@ namespace GitHub.Runner.Listener.Configuration
                    GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
                    runnerSettings.ServerUrl = authResult.TenantUrl;
                    runnerSettings.UseV2Flow = authResult.UseV2Flow;
-                   Trace.Info($"Using V2 flow: {runnerSettings.UseV2Flow}");
+                   _term.WriteLine($"Using V2 flow: {runnerSettings.UseV2Flow}");
                    creds = authResult.ToVssCredentials();
                    Trace.Info("cred retrieved via GitHub auth");
                }
@@ -244,11 +244,11 @@ namespace GitHub.Runner.Listener.Configuration
                List<TaskAgent> agents;
                if (runnerSettings.UseV2Flow)
                {
-                   agents = await _dotcomServer.GetRunnerByNameAsync(runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
+                   agents = await _dotcomServer.GetRunnersAsync(runnerSettings.PoolId, runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
                }
                else
                {
-                   agents = await _runnerServer.GetAgentsAsync(runnerSettings.AgentName);
+                   agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
                }
                Trace.Verbose("Returns {0} agents", agents.Count);
@@ -259,27 +259,11 @@ namespace GitHub.Runner.Listener.Configuration
                    if (command.GetReplace())
                    {
                        // Update existing agent with new PublicKey, agent version.
-                       agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate, command.NoDefaultLabels);
+                       agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
                        try
                        {
-                           if (runnerSettings.UseV2Flow)
-                           {
-                               var runner = await _dotcomServer.ReplaceRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
-                               runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
-                               agent.Id = runner.Id;
-                               agent.Authorization = new TaskAgentAuthorization()
-                               {
-                                   AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
-                                   ClientId = new Guid(runner.RunnerAuthorization.ClientId)
-                               };
-                           }
-                           else
-                           {
-                               agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
-                           }
+                           agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
                            if (command.DisableUpdate &&
                                command.DisableUpdate != agent.DisableUpdate)
                            {
@@ -309,7 +293,7 @@ namespace GitHub.Runner.Listener.Configuration
                    else
                    {
                        // Create a new agent.
-                       agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate, command.NoDefaultLabels);
+                       agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
                        try
                        {
@@ -570,7 +554,7 @@ namespace GitHub.Runner.Listener.Configuration
        }
-       private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate, bool noDefaultLabels)
+       private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
        {
            ArgUtil.NotNull(agent, nameof(agent));
            agent.Authorization = new TaskAgentAuthorization
@@ -587,16 +571,9 @@ namespace GitHub.Runner.Listener.Configuration
            agent.Labels.Clear();
-           if (!noDefaultLabels)
-           {
-               agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
-               agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
-               agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
-           }
-           else if (userLabels.Count == 0)
-           {
-               throw new NotSupportedException("Disabling default labels via --no-default-labels without specifying --labels is not supported");
-           }
+           agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
+           agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
+           agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
            foreach (var userLabel in userLabels)
            {
@@ -606,7 +583,7 @@ namespace GitHub.Runner.Listener.Configuration
            return agent;
        }
-       private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate, bool noDefaultLabels)
+       private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
        {
            TaskAgent agent = new(agentName)
            {
@@ -621,16 +598,9 @@ namespace GitHub.Runner.Listener.Configuration
                DisableUpdate = disableUpdate
            };
-           if (!noDefaultLabels)
-           {
-               agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
-               agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
-               agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
-           }
-           else if (userLabels.Count == 0)
-           {
-               throw new NotSupportedException("Disabling default labels via --no-default-labels without specifying --labels is not supported");
-           }
+           agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
+           agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
+           agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
            foreach (var userLabel in userLabels)
            {
@@ -725,7 +695,7 @@ namespace GitHub.Runner.Listener.Configuration
                {
                    var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
                    responseStatus = response.StatusCode;
-                   var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);
+                   var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
                    if (response.IsSuccessStatusCode)
                    {
@@ -744,7 +714,7 @@ namespace GitHub.Runner.Listener.Configuration
                catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
                {
                    retryCount++;
-                   Trace.Error($"Failed to get JIT runner token -- Attempt: {retryCount}");
+                   Trace.Error($"Failed to get JIT runner token -- Atempt: {retryCount}");
                    Trace.Error(ex);
                }
            }
@@ -788,7 +758,7 @@ namespace GitHub.Runner.Listener.Configuration
                {
                    var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
                    responseStatus = response.StatusCode;
-                   var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);
+                   var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
                    if (response.IsSuccessStatusCode)
                    {
@@ -807,7 +777,7 @@ namespace GitHub.Runner.Listener.Configuration
                catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
                {
                    retryCount++;
-                   Trace.Error($"Failed to get tenant credentials -- Attempt: {retryCount}");
+                   Trace.Error($"Failed to get tenant credentials -- Atempt: {retryCount}");
                    Trace.Error(ex);
                }
            }


@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Runner.Common;
@@ -46,7 +46,7 @@ namespace GitHub.Runner.Listener.Configuration
            if (!store.HasCredentials())
            {
                throw new InvalidOperationException("Credentials not stored. Must reconfigure.");
            }
            CredentialData credData = store.GetCredentials();


@@ -1,4 +1,4 @@
#if OS_WINDOWS #if OS_WINDOWS
#pragma warning disable CA1416 #pragma warning disable CA1416
using System; using System;
using System.Collections; using System.Collections;
@@ -514,25 +514,9 @@ namespace GitHub.Runner.Listener.Configuration
failureActions.Add(new FailureAction(RecoverAction.Restart, 60000)); failureActions.Add(new FailureAction(RecoverAction.Restart, 60000));
// Lock the Service Database // Lock the Service Database
int svcLockRetries = 10; svcLock = LockServiceDatabase(scmHndl);
int svcLockRetryTimeout = 5000; if (svcLock.ToInt64() <= 0)
while (true)
{ {
svcLock = LockServiceDatabase(scmHndl);
if (svcLock.ToInt64() > 0)
{
break;
}
_term.WriteLine("Retrying Lock Service Database...");
svcLockRetries--;
if (svcLockRetries > 0)
{
Thread.Sleep(svcLockRetryTimeout);
continue;
}
throw new Exception("Failed to Lock Service Database for Write"); throw new Exception("Failed to Lock Service Database for Write");
} }


@@ -1,4 +1,4 @@
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System;


@@ -1,4 +1,4 @@
#if OS_WINDOWS
using System.IO;
using System.Security.Cryptography;
using System.Text;


@@ -1,4 +1,4 @@
#if OS_LINUX || OS_OSX
using System;
using System.IO;
using System.Security.Cryptography;

Some files were not shown because too many files have changed in this diff.