Compare commits


1 commit

Author: eric sciple
SHA1: fa07c78c0c
Message: .
Date: 2022-01-26 13:15:05 -06:00
121 changed files with 2554 additions and 6992 deletions

View File

@@ -18,7 +18,7 @@ jobs:
build:
strategy:
matrix:
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64 ]
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
include:
- runtime: linux-x64
os: ubuntu-latest
@@ -36,17 +36,13 @@ jobs:
os: macOS-latest
devScript: ./dev.sh
- runtime: osx-arm64
os: macOS-latest
devScript: ./dev.sh
- runtime: win-x64
os: windows-2019
devScript: ./dev
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
# Build runner layout
- name: Build & Layout Release
@@ -54,6 +50,13 @@ jobs:
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
working-directory: src
# Run tests
- name: L0
run: |
${{ matrix.devScript }} test
working-directory: src
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'
# Check runtime/externals hash
- name: Compute/Compare runtime and externals Hash
shell: bash
@@ -77,13 +80,6 @@ jobs:
DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
# Run tests
- name: L0
run: |
${{ matrix.devScript }} test
working-directory: src
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm' && matrix.runtime != 'osx-arm64'
# Create runner package tar.gz/zip
- name: Package Release
if: github.event_name != 'pull_request'

View File

@@ -23,7 +23,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

View File

@@ -11,7 +11,7 @@ jobs:
if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
# Make sure ./releaseVersion match ./src/runnerversion
# Query GitHub release ensure version is not used
@@ -51,28 +51,24 @@ jobs:
linux-arm-sha: ${{ steps.sha.outputs.linux-arm-sha256 }}
win-x64-sha: ${{ steps.sha.outputs.win-x64-sha256 }}
osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
osx-arm64-sha: ${{ steps.sha.outputs.osx-arm64-sha256 }}
linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }}
linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }}
linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }}
win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }}
osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }}
osx-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-arm64-sha256 }}
linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }}
linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }}
linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }}
win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }}
osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }}
osx-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-arm64-sha256 }}
linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }}
linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }}
linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }}
win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }}
osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }}
osx-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-arm64-sha256 }}
strategy:
matrix:
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64 ]
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
include:
- runtime: linux-x64
os: ubuntu-latest
@@ -89,10 +85,6 @@ jobs:
- runtime: osx-x64
os: macOS-latest
devScript: ./dev.sh
- runtime: osx-arm64
os: macOS-latest
devScript: ./dev.sh
- runtime: win-x64
os: windows-2019
@@ -100,7 +92,7 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
# Build runner layout
- name: Build & Layout Release
@@ -108,6 +100,13 @@ jobs:
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
working-directory: src
# Run tests
- name: L0
run: |
${{ matrix.devScript }} test
working-directory: src
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'
# Create runner package tar.gz/zip
- name: Package Release
if: github.event_name != 'pull_request'
@@ -218,7 +217,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
# Download runner package tar.gz/zip produced by 'build' job
- name: Download Artifact
@@ -240,43 +239,27 @@ jobs:
var releaseNote = fs.readFileSync('${{ github.workspace }}/releaseNote.md', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion)
releaseNote = releaseNote.replace(/<WIN_X64_SHA>/g, '${{needs.build.outputs.win-x64-sha}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA>/g, '${{needs.build.outputs.osx-x64-sha}}')
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA>/g, '${{needs.build.outputs.osx-arm64-sha}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}')
console.log(releaseNote)
core.setOutput('version', runnerVersion);
core.setOutput('note', releaseNote);
- name: Validate Packages HASH
working-directory: _package
run: |
ls -l
echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
echo "${{needs.build.outputs.osx-x64-sha}} actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.osx-arm64-sha}} actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-x64-sha}} actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-arm-sha}} actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-arm64-sha}} actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
# Create GitHub release
- uses: actions/create-release@master
id: createRelease
@@ -320,16 +303,6 @@ jobs:
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm)
uses: actions/upload-release-asset@v1.0.1
env:
@@ -381,16 +354,6 @@ jobs:
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
@@ -442,16 +405,6 @@ jobs:
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
@@ -503,16 +456,6 @@ jobs:
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
@@ -564,16 +507,6 @@ jobs:
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-arm64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/osx-arm64-trimmedpackages.json
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:

.vscode/launch.json vendored
View File

@@ -13,7 +13,6 @@
"cwd": "${workspaceFolder}/src",
"console": "integratedTerminal",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Run",
@@ -26,7 +25,6 @@
"cwd": "${workspaceFolder}/src",
"console": "integratedTerminal",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Configure",
@@ -40,7 +38,6 @@
"cwd": "${workspaceFolder}/src",
"console": "integratedTerminal",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Debug Worker",
@@ -48,7 +45,6 @@
"request": "attach",
"processName": "Runner.Worker",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Attach Debugger",
@@ -56,8 +52,6 @@
"request": "attach",
"processId": "${command:pickProcess}",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
],
}

View File

@@ -1,83 +0,0 @@
# ADR: Notification Hooks for Runners
## Context
This ADR details the design changes for supporting custom configurable hooks on various runner events. This has been a long-requested user feature ([here](https://github.com/actions/runner/issues/1543), [here](https://github.com/actions/runner/issues/699) and [here](https://github.com/actions/runner/issues/1116)) so that users have more information on runner observability, and the ability to run cleanup and teardown jobs.
This feature is mainly intended for self-hosted runner administrators.
**What we hope to solve with this feature**
1. A runner administrator is able to add custom scripts to clean up their runner environment at the start or end of a job
2. A runner administrator is able to add custom scripts to help set up their runner environment at the beginning of a job, for reasons like [caching](https://github.com/actions/runner/issues/1543#issuecomment-1050346279)
3. A runner administrator is able to grab custom telemetry of jobs running on their self hosted runner
**What we don't think this will solve**
- Policy features that require certain steps run at the beginning or end of all jobs
- This would be better solved in a central place in settings, rather than decentralized on each runner.
- The proposed `Notification Hooks for Runners` feature is limited to self-hosted runners; we don't believe policy features should be
- Reuse scenarios between jobs are covered by [composite actions](https://docs.github.com/en/actions/creating-actions/creating-a-composite-action) and [reusable workflows](https://docs.github.com/en/actions/using-workflows/reusing-workflows)
- Security applications; security should be handled on the policy side on the server, not decentralized on each runner
## Hooks
- We will expose 2 variables that users can set to enable hooks
- `ACTIONS_RUNNER_HOOK_JOB_STARTED`
- `ACTIONS_RUNNER_HOOK_JOB_COMPLETED`
You can set these variables to the **absolute** path of a `.sh` or `.ps1` file.
We will execute `pwsh` (fallback to `powershell`) or `bash` (fallback to `sh`) as appropriate.
- `.sh` files will execute with the args `-e {pathtofile}`
- `.ps1` files will execute with the args `-command \". '{pathtofile}'\"`
We will **not** set the [standard flags we typically set](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsshell) for `runs` commands. So, if you want to set `pipefail` on `bash` for example, you will need to do that in your script.
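For example, here is a minimal sketch of a job-started hook on a self-hosted runner (the `.env` placement, paths, and script contents below are illustrative, not part of this proposal):
```bash
# .env file in the runner root directory (read by the runner on startup)
ACTIONS_RUNNER_HOOK_JOB_STARTED=/opt/runner/hooks/job-started.sh
```
```bash
#!/usr/bin/env bash
# /opt/runner/hooks/job-started.sh -- illustrative cleanup/setup hook
# No standard shell flags are applied for us, so set them explicitly.
set -euo pipefail
echo "Preparing runner $(hostname) for a new job"
rm -rf /tmp/job-scratch && mkdir -p /tmp/job-scratch
```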
### UI
We want to ensure the experience for users invoking workflows stays good; if hooks take too long, your job may feel delayed or broken. So, much like `Set Up Job`, we will generate two new steps automatically in your job, one for each configured hook:
- `Set up runner`
- `Complete runner`
These steps will contain all of the output from invoking your hook, so you will have visibility into the runtime. We will also provide information on the path to the hook, and what shell we are invoking it as, much like we do for `run: ` steps.
### Contexts
When running your hooks, some context on your job may be helpful.
- The scripts will have access to the standard [default environment variables](https://docs.github.com/en/actions/learn-github-actions/environment-variables#default-environment-variables)
- Some of these variables are step specific like `GITHUB_ACTION`, in which case they will not be set
- You can pull the full webhook event payload from `GITHUB_EVENT_PATH`
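For instance, a hook can combine those default variables with the event payload (a sketch; assumes `jq` is available on the runner host):
```bash
#!/usr/bin/env bash
# Illustrative job-started hook reading the default environment variables
echo "Job starting for ${GITHUB_REPOSITORY} on runner ${RUNNER_NAME:-unknown}"
# GITHUB_EVENT_PATH points at the full webhook event payload (JSON)
jq -r '.repository.full_name' "$GITHUB_EVENT_PATH"
```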
### Commands
Should we expose [Commands](https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions) and [Environment Files](https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files)?
**Yes**. Imagine a scenario where a runner administrator is deprecating a runner pool and needs to [warn users](https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-a-warning-message) to swap to a different pool; we should support them in doing this (a sketch follows the list below). However, there are some limitations:
- [save-state](https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#sending-values-to-the-pre-and-post-actions) will **not** be supported, these are not traditional steps with pre and post actions
- [set-output](https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#using-workflow-commands-to-access-toolkit-functions) will **not** be supported, there is no `id` as this is not a traditional step
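For the deprecation scenario above, a hook could emit a standard warning command (a sketch):
```bash
#!/usr/bin/env bash
# Illustrative hook that annotates the job with a warning
echo "::warning::This runner pool is being deprecated; please move your workflows to the new pool"
```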
### Environment Files
We will also enable [Environment Files](https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files) to support setup scenarios for the runner environment.
While a self-hosted runner admin can [set env variables](https://docs.github.com/en/actions/hosting-your-own-runners/using-a-proxy-server-with-self-hosted-runners#using-a-env-file-to-set-the-proxy-configuration), these apply to all jobs. By enabling the ability to `add a path` and `set an env`, we give runner admins the ability to do this dynamically based on the [workflow's environment variables](https://docs.github.com/en/actions/learn-github-actions/environment-variables#default-environment-variables) to empower setup scenarios.
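A sketch of a setup hook using the environment files (the cache path and tool directory are illustrative):
```bash
#!/usr/bin/env bash
# Illustrative job-started hook that prepares the job environment dynamically
echo "TOOL_CACHE=/opt/cache/${GITHUB_REPOSITORY_OWNER}" >> "$GITHUB_ENV"  # set an env variable for the job
echo "/opt/custom-tools/bin" >> "$GITHUB_PATH"                            # add a path for the job
```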
### Exit codes
These are **synchronous** hooks, so they will block job execution while they are being run. Exit code 0 will indicate a successful run of the hook and we will proceed with the job, any other exit code will fail the job with an appropriate annotation.
- There will be no support for `continue-on-error`
## Key Decisions
- We will expose 2 variables that users can set to enable hooks
- `ACTIONS_RUNNER_HOOK_JOB_STARTED`
- `ACTIONS_RUNNER_HOOK_JOB_COMPLETED`
- Users can set these variables to the path of a `.sh` or `.ps1` file, which we will execute when Jobs are started or completed.
- Output from these will be added to a new step at the start/end of a job named `Set up runner` or `Complete runner`.
- These steps will only be generated on runs with these hooks
- These hooks `always()` execute if the env variable is set
- These files will execute as the Runner user, outside of any container specification on the job
- These are **synchronous** hooks
- Runner admins can execute a background process for async hooks if they want
- We will fail the job and halt execution on any exit code that is not 0. The runner admin is responsible for returning the correct exit code and ensuring resiliency.
- This includes ensuring the runner user has access to the file set in the env variable and that the file exists
- There will be no `continue-on-error` type option on launch
- There will be no `timeout` option on launch
## Consequences
- Runner admins have the ability to tie into the runner job execution to publish their own telemetry or perform their own cleanup or setup
- New steps will be added to the UI showcasing the output of these hooks

View File

@@ -6,35 +6,13 @@
Make sure the runner has access to actions service for GitHub.com or GitHub Enterprise Server
- For GitHub.com
- The runner needs to access `https://api.github.com` for downloading actions.
- The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
- The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.
These can be tested by running the following `curl` commands from your self-hosted runner machine:
```
curl -v https://api.github.com/api/v3/zen
curl -v https://vstoken.actions.githubusercontent.com/_apis/health
curl -v https://pipelines.actions.githubusercontent.com/_apis/health
```
- The runner needs to access https://api.github.com for downloading actions.
- The runner needs to access https://vstoken.actions.githubusercontent.com/_apis/.../ for requesting an access token.
- The runner needs to access https://pipelines.actions.githubusercontent.com/_apis/.../ for receiving workflow jobs.
- For GitHub Enterprise Server
- The runner needs to access `https://[hostname]/api/v3` for downloading actions.
- The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token.
- The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs.
These can be tested by running the following `curl` commands from your self-hosted runner machine, replacing `[hostname]` with the hostname of your appliance, for instance `github.example.com`:
```
curl -v https://[hostname]/api/v3/zen
curl -v https://[hostname]/_services/vstoken/_apis/health
curl -v https://[hostname]/_services/pipelines/_apis/health
```
A common cause of these connectivity issues is if your GitHub Enterprise Server appliance is using [the self-signed certificate that is enabled the first time](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls) your appliance is started. As self-signed certificates are not trusted by web browsers and Git clients, these clients (including the GitHub Actions runner) will report certificate warnings.
We recommend [uploading a certificate signed by a trusted authority](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls) to GitHub Enterprise Server, or enabling the built-in [Let's Encrypt support](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls).
- The runner needs to access https://myGHES.com/api/v3 for downloading actions.
- The runner needs to access https://myGHES.com/_services/vstoken/_apis/.../ for requesting an access token.
- The runner needs to access https://myGHES.com/_services/pipelines/_apis/.../ for receiving workflow jobs.
## What is checked?
@@ -64,4 +42,4 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
## Still not working?
Contact [GitHub Support](https://support.github.com) if you have further questions, or log an issue at https://github.com/actions/runner if you think it's a runner issue.
Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.

View File

@@ -4,9 +4,9 @@
Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.
The runner carries its own copy of node.js executable under `<runner_root>/externals/node16/`.
The runner carries it's own copy of node.js executable under `<runner_root>/externals/node12/`.
All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node16/`.
All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node12/`.
> Not the `node` from `$PATH`
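To confirm which `node` will run javascript actions, you can invoke the bundled binary directly (a sketch; adjust the install path, and use `node12` or `node16` depending on your runner version):
```bash
# Compare the runner's bundled node with whatever is on PATH
RUNNER_ROOT=/opt/actions-runner   # adjust to your install location
"$RUNNER_ROOT/externals/node16/bin/node" --version
command -v node && node --version
```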

View File

@@ -5,6 +5,12 @@
## Supported Versions
- macOS High Sierra (10.13) and later versions
- x64 and arm64 (Apple Silicon)
## Apple Silicon M1
The runner is currently not supported on devices with an Apple M1 chip.
We are waiting for official .NET support. You can read more about the [current state of support here](https://github.com/orgs/dotnet/projects/18#card-56812463).
Current .NET project board about M1 support:
https://github.com/orgs/dotnet/projects/18#card-56812463
## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore30)

View File

@@ -1,12 +1,23 @@
## Features
- Added a pre-release package for the `macOS-arm64` architecture
- Note that this package is in pre-release status and may not work with all existing actions
- Bump runtime to dotnet 6 (#1471)
- Show service container logs on teardown (#1563)
## Bugs
- Fixed an issue where live console logs would fail to close (#1903)
- Add masks for multiline secrets from ::add-mask:: (#1521)
- fix Log size and retention settings not work (#1507)
- Refactor SelfUpdater adding L0 tests. (#1564)
- Fix test failure: /bin/sleep on Macos 11 (Monterey) does not accept the suffix s. (#1472)
## Misc
- Update dependency check for dotnet 6. (#1551)
- Produce trimmed down runner packages. (#1556)
- Deleted extra background in github-praph.png, which is displayed in README.md (#1432)
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
@@ -21,7 +32,7 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
```
## OSX x64
## OSX
``` bash
# Create a folder
@@ -32,17 +43,6 @@ curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>
tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
```
## [Pre-release] OSX arm64 (Apple silicon)
``` bash
# Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-osx-arm64-<RUNNER_VERSION>.tar.gz
# Extract the installer
tar xzf ./actions-runner-osx-arm64-<RUNNER_VERSION>.tar.gz
```
## Linux x64
``` bash
@@ -85,28 +85,24 @@ The SHA-256 checksums for the packages included in this build are shown below:
- actions-runner-win-x64-<RUNNER_VERSION>.zip <!-- BEGIN SHA win-x64 --><WIN_X64_SHA><!-- END SHA win-x64 -->
- actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA osx-x64 --><OSX_X64_SHA><!-- END SHA osx-x64 -->
- actions-runner-osx-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA osx-arm64 --><OSX_ARM64_SHA><!-- END SHA osx-arm64 -->
- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->
- actions-runner-win-x64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-x64_noexternals --><WIN_X64_SHA_NOEXTERNALS><!-- END SHA win-x64_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noexternals --><OSX_X64_SHA_NOEXTERNALS><!-- END SHA osx-x64_noexternals -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noexternals --><OSX_ARM64_SHA_NOEXTERNALS><!-- END SHA osx-arm64_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noexternals --><LINUX_X64_SHA_NOEXTERNALS><!-- END SHA linux-x64_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noexternals --><LINUX_ARM64_SHA_NOEXTERNALS><!-- END SHA linux-arm64_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noexternals --><LINUX_ARM_SHA_NOEXTERNALS><!-- END SHA linux-arm_noexternals -->
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-x64_noruntime --><WIN_X64_SHA_NORUNTIME><!-- END SHA win-x64_noruntime -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-x64_noruntime --><OSX_X64_SHA_NORUNTIME><!-- END SHA osx-x64_noruntime -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-arm64_noruntime --><OSX_ARM64_SHA_NORUNTIME><!-- END SHA osx-arm64_noruntime -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-x64_noruntime --><LINUX_X64_SHA_NORUNTIME><!-- END SHA linux-x64_noruntime -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm64_noruntime --><LINUX_ARM64_SHA_NORUNTIME><!-- END SHA linux-arm64_noruntime -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm_noruntime --><LINUX_ARM_SHA_NORUNTIME><!-- END SHA linux-arm_noruntime -->
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-x64_noruntime_noexternals --><WIN_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-x64_noruntime_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noruntime_noexternals --><OSX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-x64_noruntime_noexternals -->
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noruntime_noexternals --><OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-arm64_noruntime_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noruntime_noexternals --><LINUX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-x64_noruntime_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noruntime_noexternals --><LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm64_noruntime_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noruntime_noexternals --><LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm_noruntime_noexternals -->
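To verify a downloaded package against its published checksum locally, the same `shasum` pattern used by the release workflow applies (a sketch; substitute the released version and the checksum from the list above):
```bash
# Example for the linux-x64 package
echo "<LINUX_X64_SHA>  actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz" | shasum -a 256 -c
```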

View File

@@ -1 +1 @@
2.292.0
<Update to ./src/runnerversion when creating release>

View File

@@ -13,7 +13,7 @@ set -e
flags_found=false
while getopts 's:g:n:r:u:l:' opt; do
while getopts 's:g:n:u:l:' opt; do
flags_found=true
case $opt in
@@ -26,9 +26,6 @@ while getopts 's:g:n:r:u:l:' opt; do
n)
runner_name=$OPTARG
;;
r)
runner_group=$OPTARG
;;
u)
svc_user=$OPTARG
;;
@@ -47,7 +44,6 @@ Usage:
-s required scope: repo (:owner/:repo) or org (:organization)
-g optional ghe_hostname: the fully qualified domain name of your GitHub Enterprise Server deployment
-n optional name of the runner, defaults to hostname
-r optional name of the runner group to add the runner to, defaults to the Default group
-u optional user svc will run as, defaults to current
-l optional list of labels (split by comma) applied on the runner"
exit 0
@@ -63,7 +59,6 @@ if ! "$flags_found"; then
runner_name=${3:-$(hostname)}
svc_user=${4:-$USER}
labels=${5}
runner_group=${6}
fi
# apply defaults
@@ -169,8 +164,8 @@ fi
echo
echo "Configuring ${runner_name} @ $runner_url"
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup \"$runner_group\"}"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup "$runner_group"}
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name --labels $labels"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name --labels $labels
#---------------------------------------
# Configuring as a service

View File

@@ -25,12 +25,9 @@
<DefineConstants>$(DefineConstants);X86</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(BUILD_OS)' == 'OSX' AND '$(PackageRuntime)' == 'osx-x64'">
<PropertyGroup Condition="'$(BUILD_OS)' == 'OSX'">
<DefineConstants>$(DefineConstants);X64</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(BUILD_OS)' == 'OSX' AND '$(PackageRuntime)' == 'osx-arm64'">
<DefineConstants>$(DefineConstants);ARM64</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(BUILD_OS)' == 'Linux' AND ('$(PackageRuntime)' == 'linux-x64' OR '$(PackageRuntime)' == '')">
<DefineConstants>$(DefineConstants);X64</DefineConstants>

View File

@@ -1 +1 @@
1d709d93e5d3c6c6c656a61aa6c1781050224788a05b0e6ecc4c3c0408bdf89c
de62d296708908cfd1236e58869aebbc2bae8a8c3d629276968542626c508e37

View File

@@ -1 +1 @@
b92a47cfeaad02255b1f7a377060651b73ae5e5db22a188dbbcb4183ab03a03d
44fcd0422dd98ed17d2c8e9057ff2260c50165f20674236a4ae7d2645a07df25

View File

@@ -1 +1 @@
68a9a8ef0843a8bb74241894f6f63fd76241a82295c5337d3cc7a940a314c78e
e57652cf322ee16ce3af4f9e58f80858746b9e1e60279e991a3b3d9a6baf8d79

View File

@@ -1 +0,0 @@
02c7126ff4d63ee2a0ae390c81434c125630522aadf35903bbeebb1a99d8af99

View File

@@ -1 +1 @@
c9d5a542f8d765168855a89e83ae0a8970d00869041c4f9a766651c04c72b212
bdd247b2ff3f51095524412e2ac588e7a87af805e114d6caf2368366ee7be1ea

View File

@@ -1 +1 @@
d94f2fbaf210297162bc9f3add819d73682c3aa6899e321c3872412b924d5504
d23a0cb9f20c0aa1cddb7a39567cd097020cdeb06a1e952940601d1a405c53b8

View File

@@ -1 +1 @@
6ed30a2c1ee403a610d63e82bb230b9ba846a9c25cec9e4ea8672fb6ed4e1a51
6ca4a0e1c50b7079ead05321dcf5835c1c25f23dc632add8c1c4667d416d103e

View File

@@ -1 +1 @@
711c30c51ec52c9b7a9a2eb399d6ab2ab5ee1dc72de11879f2f36f919f163d78
b5951dc607d782d9c7571a7224e940eb0975bb23c54ff25c7afdbf959a417081

View File

@@ -1 +1 @@
a49479ca4b4988a06c097e8d22c51fd08a11c13f40807366236213d0e008cf6a
af819e92011cc9cbca90e8299f9f7651f2cf6bf45b42920f9a4ca22795486147

View File

@@ -1 +0,0 @@
cc4708962a80325de0baa5ae8484e0cb9ae976ac6a4178c1c0d448b8c52bd7f7

View File

@@ -1 +1 @@
8e97df75230b843462a9b4c578ccec604ee4b4a1066120c85b04374317fa372b
aa0e6bf4bfaabf48c962ea3b262dca042629ab332005f73d282faec908847036

View File

@@ -1 +1 @@
f75a671e5a188c76680739689aa75331a2c09d483dce9c80023518c48fd67a18
40328cff2b8229f9b578f32739183bd8f6aab481c21dadc052b09f1c7e8e4665

View File

@@ -1,6 +1,6 @@
{
"plugins": ["@typescript-eslint"],
"extends": ["plugin:github/recommended"],
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/es6"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
@@ -17,16 +17,13 @@
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/naming-convention": [
"error",
{
"selector": "default",
"format": ["camelCase"]
}
],
"@typescript-eslint/ban-ts-ignore": "error",
"camelcase": "off",
"@typescript-eslint/camelcase": "error",
"@typescript-eslint/class-name-casing": "error",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
@@ -36,6 +33,7 @@
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/no-object-literal-type-assertion": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
@@ -43,19 +41,19 @@
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-interface": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"semi": "off",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error",
"filenames/match-regex" : "off",
"github/no-then" : 1, // warning
"semi": "off"
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true
"es6": true,
"jest/globals": true
}
}

File diff suppressed because it is too large.

View File

@@ -25,10 +25,10 @@
},
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^5.15.0",
"@typescript-eslint/parser": "^2.8.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^8.11.0",
"eslint-plugin-github": "^4.3.5",
"eslint": "^6.8.0",
"eslint-plugin-github": "^2.0.0",
"prettier": "^1.19.1",
"typescript": "^3.6.4"
}

View File

@@ -1,9 +1,9 @@
import * as glob from '@actions/glob'
import * as crypto from 'crypto'
import * as fs from 'fs'
import * as glob from '@actions/glob'
import * as path from 'path'
import * as stream from 'stream'
import * as util from 'util'
import * as path from 'path'
async function run(): Promise<void> {
// arg0 -> node
@@ -45,7 +45,7 @@ async function run(): Promise<void> {
result.end()
if (hasMatch) {
console.log(`Found ${count} files to hash.`)
console.log(`Find ${count} files to hash.`)
console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`)
} else {
console.error(`__OUTPUT____OUTPUT__`)
@@ -53,11 +53,3 @@ async function run(): Promise<void> {
}
run()
.then(out => {
console.log(out)
process.exit(0)
})
.catch(err => {
console.error(err)
process.exit(1)
})

View File

@@ -3,7 +3,7 @@ PACKAGERUNTIME=$1
PRECACHE=$2
NODE_URL=https://nodejs.org/dist
NODE12_VERSION="12.22.7"
NODE12_VERSION="12.13.1"
NODE16_VERSION="16.13.0"
get_abs_path() {
@@ -140,15 +140,10 @@ if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-x64.tar.gz" node16 fix_nested_dir
fi
if [[ "$PACKAGERUNTIME" == "osx-arm64" ]]; then
# node.js v12 doesn't support macOS on arm64.
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-arm64.tar.gz" node16 fix_nested_dir
fi
# Download the external tools for Linux PACKAGERUNTIMEs.
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-v${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
fi

View File

@@ -3,135 +3,94 @@
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
var childProcess = require("child_process");
var path = require("path");
var path = require("path")
var supported = ["linux", "darwin"];
var supported = ['linux', 'darwin']
if (supported.indexOf(process.platform) == -1) {
console.log("Unsupported platform: " + process.platform);
console.log("Supported platforms are: " + supported.toString());
process.exit(1);
console.log('Unsupported platform: ' + process.platform);
console.log('Supported platforms are: ' + supported.toString());
process.exit(1);
}
var stopping = false;
var listener = null;
var exitServiceAfterNFailures = Number(
process.env.GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES
);
var runService = function () {
var listenerExePath = path.join(__dirname, '../bin/Runner.Listener');
var interactive = process.argv[2] === "interactive";
if (exitServiceAfterNFailures <= 0) {
exitServiceAfterNFailures = NaN;
if (!stopping) {
try {
if (interactive) {
console.log('Starting Runner listener interactively');
listener = childProcess.spawn(listenerExePath, ['run'], { env: process.env });
} else {
console.log('Starting Runner listener with startup type: service');
listener = childProcess.spawn(listenerExePath, ['run', '--startuptype', 'service'], { env: process.env });
}
console.log(`Started listener process, pid: ${listener.pid}`);
listener.stdout.on('data', (data) => {
process.stdout.write(data.toString('utf8'));
});
listener.stderr.on('data', (data) => {
process.stdout.write(data.toString('utf8'));
});
listener.on("error", (err) => {
console.log(`Runner listener fail to start with error ${err.message}`);
});
listener.on('close', (code) => {
console.log(`Runner listener exited with error code ${code}`);
if (code === 0) {
console.log('Runner listener exit with 0 return code, stop the service, no retry needed.');
stopping = true;
} else if (code === 1) {
console.log('Runner listener exit with terminated error, stop the service, no retry needed.');
stopping = true;
} else if (code === 2) {
console.log('Runner listener exit with retryable error, re-launch runner in 5 seconds.');
} else if (code === 3) {
console.log('Runner listener exit because of updating, re-launch runner in 5 seconds.');
} else {
console.log('Runner listener exit with undefined return code, re-launch runner in 5 seconds.');
}
if (!stopping) {
setTimeout(runService, 5000);
}
});
} catch (ex) {
console.log(ex);
}
}
}
var consecutiveFailureCount = 0;
var gracefulShutdown = function () {
console.log("Shutting down runner listener");
stopping = true;
if (listener) {
console.log("Sending SIGINT to runner listener to stop");
listener.kill("SIGINT");
console.log("Sending SIGKILL to runner listener");
setTimeout(() => listener.kill("SIGKILL"), 30000).unref();
}
};
var runService = function () {
var listenerExePath = path.join(__dirname, "../bin/Runner.Listener");
var interactive = process.argv[2] === "interactive";
if (!stopping) {
try {
if (interactive) {
console.log("Starting Runner listener interactively");
listener = childProcess.spawn(listenerExePath, ["run"], {
env: process.env,
});
} else {
console.log("Starting Runner listener with startup type: service");
listener = childProcess.spawn(
listenerExePath,
["run", "--startuptype", "service"],
{ env: process.env }
);
}
console.log(`Started listener process, pid: ${listener.pid}`);
listener.stdout.on("data", (data) => {
if (data.toString("utf8").includes("Listening for Jobs")) {
consecutiveFailureCount = 0;
}
process.stdout.write(data.toString("utf8"));
});
listener.stderr.on("data", (data) => {
process.stdout.write(data.toString("utf8"));
});
listener.on("error", (err) => {
console.log(`Runner listener fail to start with error ${err.message}`);
});
listener.on("close", (code) => {
console.log(`Runner listener exited with error code ${code}`);
if (code === 0) {
console.log(
"Runner listener exit with 0 return code, stop the service, no retry needed."
);
stopping = true;
} else if (code === 1) {
console.log(
"Runner listener exit with terminated error, stop the service, no retry needed."
);
stopping = true;
} else if (code === 2) {
console.log(
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
} else if (code === 3 || code === 4) {
console.log(
"Runner listener exit because of updating, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
} else {
var messagePrefix = "Runner listener exit with undefined return code";
consecutiveFailureCount++;
if (
!isNaN(exitServiceAfterNFailures) &&
consecutiveFailureCount >= exitServiceAfterNFailures
) {
console.error(
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
);
gracefulShutdown();
return;
} else {
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
}
}
if (!stopping) {
setTimeout(runService, 5000);
}
});
} catch (ex) {
console.log(ex);
}
}
};
runService();
console.log("Started running service");
console.log('Started running service');
process.on("SIGINT", () => {
gracefulShutdown();
var gracefulShutdown = function (code) {
console.log('Shutting down runner listener');
stopping = true;
if (listener) {
console.log('Sending SIGINT to runner listener to stop');
listener.kill('SIGINT');
console.log('Sending SIGKILL to runner listener');
setTimeout(() => listener.kill('SIGKILL'), 30000).unref();
}
}
process.on('SIGINT', () => {
gracefulShutdown(0);
});
process.on("SIGTERM", () => {
gracefulShutdown();
process.on('SIGTERM', () => {
gracefulShutdown(0);
});

View File

@@ -17,13 +17,7 @@ RUNNER_ROOT=`pwd`
LAUNCH_PATH="${HOME}/Library/LaunchAgents"
PLIST_PATH="${LAUNCH_PATH}/${SVC_NAME}.plist"
TEMPLATE_PATH=$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE
IS_CUSTOM_TEMPLATE=0
if [[ -z $TEMPLATE_PATH ]]; then
TEMPLATE_PATH=./bin/actions.runner.plist.template
else
IS_CUSTOM_TEMPLATE=1
fi
TEMPLATE_PATH=./bin/actions.runner.plist.template
TEMP_PATH=./bin/actions.runner.plist.temp
CONFIG_PATH=.service
@@ -35,11 +29,7 @@ function failed()
}
if [ ! -f "${TEMPLATE_PATH}" ]; then
if [[ $IS_CUSTOM_TEMPLATE = 0 ]]; then
failed "Must run from runner root or install is corrupt"
else
failed "Service file at '$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE' using GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE env variable is not found"
fi
failed "Must run from runner root or install is corrupt"
fi
function install()
@@ -63,7 +53,7 @@ function install()
mkdir -p "${log_path}" || failed "failed to create ${log_path}"
echo Creating ${PLIST_PATH}
sed "s/{{User}}/${USER:-$SUDO_USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
sed "s/{{User}}/${SUDO_USER:-$USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
mv "${TEMP_PATH}" "${PLIST_PATH}" || failed "failed to copy plist"
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.

View File

@@ -43,32 +43,6 @@ module.exports =
/************************************************************************/
/******/ ({
/***/ 82:
/***/ (function(__unusedmodule, exports) {
"use strict";
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @param input input to sanitize into a string
*/
function toCommandValue(input) {
if (input === null || input === undefined) {
return '';
}
else if (typeof input === 'string' || input instanceof String) {
return input;
}
return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
//# sourceMappingURL=utils.js.map
/***/ }),
/***/ 87:
/***/ (function(module) {
@@ -1004,42 +978,6 @@ function regExpEscape (s) {
}
/***/ }),
/***/ 102:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
// For internal use, subject to change.
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__webpack_require__(747));
const os = __importStar(__webpack_require__(87));
const utils_1 = __webpack_require__(82);
function issueCommand(command, message) {
const filePath = process.env[`GITHUB_${command}`];
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
encoding: 'utf8'
});
}
exports.issueCommand = issueCommand;
//# sourceMappingURL=file-command.js.map
/***/ }),
/***/ 281:
@@ -1557,12 +1495,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const glob = __importStar(__webpack_require__(281));
const crypto = __importStar(__webpack_require__(417));
const fs = __importStar(__webpack_require__(747));
const glob = __importStar(__webpack_require__(281));
const path = __importStar(__webpack_require__(622));
const stream = __importStar(__webpack_require__(413));
const util = __importStar(__webpack_require__(669));
const path = __importStar(__webpack_require__(622));
function run() {
var e_1, _a;
return __awaiter(this, void 0, void 0, function* () {
@@ -1613,7 +1551,7 @@ function run() {
}
result.end();
if (hasMatch) {
console.log(`Found ${count} files to hash.`);
console.log(`Find ${count} files to hash.`);
console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`);
}
else {
@@ -1621,15 +1559,7 @@ function run() {
}
});
}
run()
.then(out => {
console.log(out);
process.exit(0);
})
.catch(err => {
console.error(err);
process.exit(1);
});
run();
/***/ }),
@@ -1757,25 +1687,17 @@ module.exports = require("crypto");
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const os = __importStar(__webpack_require__(87));
const utils_1 = __webpack_require__(82);
const os = __webpack_require__(87);
/**
* Commands
*
* Command Format:
* ::name key=value,key=value::message
* ##[name key=value;key=value]message
*
* Examples:
* ::warning::This is the message
* ::set-env name=MY_VAR::some value
* ##[warning]This is the user warning message
* ##[set-secret name=mypassword]definitelyNotAPassword!
*/
function issueCommand(command, properties, message) {
const cmd = new Command(command, properties, message);
@@ -1800,39 +1722,34 @@ class Command {
let cmdStr = CMD_STRING + this.command;
if (this.properties && Object.keys(this.properties).length > 0) {
cmdStr += ' ';
let first = true;
for (const key in this.properties) {
if (this.properties.hasOwnProperty(key)) {
const val = this.properties[key];
if (val) {
if (first) {
first = false;
}
else {
cmdStr += ',';
}
cmdStr += `${key}=${escapeProperty(val)}`;
// safely append the val - avoid blowing up when attempting to
// call .replace() if message is not a string for some reason
cmdStr += `${key}=${escape(`${val || ''}`)},`;
}
}
}
}
cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
cmdStr += CMD_STRING;
// safely append the message - avoid blowing up when attempting to
// call .replace() if message is not a string for some reason
const message = `${this.message || ''}`;
cmdStr += escapeData(message);
return cmdStr;
}
}
function escapeData(s) {
return utils_1.toCommandValue(s)
.replace(/%/g, '%25')
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A');
return s.replace(/\r/g, '%0D').replace(/\n/g, '%0A');
}
function escapeProperty(s) {
return utils_1.toCommandValue(s)
.replace(/%/g, '%25')
function escape(s) {
return s
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A')
.replace(/:/g, '%3A')
.replace(/,/g, '%2C');
.replace(/]/g, '%5D')
.replace(/;/g, '%3B');
}
//# sourceMappingURL=command.js.map
@@ -1852,19 +1769,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const command_1 = __webpack_require__(431);
const file_command_1 = __webpack_require__(102);
const utils_1 = __webpack_require__(82);
const os = __importStar(__webpack_require__(87));
const path = __importStar(__webpack_require__(622));
const os = __webpack_require__(87);
const path = __webpack_require__(622);
/**
* The code to exit an action
*/
@@ -1885,21 +1793,11 @@ var ExitCode;
/**
* Sets env variable for this action and future actions in the job
* @param name the name of the variable to set
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
* @param val the value of the variable
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
const convertedVal = utils_1.toCommandValue(val);
process.env[name] = convertedVal;
const filePath = process.env['GITHUB_ENV'] || '';
if (filePath) {
const delimiter = '_GitHubActionsFileCommandDelimeter_';
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
file_command_1.issueCommand('ENV', commandValue);
}
else {
command_1.issueCommand('set-env', { name }, convertedVal);
}
process.env[name] = val;
command_1.issueCommand('set-env', { name }, val);
}
exports.exportVariable = exportVariable;
/**
@@ -1915,13 +1813,7 @@ exports.setSecret = setSecret;
* @param inputPath
*/
function addPath(inputPath) {
const filePath = process.env['GITHUB_PATH'] || '';
if (filePath) {
file_command_1.issueCommand('PATH', inputPath);
}
else {
command_1.issueCommand('add-path', {}, inputPath);
}
command_1.issueCommand('add-path', {}, inputPath);
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
@@ -1944,22 +1836,12 @@ exports.getInput = getInput;
* Sets the value of an output.
*
* @param name name of the output to set
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
* @param value value to store
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
/**
* Enables or disables the echoing of commands into stdout for the rest of the step.
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
*
*/
function setCommandEcho(enabled) {
command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
@@ -1976,13 +1858,6 @@ exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
* Gets whether Actions Step Debug is on or not
*/
function isDebug() {
return process.env['RUNNER_DEBUG'] === '1';
}
exports.isDebug = isDebug;
/**
* Writes debug message to user log
* @param message debug message
@@ -1993,18 +1868,18 @@ function debug(message) {
exports.debug = debug;
/**
* Adds an error issue
* @param message error issue message. Errors will be converted to string via toString()
* @param message error issue message
*/
function error(message) {
command_1.issue('error', message instanceof Error ? message.toString() : message);
command_1.issue('error', message);
}
exports.error = error;
/**
* Adds an warning issue
* @param message warning issue message. Errors will be converted to string via toString()
* @param message warning issue message
*/
function warning(message) {
command_1.issue('warning', message instanceof Error ? message.toString() : message);
command_1.issue('warning', message);
}
exports.warning = warning;
/**
@@ -2062,9 +1937,8 @@ exports.group = group;
* Saves state for current action, the state can only be retrieved by this action's post job execution.
*
* @param name name of the state to store
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
* @param value value to store
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
command_1.issueCommand('save-state', { name }, value);
}

View File

@@ -10,11 +10,10 @@ if [ -f ".path" ]; then
echo ".path=${PATH}"
fi
nodever=${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node16}
# insert anything to setup env when running as a service
# run the host process which keeps the listener alive
./externals/$nodever/bin/node ./bin/RunnerService.js &
./externals/node12/bin/node ./bin/RunnerService.js &
PID=$!
wait $PID
trap - TERM INT

View File

@@ -10,13 +10,7 @@ arg_2=${2}
RUNNER_ROOT=`pwd`
UNIT_PATH=/etc/systemd/system/${SVC_NAME}
TEMPLATE_PATH=$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE
IS_CUSTOM_TEMPLATE=0
if [[ -z $TEMPLATE_PATH ]]; then
TEMPLATE_PATH=./bin/actions.runner.service.template
else
IS_CUSTOM_TEMPLATE=1
fi
TEMPLATE_PATH=./bin/actions.runner.service.template
TEMP_PATH=./bin/actions.runner.service.temp
CONFIG_PATH=.service
@@ -37,11 +31,7 @@ function failed()
}
if [ ! -f "${TEMPLATE_PATH}" ]; then
if [[ $IS_CUSTOM_TEMPLATE = 0 ]]; then
failed "Must run from runner root or install is corrupt"
else
failed "Service file at '$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE' using GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE env variable is not found"
fi
failed "Must run from runner root or install is corrupt"
fi
#check if we run as root

View File

@@ -30,13 +30,13 @@ date "+[%F %T-%4N] Waiting for $runnerprocessname ($runnerpid) to complete" >> "
while [ -e /proc/$runnerpid ]
do
date "+[%F %T-%4N] Process $runnerpid still running" >> "$logfile" 2>&1
"$rootfolder"/safe_sleep.sh 2
sleep 2
done
date "+[%F %T-%4N] Process $runnerpid finished running" >> "$logfile" 2>&1
# start re-organize folders
date "+[%F %T-%4N] Sleep 1 more second to make sure process exited" >> "$logfile" 2>&1
"$rootfolder"/safe_sleep.sh 1
sleep 1
# the folder structure under runner root will be
# ./bin -> bin.2.100.0 (junction folder)
@@ -125,7 +125,7 @@ attemptedtargetedfix=0
currentplatform=$(uname | awk '{print tolower($0)}')
if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
# We needed a fix for https://github.com/actions/runner/issues/743
# We will recreate the ./externals/nodeXY/bin/node of the past runner version that launched the runnerlistener service
# We will recreate the ./externals/node12/bin/node of the past runner version that launched the runnerlistener service
# Otherwise mac gatekeeper kills the processes we spawn on creation as we are running a process with no backing file
# We need the pid for the nodejs loop, get that here, it's the parent of the runner C# pid
@@ -135,13 +135,7 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
then
# inspect the open file handles to find the node process
# we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks
nodever="node16"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
then
nodever="node12"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
fi
path=$(lsof -a -g "$procgroup" -F n | grep node12/bin/node | grep externals | tail -1 | cut -c2-)
if [[ $? -eq 0 && -n "$path" ]]
then
# trim the last 5 characters of the path '/node'
@@ -154,7 +148,7 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
then
date "+[%F %T-%4N] Creating fallback node at path $path" >> "$logfile" 2>&1
mkdir -p "$trimmedpath"
cp "$rootfolder/externals/$nodever/bin/node" "$path"
cp "$rootfolder/externals/node12/bin/node" "$path"
else
date "+[%F %T-%4N] Path for fallback node exists, skipping creating $path" >> "$logfile" 2>&1
fi

View File

@@ -10,15 +10,23 @@ fi
# Run
shopt -s nocasematch
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
"$DIR"/bin/Runner.Listener run $*
safe_sleep() {
if [ ! -x "$(command -v sleep)" ]; then
if [ ! -x "$(command -v ping)" ]; then
COUNT="0"
while [[ $COUNT != 5000 ]]; do
echo "SLEEP" > /dev/null
COUNT=$[$COUNT+1]
done
else
ping -c 5 127.0.0.1 > /dev/null
fi
else
sleep 5
fi
}
bin/Runner.Listener run $*
returnCode=$?
if [[ $returnCode == 0 ]]; then
echo "Runner listener exit with 0 return code, stop the service, no retry needed."
@@ -28,18 +36,18 @@ elif [[ $returnCode == 1 ]]; then
exit 0
elif [[ $returnCode == 2 ]]; then
echo "Runner listener exit with retryable error, re-launch runner in 5 seconds."
"$DIR"/safe_sleep.sh 5
exit 2
safe_sleep
exit 1
elif [[ $returnCode == 3 ]]; then
# Sleep 5 seconds to wait for the runner update process finish
echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
"$DIR"/safe_sleep.sh 5
exit 2
safe_sleep
exit 1
elif [[ $returnCode == 4 ]]; then
# Sleep 5 seconds to wait for the ephemeral runner update process finish
echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
"$DIR"/safe_sleep.sh 5
exit 2
safe_sleep
exit 1
else
echo "Exiting with unknown error code: ${returnCode}"
exit 0

View File

@@ -19,7 +19,7 @@ rem Run.
rem ********************************************************************************
:launch_helper
copy "%~dp0run-helper.cmd.template" "%~dp0run-helper.cmd" /Y
copy run-helper.cmd.template run-helper.cmd /Y
call "%~dp0run-helper.cmd" %*
if %ERRORLEVEL% EQU 1 (
@@ -27,5 +27,5 @@ if %ERRORLEVEL% EQU 1 (
goto :launch_helper
) else (
echo "Exiting runner..."
exit /b 0
exit 0
)

View File

@@ -9,13 +9,13 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
cp -f run-helper.sh.template run-helper.sh
# run the helper process which keeps the listener alive
while :;
do
"$DIR"/run-helper.sh $*
returnCode=$?
if [[ $returnCode -eq 2 ]]; then
if [[ $returnCode == 1 ]]; then
echo "Restarting runner..."
else
echo "Exiting runner..."

View File

@@ -1,6 +0,0 @@
#!/bin/bash
SECONDS=0
while [[ $SECONDS != $1 ]]; do
:
done
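For reference, the deleted helper above busy-waits by comparing bash's built-in SECONDS counter against its first argument, so callers can approximate a delay even when the external sleep binary is unavailable. An illustrative invocation, matching how the update script above uses it:
./safe_sleep.sh 5   # spins until SECONDS reaches 5, roughly a five-second wait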

View File

@@ -3,7 +3,6 @@ api-ms-win-core-console-l1-2-0.dll
api-ms-win-core-datetime-l1-1-0.dll
api-ms-win-core-debug-l1-1-0.dll
api-ms-win-core-errorhandling-l1-1-0.dll
api-ms-win-core-fibers-l1-1-0.dll
api-ms-win-core-file-l1-1-0.dll
api-ms-win-core-file-l1-2-0.dll
api-ms-win-core-file-l2-1-0.dll
@@ -71,7 +70,7 @@ Microsoft.VisualBasic.dll
Microsoft.Win32.Primitives.dll
Microsoft.Win32.Registry.dll
mscordaccore.dll
mscordaccore_amd64_amd64_6.0.522.21309.dll
mscordaccore_amd64_amd64_6.0.21.52210.dll
mscordbi.dll
mscorlib.dll
mscorrc.debug.dll

View File

@@ -86,7 +86,7 @@ namespace GitHub.Runner.Common
public static class CommandLine
{
//if you are adding a new arg, please make sure you update the
//validOptions dictionary as well present in the CommandSettings.cs
//validArgs array as well present in the CommandSettings.cs
public static class Args
{
public static readonly string Auth = "auth";
@@ -121,7 +121,7 @@ namespace GitHub.Runner.Common
}
//if you are adding a new flag, please make sure you update the
//validOptions dictionary as well present in the CommandSettings.cs
//validFlags array as well present in the CommandSettings.cs
public static class Flags
{
public static readonly string Check = "check";
@@ -149,8 +149,6 @@ namespace GitHub.Runner.Common
public static class Features
{
public static readonly string DiskSpaceWarning = "runner.diskspace.warning";
public static readonly string Node12Warning = "DistributedTask.AddWarningToNode12Action";
public static readonly string UseContainerPathForTemplate = "DistributedTask.UseContainerPathForTemplate";
}
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
@@ -158,9 +156,7 @@ namespace GitHub.Runner.Common
public static readonly string LowDiskSpace = "LOW_DISK_SPACE";
public static readonly string UnsupportedCommand = "UNSUPPORTED_COMMAND";
public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/";
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
public static readonly string Node12DetectedAfterEndOfLife = "Node.js 12 actions are deprecated. Please update the following actions to use Node.js 16: {0}";
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
}
public static class RunnerEvent
@@ -192,12 +188,6 @@ namespace GitHub.Runner.Common
public static readonly string Success = "success";
}
public static class Hooks
{
public static readonly string JobStartedStepName = "Set up runner";
public static readonly string JobCompletedStepName = "Complete runner";
}
public static class Path
{
public static readonly string ActionsDirectory = "_actions";
@@ -229,15 +219,11 @@ namespace GitHub.Runner.Common
public static readonly string AllowUnsupportedStopCommandTokens = "ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS";
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
}
public static class Agent
{
public static readonly string ToolsDirectory = "agent.ToolsDirectory";
// Set this env var to "node12" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
}
public static class System

View File

@@ -60,7 +60,6 @@ namespace GitHub.Runner.Common
case "GitHub.Runner.Worker.IFileCommandExtension":
Add<T>(extensions, "GitHub.Runner.Worker.AddPathFileCommand, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.SetEnvFileCommand, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.CreateStepSummaryCommand, Runner.Worker");
break;
case "GitHub.Runner.Listener.Check.ICheckExtension":
Add<T>(extensions, "GitHub.Runner.Listener.Check.InternetCheck, Runner.Listener");

View File

@@ -84,6 +84,7 @@ namespace GitHub.Runner.Common
this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift1);
this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift2);
this.SecretMasker.AddValueEncoder(ValueEncoders.BashComparand);
this.SecretMasker.AddValueEncoder(ValueEncoders.CommandLineArgumentEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.ExpressionStringEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape);
@@ -216,8 +217,6 @@ namespace GitHub.Runner.Common
_userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture)));
_userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture)));
}
_userAgents.Add(new ProductInfoHeaderValue("CommitSHA", BuildConstants.Source.CommitHash));
}
public string GetDirectory(WellKnownDirectory directory)

View File

@@ -1,39 +1,32 @@
using System;
using GitHub.DistributedTask.WebApi;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.WebSockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using GitHub.Services.WebApi.Utilities.Internal;
using Newtonsoft.Json;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(JobServer))]
public interface IJobServer : IRunnerService, IAsyncDisposable
public interface IJobServer : IRunnerService
{
Task ConnectAsync(VssConnection jobConnection);
void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
// logging and console
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken);
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent;
Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken);
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken);
}
public sealed class JobServer : RunnerService, IJobServer
@@ -41,20 +34,6 @@ namespace GitHub.Runner.Common
private bool _hasConnection;
private VssConnection _connection;
private TaskHttpClient _taskClient;
private ClientWebSocket _websocketClient;
private ServiceEndpoint _serviceEndpoint;
private int totalBatchedLinesAttemptedByWebsocket = 0;
private int failedAttemptsToPostBatchedLinesByWebsocket = 0;
private static readonly TimeSpan _minDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(100);
private static readonly TimeSpan _maxDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(500);
private static readonly int _minWebsocketFailurePercentageAllowed = 50;
private static readonly int _minWebsocketBatchedLinesCountToConsider = 5;
private Task _websocketConnectTask;
public async Task ConnectAsync(VssConnection jobConnection)
{
@@ -63,7 +42,7 @@ namespace GitHub.Runner.Common
int attemptCount = totalAttempts;
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
while (!_connection.HasAuthenticated && attemptCount-- > 0)
{
try
@@ -138,21 +117,6 @@ namespace GitHub.Runner.Common
}
}
public void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint)
{
this._serviceEndpoint = serviceEndpoint;
InitializeWebsocketClient(TimeSpan.Zero);
}
public ValueTask DisposeAsync()
{
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
GC.SuppressFinalize(this);
return ValueTask.CompletedTask;
}
private void CheckConnection()
{
if (!_hasConnection)
@@ -161,53 +125,6 @@ namespace GitHub.Runner.Common
}
}
private void InitializeWebsocketClient(TimeSpan delay)
{
if (_serviceEndpoint.Authorization != null &&
_serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out var accessToken) &&
!string.IsNullOrEmpty(accessToken))
{
if (_serviceEndpoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl) && !string.IsNullOrEmpty(feedStreamUrl))
{
// let's ensure we use the right scheme
feedStreamUrl = feedStreamUrl.Replace("https://", "wss://").Replace("http://", "ws://");
Trace.Info($"Creating websocket client ..." + feedStreamUrl);
this._websocketClient = new ClientWebSocket();
this._websocketClient.Options.SetRequestHeader("Authorization", $"Bearer {accessToken}");
var userAgentValues = new List<ProductInfoHeaderValue>();
userAgentValues.AddRange(UserAgentUtility.GetDefaultRestUserAgent());
userAgentValues.AddRange(HostContext.UserAgents);
this._websocketClient.Options.SetRequestHeader("User-Agent", string.Join(" ", userAgentValues.Select(x => x.ToString())));
this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay);
}
else
{
Trace.Info($"No FeedStreamUrl found, so we will use Rest API calls for sending feed data");
}
}
else
{
Trace.Info($"No access token from the service endpoint");
}
}
private async Task ConnectWebSocketClient(string feedStreamUrl, TimeSpan delay)
{
try
{
Trace.Info($"Attempting to start websocket client with delay {delay}.");
await Task.Delay(delay);
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), default(CancellationToken));
Trace.Info($"Successfully started websocket client.");
}
catch (Exception ex)
{
Trace.Info("Exception caught during websocket client connect, fallback of HTTP would be used now instead of websocket.");
Trace.Error(ex);
}
}
//-----------------------------------------------------------------
// Feedback: WebConsole, TimelineRecords and Logs
//-----------------------------------------------------------------
@@ -218,86 +135,16 @@ namespace GitHub.Runner.Common
return _taskClient.AppendLogContentAsync(scopeIdentifier, hubName, planId, logId, uploadStream, cancellationToken: cancellationToken);
}
public async Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken)
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken)
{
CheckConnection();
var pushedLinesViaWebsocket = false;
if (_websocketConnectTask != null)
{
await _websocketConnectTask;
}
// "_websocketClient != null" implies either: We have a successful connection OR we have to attempt sending again and then reconnect
// ...in other words, if websocket client is null, we will skip sending to websocket and just use rest api calls to send data
if (_websocketClient != null)
{
var linesWrapper = startLine.HasValue ? new TimelineRecordFeedLinesWrapper(stepId, lines, startLine.Value) : new TimelineRecordFeedLinesWrapper(stepId, lines);
var jsonData = StringUtil.ConvertToJson(linesWrapper);
try
{
totalBatchedLinesAttemptedByWebsocket++;
var jsonDataBytes = Encoding.UTF8.GetBytes(jsonData);
// break the message into chunks of 1024 bytes
for (var i = 0; i < jsonDataBytes.Length; i += 1 * 1024)
{
var lastChunk = i + (1 * 1024) >= jsonDataBytes.Length;
var chunk = new ArraySegment<byte>(jsonDataBytes, i, Math.Min(1 * 1024, jsonDataBytes.Length - i));
await _websocketClient.SendAsync(chunk, WebSocketMessageType.Text, endOfMessage: lastChunk, cancellationToken);
}
pushedLinesViaWebsocket = true;
}
catch (Exception ex)
{
failedAttemptsToPostBatchedLinesByWebsocket++;
Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
Trace.Error(ex);
if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
{
// let's consider failure percentage
if (failedAttemptsToPostBatchedLinesByWebsocket * 100 / totalBatchedLinesAttemptedByWebsocket > _minWebsocketFailurePercentageAllowed)
{
Trace.Info($"Exhausted websocket allowed retries, we will not attempt websocket connection for this job to post lines again.");
CloseWebSocket(WebSocketCloseStatus.InternalServerError, cancellationToken);
// By setting it to null, we will ensure that we never try websocket path again for this job
_websocketClient = null;
}
}
if (_websocketClient != null)
{
var delay = BackoffTimerHelper.GetRandomBackoff(_minDelayForWebsocketReconnect, _maxDelayForWebsocketReconnect);
Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}).");
InitializeWebsocketClient(delay);
}
}
}
if (!pushedLinesViaWebsocket && !cancellationToken.IsCancellationRequested)
{
if (startLine.HasValue)
{
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine.Value, cancellationToken: cancellationToken);
}
else
{
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
}
}
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
}
private void CloseWebSocket(WebSocketCloseStatus closeStatus, CancellationToken cancellationToken)
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken)
{
try
{
_websocketClient?.CloseOutputAsync(closeStatus, "Closing websocket", cancellationToken);
}
catch (Exception websocketEx)
{
// In some cases this might be okay since the websocket might be open yet, so just close and don't trace exceptions
Trace.Info($"Failed to close websocket gracefully {websocketEx.GetType().Name}");
}
CheckConnection();
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine, cancellationToken: cancellationToken);
}
public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken)
@@ -339,10 +186,10 @@ namespace GitHub.Runner.Common
//-----------------------------------------------------------------
// Action download info
//-----------------------------------------------------------------
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken)
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken)
{
CheckConnection();
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, jobId, actions, cancellationToken: cancellationToken);
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, actions, cancellationToken: cancellationToken);
}
}
}

View File

@@ -71,7 +71,7 @@ namespace GitHub.Runner.Common
// Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
// Then the dequeue will happen every 500ms.
// In this way, customer still can get instance live console output on job start,
// In this way, customer still can get instance live console output on job start,
// at the same time we can cut the load to server after the build run for more than 60s
private int _webConsoleLineAggressiveDequeueCount = 0;
private const int _webConsoleLineAggressiveDequeueLimit = 4 * 60;
@@ -89,10 +89,6 @@ namespace GitHub.Runner.Common
{
Trace.Entering();
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
_jobServer.InitializeWebsocketClient(serviceEndPoint);
if (_queueInProcess)
{
Trace.Info("No-opt, all queue process tasks are running.");
@@ -160,9 +156,6 @@ namespace GitHub.Runner.Common
await ProcessTimelinesUpdateQueueAsync(runOnce: true);
Trace.Info("Timeline update queue drained.");
Trace.Info($"Disposing job server ...");
await _jobServer.DisposeAsync();
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
}
@@ -299,10 +292,14 @@ namespace GitHub.Runner.Common
{
try
{
// Give at most 60s for each request.
using (var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(60)))
// we will not requeue failed batch, since the web console lines are time sensitive.
if (batch[0].LineNumber.HasValue)
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, timeoutTokenSource.Token);
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber.Value, default(CancellationToken));
}
else
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), default(CancellationToken));
}
if (_firstConsoleOutputs)
@@ -492,8 +489,8 @@ namespace GitHub.Runner.Common
if (runOnce)
{
// continue process timeline records update,
// we might have more records need update,
// continue process timeline records update,
// we might have more records need update,
// since we just create a new sub-timeline
if (pendingSubtimelineUpdate)
{

View File

@@ -3,7 +3,7 @@
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64</RuntimeIdentifiers>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>

View File

@@ -1,22 +0,0 @@
using System;
using System.Collections.ObjectModel;
namespace GitHub.Runner.Common.Util
{
public static class NodeUtil
{
private const string _defaultNodeVersion = "node16";
#if OS_OSX && ARM64
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node16" });
#else
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node12", "node16" });
#endif
public static string GetInternalNodeVersion()
{
var forcedNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion);
return !string.IsNullOrEmpty(forcedNodeVersion) && BuiltInNodeVersions.Contains(forcedNodeVersion) ? forcedNodeVersion : _defaultNodeVersion;
}
}
}
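As the removed Constants comment above notes, the node version used for internal functions (e.g. hashFiles) can be forced through an environment variable that the deleted NodeUtil honors only for built-in versions. A hedged sketch of forcing node12, assuming the variable is exported before the runner starts:
export ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION=node12   # must match a built-in version
./run.sh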

View File

@@ -10,7 +10,6 @@ using System.Net.NetworkInformation;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
@@ -315,12 +314,12 @@ namespace GitHub.Runner.Listener.Check
});
var downloadCertScript = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "downloadCert");
var node = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node} \"{downloadCertScript}\"' ");
var node12 = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{downloadCertScript}\"' ");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
await processInvoker.ExecuteAsync(
hostContext.GetDirectory(WellKnownDirectory.Root),
node,
node12,
$"\"{downloadCertScript}\"",
env,
true,

View File

@@ -6,7 +6,6 @@ using System.Net;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Listener.Check
@@ -145,12 +144,12 @@ namespace GitHub.Runner.Listener.Check
});
var makeWebRequestScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "makeWebRequest.js");
var node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node} \"{makeWebRequestScript}\"' ");
var node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{makeWebRequestScript}\"' ");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
await processInvoker.ExecuteAsync(
HostContext.GetDirectory(WellKnownDirectory.Root),
node,
node12,
$"\"{makeWebRequestScript}\"",
env,
true,

View File

@@ -17,57 +17,43 @@ namespace GitHub.Runner.Listener
private readonly IPromptManager _promptManager;
private readonly Tracing _trace;
// Valid flags for all commands
private readonly string[] genericOptions =
private readonly string[] validCommands =
{
Constants.Runner.CommandLine.Flags.Help,
Constants.Runner.CommandLine.Flags.Version,
Constants.Runner.CommandLine.Flags.Commit,
Constants.Runner.CommandLine.Flags.Check
Constants.Runner.CommandLine.Commands.Configure,
Constants.Runner.CommandLine.Commands.Remove,
Constants.Runner.CommandLine.Commands.Run,
Constants.Runner.CommandLine.Commands.Warmup,
};
// Valid flags and args for specific command - key: command, value: array of valid flags and args
private readonly Dictionary<string, string[]> validOptions = new Dictionary<string, string[]>
private readonly string[] validFlags =
{
// Valid configure flags and args
[Constants.Runner.CommandLine.Commands.Configure] =
new string[]
{
Constants.Runner.CommandLine.Flags.DisableUpdate,
Constants.Runner.CommandLine.Flags.Ephemeral,
Constants.Runner.CommandLine.Flags.Replace,
Constants.Runner.CommandLine.Flags.RunAsService,
Constants.Runner.CommandLine.Flags.Unattended,
Constants.Runner.CommandLine.Args.Auth,
Constants.Runner.CommandLine.Args.Labels,
Constants.Runner.CommandLine.Args.MonitorSocketAddress,
Constants.Runner.CommandLine.Args.Name,
Constants.Runner.CommandLine.Args.PAT,
Constants.Runner.CommandLine.Args.RunnerGroup,
Constants.Runner.CommandLine.Args.Token,
Constants.Runner.CommandLine.Args.Url,
Constants.Runner.CommandLine.Args.UserName,
Constants.Runner.CommandLine.Args.WindowsLogonAccount,
Constants.Runner.CommandLine.Args.WindowsLogonPassword,
Constants.Runner.CommandLine.Args.Work
},
// Valid remove flags and args
[Constants.Runner.CommandLine.Commands.Remove] =
new string[]
{
Constants.Runner.CommandLine.Args.Token,
Constants.Runner.CommandLine.Args.PAT
},
// Valid run flags and args
[Constants.Runner.CommandLine.Commands.Run] =
new string[]
{
Constants.Runner.CommandLine.Flags.Once,
Constants.Runner.CommandLine.Args.StartupType
},
// valid warmup flags and args
[Constants.Runner.CommandLine.Commands.Warmup] =
new string[] { }
Constants.Runner.CommandLine.Flags.Check,
Constants.Runner.CommandLine.Flags.Commit,
Constants.Runner.CommandLine.Flags.DisableUpdate,
Constants.Runner.CommandLine.Flags.Ephemeral,
Constants.Runner.CommandLine.Flags.Help,
Constants.Runner.CommandLine.Flags.Once,
Constants.Runner.CommandLine.Flags.Replace,
Constants.Runner.CommandLine.Flags.RunAsService,
Constants.Runner.CommandLine.Flags.Unattended,
Constants.Runner.CommandLine.Flags.Version
};
private readonly string[] validArgs =
{
Constants.Runner.CommandLine.Args.Auth,
Constants.Runner.CommandLine.Args.Labels,
Constants.Runner.CommandLine.Args.MonitorSocketAddress,
Constants.Runner.CommandLine.Args.Name,
Constants.Runner.CommandLine.Args.PAT,
Constants.Runner.CommandLine.Args.RunnerGroup,
Constants.Runner.CommandLine.Args.StartupType,
Constants.Runner.CommandLine.Args.Token,
Constants.Runner.CommandLine.Args.Url,
Constants.Runner.CommandLine.Args.UserName,
Constants.Runner.CommandLine.Args.WindowsLogonAccount,
Constants.Runner.CommandLine.Args.WindowsLogonPassword,
Constants.Runner.CommandLine.Args.Work
};
// Commands.
@@ -140,48 +126,17 @@ namespace GitHub.Runner.Listener
List<string> unknowns = new List<string>();
// detect unknown commands
unknowns.AddRange(_parser.Commands.Where(x => !validOptions.Keys.Contains(x, StringComparer.OrdinalIgnoreCase)));
unknowns.AddRange(_parser.Commands.Where(x => !validCommands.Contains(x, StringComparer.OrdinalIgnoreCase)));
if (unknowns.Count == 0)
{
// detect unknown flags and args for valid commands
foreach (var command in _parser.Commands)
{
if (validOptions.TryGetValue(command, out string[] options))
{
unknowns.AddRange(_parser.Flags.Where(x => !options.Contains(x, StringComparer.OrdinalIgnoreCase) && !genericOptions.Contains(x, StringComparer.OrdinalIgnoreCase)));
unknowns.AddRange(_parser.Args.Keys.Where(x => !options.Contains(x, StringComparer.OrdinalIgnoreCase)));
}
}
}
// detect unknown flags
unknowns.AddRange(_parser.Flags.Where(x => !validFlags.Contains(x, StringComparer.OrdinalIgnoreCase)));
// detect unknown args
unknowns.AddRange(_parser.Args.Keys.Where(x => !validArgs.Contains(x, StringComparer.OrdinalIgnoreCase)));
return unknowns;
}
public string GetCommandName()
{
string command = string.Empty;
if (Configure)
{
command = Constants.Runner.CommandLine.Commands.Configure;
}
else if (Remove)
{
command = Constants.Runner.CommandLine.Commands.Remove;
}
else if (Run)
{
command = Constants.Runner.CommandLine.Commands.Run;
}
else if (Warmup)
{
command = Constants.Runner.CommandLine.Commands.Warmup;
}
return command;
}
//
// Interactive flags.
//

View File

@@ -613,50 +613,32 @@ namespace GitHub.Runner.Listener.Configuration
throw new ArgumentException($"'{githubUrl}' should point to an org or repository.");
}
int retryCount = 0;
while(retryCount < 3)
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}"));
HostContext.SecretMasker.AddValue(base64EncodingToken);
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
var responseStatus = System.Net.HttpStatusCode.OK;
try
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
responseStatus = response.StatusCode;
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}"));
HostContext.SecretMasker.AddValue(base64EncodingToken);
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
catch(Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
{
retryCount++;
Trace.Error($"Failed to get JIT runner token -- Atempt: {retryCount}");
Trace.Error(ex);
}
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
return null;
}
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
Trace.Info($"Retrying in {backOff.Seconds} seconds");
await Task.Delay(backOff);
}
return null;
}
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
@@ -672,53 +654,35 @@ namespace GitHub.Runner.Listener.Configuration
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runner-registration";
}
int retryCount = 0;
while (retryCount < 3)
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
var bodyObject = new Dictionary<string, string>()
{
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
{"url", githubUrl},
{"runner_event", runnerEvent}
};
var bodyObject = new Dictionary<string, string>()
{
{"url", githubUrl},
{"runner_event", runnerEvent}
};
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
var responseStatus = System.Net.HttpStatusCode.OK;
try
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
responseStatus = response.StatusCode;
if(response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<GitHubAuthResult>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
catch(Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
{
retryCount++;
Trace.Error($"Failed to get tenant credentials -- Atempt: {retryCount}");
Trace.Error(ex);
}
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<GitHubAuthResult>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
return null;
}
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
Trace.Info($"Retrying in {backOff.Seconds} seconds");
await Task.Delay(backOff);
}
return null;
}
}
}

View File

@@ -48,12 +48,13 @@ namespace GitHub.Runner.Listener.Configuration
string repoOrOrgName = regex.Replace(settings.RepoOrOrgName, "-");
serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgName, settings.AgentName);
if (serviceName.Length > MaxServiceNameLength)
if (serviceName.Length > 80)
{
Trace.Verbose($"Calculated service name is too long (> {MaxServiceNameLength} chars). Trying again by calculating a shorter name.");
// Add 5 to add -xxxx random number on the end
int exceededCharLength = serviceName.Length - MaxServiceNameLength + 5;
string repoOrOrgNameSubstring = StringUtil.SubstringPrefix(repoOrOrgName, MaxRepoOrgCharacters);
Trace.Verbose($"Calculated service name is too long (> 80 chars). Trying again by calculating a shorter name.");
int exceededCharLength = serviceName.Length - 80;
string repoOrOrgNameSubstring = StringUtil.SubstringPrefix(repoOrOrgName, 45);
exceededCharLength -= repoOrOrgName.Length - repoOrOrgNameSubstring.Length;
@@ -65,10 +66,6 @@ namespace GitHub.Runner.Listener.Configuration
runnerNameSubstring = StringUtil.SubstringPrefix(settings.AgentName, settings.AgentName.Length - exceededCharLength);
}
// Lets add a suffix with a random number to reduce the chance of collisions between runner names once we truncate
var random = new Random();
var num = random.Next(1000, 9999).ToString();
runnerNameSubstring +=$"-{num}";
serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgNameSubstring, runnerNameSubstring);
}
@@ -76,12 +73,5 @@ namespace GitHub.Runner.Listener.Configuration
Trace.Info($"Service name '{serviceName}' display name '{serviceDisplayName}' will be used for service configuration.");
}
#if (OS_LINUX || OS_OSX)
const int MaxServiceNameLength = 150;
const int MaxRepoOrgCharacters = 70;
#elif OS_WINDOWS
const int MaxServiceNameLength = 80;
const int MaxRepoOrgCharacters = 45;
#endif
}
}

View File

@@ -285,7 +285,7 @@ namespace GitHub.Runner.Listener
{
// at this point, the job execution might encounter some dead lock and even not able to be cancelled.
// no need to localize the exception string should never happen.
throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be cancelled within 45 seconds.");
throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be canceled within 45 seconds.");
}
}
else
@@ -363,7 +363,7 @@ namespace GitHub.Runner.Listener
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// wait till first renew succeed or job request is cancelled
// wait till first renew succeed or job request is canceled
// not even start worker if the first renew fail
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
@@ -704,7 +704,7 @@ namespace GitHub.Runner.Listener
{
// OperationCanceledException may be caused by http timeout or _lockRenewalTokenSource.Cancel();
// Stop renew only on cancellation token fired.
Trace.Info($"job renew has been cancelled, stop renew job request {requestId}.");
Trace.Info($"job renew has been canceled, stop renew job request {requestId}.");
return;
}
catch (Exception ex)
@@ -762,7 +762,7 @@ namespace GitHub.Runner.Listener
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info($"job renew has been cancelled, stop renew job request {requestId}.");
Trace.Info($"job renew has been canceled, stop renew job request {requestId}.");
}
}
else

View File

@@ -1,18 +1,18 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Listener.Configuration;
using GitHub.Services.Common;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using System.Security.Cryptography;
using System.IO;
using System.Text;
using GitHub.Services.OAuth;
using System.Diagnostics;
using System.Runtime.InteropServices;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Listener
{
@@ -33,7 +33,6 @@ namespace GitHub.Runner.Listener
private IRunnerServer _runnerServer;
private TaskAgentSession _session;
private TimeSpan _getNextMessageRetryInterval;
private bool _accessTokenRevoked = false;
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
@@ -112,7 +111,6 @@ namespace GitHub.Runner.Listener
catch (TaskAgentAccessTokenExpiredException)
{
Trace.Info("Runner OAuth token has been revoked. Session creation failed.");
_accessTokenRevoked = true;
throw;
}
catch (Exception ex)
@@ -156,16 +154,9 @@ namespace GitHub.Runner.Listener
{
if (_session != null && _session.SessionId != Guid.Empty)
{
if (!_accessTokenRevoked)
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
{
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
{
await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
}
}
else
{
Trace.Warning("Runner OAuth token has been revoked. Skip deleting session.");
await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
}
}
}
@@ -214,7 +205,6 @@ namespace GitHub.Runner.Listener
catch (TaskAgentAccessTokenExpiredException)
{
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
_accessTokenRevoked = true;
throw;
}
catch (Exception ex)

View File

@@ -95,15 +95,7 @@ namespace GitHub.Runner.Listener
var unknownCommandlines = command.Validate();
if (unknownCommandlines.Count > 0)
{
string commandName = command.GetCommandName();
if (string.IsNullOrEmpty(commandName))
{
terminal.WriteError($"This command does not recognize the command-line input arguments: '{string.Join(", ", unknownCommandlines)}'. For usage refer to: .\\config.cmd --help or ./config.sh --help");
}
else
{
terminal.WriteError($"Unrecognized command-line input arguments for command {commandName}: '{string.Join(", ", unknownCommandlines)}'. For usage refer to: .\\config.cmd --help or ./config.sh --help");
}
terminal.WriteError($"Unrecognized command-line input arguments: '{string.Join(", ", unknownCommandlines)}'. For usage refer to: .\\config.cmd --help or ./config.sh --help");
}
// Defer to the Runner class to execute the command.

View File

@@ -3,7 +3,7 @@
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<OutputType>Exe</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64</RuntimeIdentifiers>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>

View File

@@ -12,7 +12,6 @@ using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System.Linq;
using GitHub.Runner.Listener.Check;
using System.Collections.Generic;
namespace GitHub.Runner.Listener
{
@@ -408,27 +407,6 @@ namespace GitHub.Runner.Listener
{
autoUpdateInProgress = true;
var runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
#if DEBUG
// Can mock the update for testing
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
{
// The mock_update_messages.json file should be an object with keys being the current version and values being the targeted mock version object
// Example: { "2.283.2": {"targetVersion":"2.284.1"}, "2.284.1": {"targetVersion":"2.285.0"}}
var mockUpdatesPath = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), "mock_update_messages.json");
if (File.Exists(mockUpdatesPath))
{
var mockUpdateMessages = JsonUtility.FromString<Dictionary<string, AgentRefreshMessage>>(File.ReadAllText(mockUpdatesPath));
if (mockUpdateMessages.ContainsKey(BuildConstants.RunnerPackage.Version))
{
var mockTargetVersion = mockUpdateMessages[BuildConstants.RunnerPackage.Version].TargetVersion;
_term.WriteLine($"Mocking update, using version {mockTargetVersion} instead of {runnerUpdateMessage.TargetVersion}");
Trace.Info($"Mocking update, using version {mockTargetVersion} instead of {runnerUpdateMessage.TargetVersion}");
runnerUpdateMessage = new AgentRefreshMessage(runnerUpdateMessage.AgentId, mockTargetVersion, runnerUpdateMessage.Timeout);
}
}
}
#endif
var selfUpdater = HostContext.GetService<ISelfUpdater>();
selfUpdateTask = selfUpdater.SelfUpdate(runnerUpdateMessage, jobDispatcher, false, HostContext.RunnerShutdownToken);
Trace.Info("Refresh message received, kick-off selfupdate background process.");

View File

@@ -12,7 +12,6 @@ using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
@@ -105,7 +104,7 @@ namespace GitHub.Runner.Listener
}
}
await DownloadLatestRunner(token, updateMessage.TargetVersion);
await DownloadLatestRunner(token);
Trace.Info($"Download latest runner and unzip into runner root.");
// wait till all running job finish
@@ -207,7 +206,7 @@ namespace GitHub.Runner.Listener
/// </summary>
/// <param name="token"></param>
/// <returns></returns>
private async Task DownloadLatestRunner(CancellationToken token, string targetVersion)
private async Task DownloadLatestRunner(CancellationToken token)
{
string latestRunnerDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), Constants.Path.UpdateDirectory);
IOUtil.DeleteDirectory(latestRunnerDirectory, token);
@@ -267,58 +266,15 @@ namespace GitHub.Runner.Listener
try
{
#if DEBUG
// Much of the update process (targetVersion, archive) is server-side, this is a way to control it from here for testing specific update scenarios
// Add files like 'runner2.281.2.tar.gz' or 'runner2.283.0.zip' (depending on your platform) to your runner root folder
// Note that runners still need to be older than the server's runner version in order to receive an 'AgentRefreshMessage' and trigger this update
// Wrapped in #if DEBUG as this should not be in the RELEASE build
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
{
var waitForDebugger = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE_WAIT_FOR_DEBUGGER"));
if (waitForDebugger)
{
int waitInSeconds = 20;
while (!Debugger.IsAttached && waitInSeconds-- > 0)
{
await Task.Delay(1000);
}
Debugger.Break();
}
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token);
if (_targetPackage.Platform.StartsWith("win"))
{
archiveFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"runner{targetVersion}.zip");
}
else
{
archiveFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"runner{targetVersion}.tar.gz");
}
if (File.Exists(archiveFile))
{
_updateTrace.Enqueue($"Mocking update with file: '{archiveFile}' and targetVersion: '{targetVersion}', nothing is downloaded");
_terminal.WriteLine($"Mocking update with file: '{archiveFile}' and targetVersion: '{targetVersion}', nothing is downloaded");
}
else
{
archiveFile = null;
_terminal.WriteLine($"Mock runner archive not found at {archiveFile} for target version {targetVersion}, proceeding with download instead");
_updateTrace.Enqueue($"Mock runner archive not found at {archiveFile} for target version {targetVersion}, proceeding with download instead");
}
}
#endif
// archiveFile is not null only if we mocked it above
if (string.IsNullOrEmpty(archiveFile))
{
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token);
if (string.IsNullOrEmpty(archiveFile))
{
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
}
await ValidateRunnerHash(archiveFile, packageHashValue);
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
}
await ValidateRunnerHash(archiveFile, packageHashValue);
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
}
catch (Exception ex) when (runtimeTrimmed || externalsTrimmed)
@@ -504,7 +460,7 @@ namespace GitHub.Runner.Listener
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info($"Runner download has been cancelled.");
Trace.Info($"Runner download has been canceled.");
throw;
}
catch (Exception ex)
@@ -805,7 +761,7 @@ namespace GitHub.Runner.Listener
IOUtil.CopyDirectory(_externalsCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory), token);
// try run node.js to see if current node.js works fine after copy over to new location.
var nodeVersions = NodeUtil.BuiltInNodeVersions;
var nodeVersions = new[] { "node12", "node16" };
foreach (var nodeVersion in nodeVersions)
{
var newNodeBinary = Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory, nodeVersion, "bin", $"node{IOUtil.ExeExtension}");
@@ -1070,7 +1026,7 @@ namespace GitHub.Runner.Listener
var stopWatch = Stopwatch.StartNew();
string binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;
@@ -1104,8 +1060,6 @@ namespace GitHub.Runner.Listener
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: token);
if (exitCode != 0)

View File

@@ -3,7 +3,7 @@
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<OutputType>Exe</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64</RuntimeIdentifiers>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>

View File

@@ -166,7 +166,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
}
else
{
// delete the index.lock file left by a previously cancelled build or any operation that caused git.exe to crash last time.
// delete the index.lock file left by a previously canceled build or any operation that caused git.exe to crash last time.
string lockFile = Path.Combine(targetPath, ".git\\index.lock");
if (File.Exists(lockFile))
{
@@ -181,7 +181,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
}
}
// delete the shallow.lock file left by a previously cancelled build or any operation that caused git.exe to crash last time.
// delete the shallow.lock file left by a previously canceled build or any operation that caused git.exe to crash last time.
string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock");
if (File.Exists(shallowLockFile))
{

View File

@@ -150,7 +150,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
}
else
{
// delete the index.lock file left by a previously cancelled build or any operation that caused git.exe to crash last time.
// delete the index.lock file left by a previously canceled build or any operation that caused git.exe to crash last time.
string lockFile = Path.Combine(targetPath, ".git\\index.lock");
if (File.Exists(lockFile))
{
@@ -165,7 +165,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
}
}
// delete the shallow.lock file left by a previously cancelled build or any operation that caused git.exe to crash last time.
// delete the shallow.lock file left by a previously canceled build or any operation that caused git.exe to crash last time.
string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock");
if (File.Exists(shallowLockFile))
{

View File

@@ -3,7 +3,7 @@
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64</RuntimeIdentifiers>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>

View File

@@ -3,7 +3,7 @@
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64</RuntimeIdentifiers>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>

View File

@@ -108,7 +108,7 @@ namespace GitHub.Runner.Sdk
}
// Create a new token source for the parallel query. The parallel query should be
// cancelled after the first error is encountered. Otherwise the number of exceptions
// canceled after the first error is encountered. Otherwise the number of exceptions
// could get out of control for a large directory with access denied on every file.
using (var tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken))
{

View File

@@ -178,7 +178,7 @@ namespace GitHub.Runner.Worker
Message = $"Invoked ::stopCommand:: with token: [{stopToken}]",
Type = JobTelemetryType.ActionCommand
};
context.Global.JobTelemetry.Add(telemetry);
context.JobTelemetry.Add(telemetry);
}
if (isTokenInvalid && !allowUnsecureStopCommandTokens)

View File

@@ -651,10 +651,10 @@ namespace GitHub.Runner.Worker
{
try
{
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
break;
}
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is cancelled.
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is canceled.
{
// UnresolvableActionDownloadInfoException is a 422 client error, don't retry
// Some possible cases are:

View File

@@ -1,17 +1,18 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Handlers;
using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System.Collections.Generic;
namespace GitHub.Runner.Worker
{
@@ -140,7 +141,21 @@ namespace GitHub.Runner.Worker
IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
ExecutionContext.WriteWebhookPayload();
// Makes directory for event_path data
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
Directory.CreateDirectory(workflowDirectory);
var gitHubEvent = ExecutionContext.GetGitHubContext("event");
// adds the GitHub event path/file if the event exists
if (gitHubEvent != null)
{
var workflowFile = Path.Combine(workflowDirectory, "event.json");
Trace.Info($"Write event payload to {workflowFile}");
File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
ExecutionContext.SetGitHubContext("event_path", workflowFile);
}
// Set GITHUB_ACTION_REPOSITORY if this Action is from a repository
if (Action.Reference is Pipelines.RepositoryPathReference repoPathReferenceAction &&
@@ -171,16 +186,8 @@ namespace GitHub.Runner.Worker
// Load the inputs.
ExecutionContext.Debug("Loading inputs");
Dictionary<string, string> inputs;
if (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.UseContainerPathForTemplate) ?? false)
{
inputs = EvaluateStepInputs(stepHost);
}
else
{
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues, ExecutionContext.ExpressionFunctions);
}
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues, ExecutionContext.ExpressionFunctions);
var userInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (KeyValuePair<string, string> input in inputs)
@@ -267,8 +274,8 @@ namespace GitHub.Runner.Worker
actionDirectory: definition.Directory,
localActionContainerSetupSteps: localActionContainerSetupSteps);
// Print out action details and log telemetry
handler.PrepareExecution(Stage);
// Print out action details
handler.PrintActionDetails(Stage);
// Run the task.
try
@@ -307,15 +314,6 @@ namespace GitHub.Runner.Worker
return didFullyEvaluate;
}
private Dictionary<String, String> EvaluateStepInputs(IStepHost stepHost)
{
DictionaryContextData expressionValues = ExecutionContext.GetExpressionValues(stepHost);
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, expressionValues, ExecutionContext.ExpressionFunctions);
return inputs;
}
private string GenerateDisplayName(ActionStep action, DictionaryContextData contextData, IExecutionContext context, out bool didFullyEvaluate)
{
ArgUtil.NotNull(context, nameof(context));

View File

@@ -1,16 +1,16 @@
using System;
using System.Collections.Generic;
using System.IO;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;
using System.Collections.ObjectModel;
using System.Linq;
namespace GitHub.Runner.Worker.Container
{
public class ContainerInfo
{
private IDictionary<string, string> _userMountVolumes;
private List<MountVolume> _mountVolumes;
private IDictionary<string, string> _userPortMappings;
private List<PortMapping> _portMappings;
@@ -68,7 +68,8 @@ namespace GitHub.Runner.Worker.Container
{
foreach (var volume in container.Volumes)
{
MountVolumes.Add(new MountVolume(volume, isUserProvided: true));
UserMountVolumes[volume] = volume;
MountVolumes.Add(new MountVolume(volume));
}
}
@@ -103,20 +104,19 @@ namespace GitHub.Runner.Worker.Container
return _environmentVariables;
}
}
public ReadOnlyCollection<MountVolume> UserMountVolumes
public IDictionary<string, string> UserMountVolumes
{
get
{
return MountVolumes.Where(v => !string.IsNullOrEmpty(v.UserProvidedValue)).ToList().AsReadOnly();
}
}
public ReadOnlyCollection<MountVolume> SystemMountVolumes
{
get
{
return MountVolumes.Where(v => string.IsNullOrEmpty(v.UserProvidedValue)).ToList().AsReadOnly();
if (_userMountVolumes == null)
{
_userMountVolumes = new Dictionary<string, string>();
}
return _userMountVolumes;
}
}
public List<MountVolume> MountVolumes
{
get
@@ -260,27 +260,18 @@ namespace GitHub.Runner.Worker.Container
public class MountVolume
{
public string UserProvidedValue { get; set; }
public MountVolume(string sourceVolumePath, string targetVolumePath, bool readOnly = false)
{
this.SourceVolumePath = sourceVolumePath;
this.TargetVolumePath = targetVolumePath;
this.ReadOnly = readOnly;
}
public MountVolume(string fromString)
{
ParseVolumeString(fromString);
}
public MountVolume(string fromString, bool isUserProvided)
{
ParseVolumeString(fromString);
if (isUserProvided)
{
UserProvidedValue = fromString;
}
}
private void ParseVolumeString(string volume)
{
var volumeSplit = volume.Split(":");

View File

@@ -192,12 +192,13 @@ namespace GitHub.Runner.Worker
{
Trace.Info($"User provided port: {port.Value}");
}
foreach (var mount in container.UserMountVolumes)
foreach (var volume in container.UserMountVolumes)
{
Trace.Info($"User provided volume: {mount.UserProvidedValue}");
Trace.Info($"User provided volume: {volume.Value}");
var mount = new MountVolume(volume.Value);
if (string.Equals(mount.SourceVolumePath, "/", StringComparison.OrdinalIgnoreCase))
{
executionContext.Warning($"Volume mount {mount.UserProvidedValue} is going to mount '/' into the container which may cause file ownership change in the entire file system and cause Actions Runner to lose permission to access the disk.");
executionContext.Warning($"Volume mount {volume.Value} is going to mount '/' into the container which may cause file ownership change in the entire file system and cause Actions Runner to lose permission to access the disk.");
}
}

View File

@@ -1,21 +1,25 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using System.Web;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container;
using GitHub.Runner.Worker.Handlers;
using GitHub.Services.WebApi;
using Newtonsoft.Json;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using Pipelines = GitHub.DistributedTask.Pipelines;
@@ -48,7 +52,8 @@ namespace GitHub.Runner.Worker
Dictionary<string, string> IntraActionState { get; }
Dictionary<string, VariableValue> JobOutputs { get; }
ActionsEnvironmentReference ActionsEnvironment { get; }
ActionsStepTelemetry StepTelemetry { get; }
List<ActionsStepTelemetry> ActionsStepsTelemetry { get; }
List<JobTelemetry> JobTelemetry { get; }
DictionaryContextData ExpressionValues { get; }
IList<IFunctionInfo> ExpressionFunctions { get; }
JobContext JobContext { get; }
@@ -104,21 +109,12 @@ namespace GitHub.Runner.Worker
// others
void ForceTaskComplete();
void RegisterPostJobStep(IStep step);
void PublishStepTelemetry();
void ApplyContinueOnError(TemplateToken continueOnError);
void UpdateGlobalStepsContext();
void WriteWebhookPayload();
}
public sealed class ExecutionContext : RunnerService, IExecutionContext
{
private const int _maxIssueCount = 10;
private const int _throttlingDelayReportThreshold = 10 * 1000; // Don't report throttling with less than 10 seconds delay
private const int _maxIssueMessageLength = 4096; // Don't send issue with huge message since we can't forward them from actions to check annotation.
private const int _maxIssueCountInTelemetry = 3; // Only send the first 3 issues to telemetry
private const int _maxIssueMessageLengthInTelemetry = 256; // Only send the first 256 characters of issue message to telemetry
private readonly TimelineRecord _record = new TimelineRecord();
private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new Dictionary<Guid, TimelineRecord>();
@@ -143,7 +139,6 @@ namespace GitHub.Runner.Worker
// only job level ExecutionContext will track throttling delay.
private long _totalThrottlingDelayInMilliseconds = 0;
private bool _stepTelemetryPublished = false;
public Guid Id => _record.Id;
public Guid EmbeddedId { get; private set; }
@@ -157,7 +152,8 @@ namespace GitHub.Runner.Worker
public Dictionary<string, VariableValue> JobOutputs { get; private set; }
public ActionsEnvironmentReference ActionsEnvironment { get; private set; }
public ActionsStepTelemetry StepTelemetry { get; } = new ActionsStepTelemetry();
public List<ActionsStepTelemetry> ActionsStepsTelemetry { get; private set; }
public List<JobTelemetry> JobTelemetry { get; private set; }
public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
public IList<IFunctionInfo> ExpressionFunctions { get; } = new List<IFunctionInfo>();
@@ -277,9 +273,9 @@ namespace GitHub.Runner.Worker
{
Trace.Info($"'post' of '{actionRunner.DisplayName}' already push to child post step stack.");
}
else
else
{
Root.EmbeddedStepsWithPostRegistered[actionRunner.Action.Id] = actionRunner.Condition;
Root.EmbeddedStepsWithPostRegistered[actionRunner.Action.Id] = actionRunner.Condition;
}
return;
}
@@ -298,20 +294,7 @@ namespace GitHub.Runner.Worker
Root.PostJobSteps.Push(step);
}
public IExecutionContext CreateChild(
Guid recordId,
string displayName,
string refName,
string scopeName,
string contextName,
ActionRunStage stage,
Dictionary<string, string> intraActionState = null,
int? recordOrder = null,
IPagingLogger logger = null,
bool isEmbedded = false,
CancellationTokenSource cancellationTokenSource = null,
Guid embeddedId = default(Guid),
string siblingScopeName = null)
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, ActionRunStage stage, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null)
{
Trace.Entering();
@@ -323,6 +306,7 @@ namespace GitHub.Runner.Worker
child.Stage = stage;
child.EmbeddedId = embeddedId;
child.SiblingScopeName = siblingScopeName;
child.JobTelemetry = JobTelemetry;
if (intraActionState == null)
{
child.IntraActionState = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -362,9 +346,6 @@ namespace GitHub.Runner.Worker
}
child.IsEmbedded = isEmbedded;
child.StepTelemetry.StepId = recordId;
child.StepTelemetry.Stage = stage.ToString();
child.StepTelemetry.IsEmbedded = isEmbedded;
return child;
}
@@ -373,13 +354,7 @@ namespace GitHub.Runner.Worker
/// An embedded execution context shares the same record ID, record name, logger,
/// and a linked cancellation token.
/// </summary>
public IExecutionContext CreateEmbeddedChild(
string scopeName,
string contextName,
Guid embeddedId,
ActionRunStage stage,
Dictionary<string, string> intraActionState = null,
string siblingScopeName = null)
public IExecutionContext CreateEmbeddedChild(string scopeName, string contextName, Guid embeddedId, ActionRunStage stage, Dictionary<string, string> intraActionState = null, string siblingScopeName = null)
{
return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token), intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName);
}
@@ -429,8 +404,6 @@ namespace GitHub.Runner.Worker
}
}
PublishStepTelemetry();
if (Root != this)
{
// only dispose TokenSource for step level ExecutionContext
@@ -439,19 +412,14 @@ namespace GitHub.Runner.Worker
_logger.End();
UpdateGlobalStepsContext();
return Result.Value;
}
public void UpdateGlobalStepsContext()
{
// Skip if generated context name. Generated context names start with "__". After 3.2 the server will never send an empty context name.
if (!string.IsNullOrEmpty(ContextName) && !ContextName.StartsWith("__", StringComparison.Ordinal))
{
Global.StepsContext.SetOutcome(ScopeName, ContextName, (Outcome ?? Result ?? TaskResult.Succeeded).ToActionResult());
Global.StepsContext.SetConclusion(ScopeName, ContextName, (Result ?? TaskResult.Succeeded).ToActionResult());
}
return Result.Value;
}
public void SetRunnerContext(string name, string value)
@@ -551,20 +519,11 @@ namespace GitHub.Runner.Worker
}
issue.Message = HostContext.SecretMasker.MaskSecrets(issue.Message);
if (issue.Message.Length > _maxIssueMessageLength)
{
issue.Message = issue.Message[.._maxIssueMessageLength];
}
// Tracking the line number (logFileLineNumber) and step number (stepNumber) for each issue that gets created
// The Actions UI on the run summary page uses both values to link to the exact locations in the logs where annotations originate from
if (_record.Order != null)
{
issue.Data["stepNumber"] = _record.Order.ToString();
}
if (issue.Type == IssueType.Error)
{
// tracking line number for each issue in log file
// log UI use this to navigate from issue to log
if (!string.IsNullOrEmpty(logMessage))
{
long logLineNumber = Write(WellKnownTags.Error, logMessage);
@@ -580,6 +539,8 @@ namespace GitHub.Runner.Worker
}
else if (issue.Type == IssueType.Warning)
{
// tracking the line number for each issue in the log file
// the log UI uses this to navigate from an issue to the log
if (!string.IsNullOrEmpty(logMessage))
{
long logLineNumber = Write(WellKnownTags.Warning, logMessage);
@@ -595,6 +556,9 @@ namespace GitHub.Runner.Worker
}
else if (issue.Type == IssueType.Notice)
{
// tracking the line number for each issue in the log file
// the log UI uses this to navigate from an issue to the log
if (!string.IsNullOrEmpty(logMessage))
{
long logLineNumber = Write(WellKnownTags.Notice, logMessage);
@@ -684,29 +648,22 @@ namespace GitHub.Runner.Worker
// Variables
Global.Variables = new Variables(HostContext, message.Variables);
if (Global.Variables.GetBoolean("DistributedTask.ForceInternalNodeVersionOnRunnerTo12") ?? false)
{
Environment.SetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion, "node12");
}
// Environment variables shared across all actions
Global.EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
// Job defaults shared across all actions
Global.JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase);
// Job Telemetry
Global.JobTelemetry = new List<JobTelemetry>();
// ActionsStepTelemetry for entire job
Global.StepsTelemetry = new List<ActionsStepTelemetry>();
// Job Outputs
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
// Actions environment
ActionsEnvironment = message.ActionsEnvironment;
// ActionsStepTelemetry
ActionsStepsTelemetry = new List<ActionsStepTelemetry>();
JobTelemetry = new List<JobTelemetry>();
// Service container info
Global.ServiceContainers = new List<ContainerInfo>();
@@ -938,83 +895,6 @@ namespace GitHub.Runner.Worker
return Root._matchers ?? Array.Empty<IssueMatcherConfig>();
}
public void PublishStepTelemetry()
{
if (!_stepTelemetryPublished)
{
// Add to the global steps telemetry only if we have something to log.
if (!string.IsNullOrEmpty(StepTelemetry?.Type))
{
if (!IsEmbedded)
{
StepTelemetry.Result = _record.Result;
}
if (!IsEmbedded &&
_record.FinishTime != null &&
_record.StartTime != null)
{
StepTelemetry.ExecutionTimeInSeconds = (int)Math.Ceiling((_record.FinishTime - _record.StartTime)?.TotalSeconds ?? 0);
}
if (!IsEmbedded &&
_record.Issues.Count > 0)
{
foreach (var issue in _record.Issues)
{
if ((issue.Type == IssueType.Error || issue.Type == IssueType.Warning) &&
!string.IsNullOrEmpty(issue.Message))
{
string issueTelemetry;
if (issue.Message.Length > _maxIssueMessageLengthInTelemetry)
{
issueTelemetry = $"{issue.Message[.._maxIssueMessageLengthInTelemetry]}";
}
else
{
issueTelemetry = issue.Message;
}
StepTelemetry.ErrorMessages.Add(issueTelemetry);
// Only send over the first 3 issues to avoid sending too much data.
if (StepTelemetry.ErrorMessages.Count >= _maxIssueCountInTelemetry)
{
break;
}
}
}
}
Trace.Info($"Publish step telemetry for current step {StringUtil.ConvertToJson(StepTelemetry)}.");
Global.StepsTelemetry.Add(StepTelemetry);
_stepTelemetryPublished = true;
}
}
else
{
Trace.Info($"Step telemetry has already been published.");
}
}
public void WriteWebhookPayload()
{
// Makes directory for event_path data
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
Directory.CreateDirectory(workflowDirectory);
var gitHubEvent = GetGitHubContext("event");
// adds the GitHub event path/file if the event exists
if (gitHubEvent != null)
{
var workflowFile = Path.Combine(workflowDirectory, "event.json");
Trace.Info($"Write event payload to {workflowFile}");
File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
SetGitHubContext("event_path", workflowFile);
}
}
private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order)
{
_mainTimelineId = timelineId;
@@ -1069,36 +949,6 @@ namespace GitHub.Runner.Worker
var newGuid = Guid.NewGuid();
return CreateChild(newGuid, displayName, newGuid.ToString("N"), null, null, ActionRunStage.Post, intraActionState, _childTimelineRecordOrder - Root.PostJobSteps.Count, siblingScopeName: siblingScopeName);
}
public void ApplyContinueOnError(TemplateToken continueOnErrorToken)
{
if (Result != TaskResult.Failed)
{
return;
}
var continueOnError = false;
try
{
var templateEvaluator = this.ToPipelineTemplateEvaluator();
continueOnError = templateEvaluator.EvaluateStepContinueOnError(continueOnErrorToken, ExpressionValues, ExpressionFunctions);
}
catch (Exception ex)
{
Trace.Info("The step failed and an error occurred when attempting to determine whether to continue on error.");
Trace.Error(ex);
this.Error("The step failed and an error occurred when attempting to determine whether to continue on error.");
this.Error(ex);
}
if (continueOnError)
{
Outcome = Result;
Result = TaskResult.Succeeded;
Trace.Info($"Updated step result (continue on error)");
}
UpdateGlobalStepsContext();
}
}
// The Error/Warning/etc methods are created as extension methods to simplify unit testing.
@@ -1120,6 +970,7 @@ namespace GitHub.Runner.Worker
context.Error(ex.Message);
context.Debug(ex.ToString());
}
// Do not add a format string overload. See comment on ExecutionContext.Write().
public static void Error(this IExecutionContext context, string message)
{
@@ -1193,66 +1044,6 @@ namespace GitHub.Runner.Worker
{
return new TemplateTraceWriter(context);
}
public static DictionaryContextData GetExpressionValues(this IExecutionContext context, IStepHost stepHost)
{
if (stepHost is ContainerStepHost)
{
var expressionValues = context.ExpressionValues.Clone() as DictionaryContextData;
context.UpdatePathsInExpressionValues("github", expressionValues, stepHost);
context.UpdatePathsInExpressionValues("runner", expressionValues, stepHost);
return expressionValues;
}
else
{
return context.ExpressionValues.Clone() as DictionaryContextData;
}
}
private static void UpdatePathsInExpressionValues(this IExecutionContext context, string contextName, DictionaryContextData expressionValues, IStepHost stepHost)
{
var dict = expressionValues[contextName].AssertDictionary($"expected context {contextName} to be a dictionary");
context.ResolvePathsInExpressionValuesDictionary(dict, stepHost);
expressionValues[contextName] = dict;
}
private static void ResolvePathsInExpressionValuesDictionary(this IExecutionContext context, DictionaryContextData dict, IStepHost stepHost)
{
foreach (var key in dict.Keys.ToList())
{
if (dict[key] is StringContextData)
{
var value = dict[key].ToString();
if (!string.IsNullOrEmpty(value))
{
dict[key] = new StringContextData(stepHost.ResolvePathForStepHost(value));
}
}
else if (dict[key] is DictionaryContextData)
{
var innerDict = dict[key].AssertDictionary("expected dictionary");
context.ResolvePathsInExpressionValuesDictionary(innerDict, stepHost);
var updatedDict = new DictionaryContextData();
foreach (var k in innerDict.Keys.ToList())
{
updatedDict[k] = innerDict[k];
}
dict[key] = updatedDict;
}
else if (dict[key] is CaseSensitiveDictionaryContextData)
{
var innerDict = dict[key].AssertDictionary("expected dictionary");
context.ResolvePathsInExpressionValuesDictionary(innerDict, stepHost);
var updatedDict = new CaseSensitiveDictionaryContextData();
foreach (var k in innerDict.Keys.ToList())
{
updatedDict[k] = innerDict[k];
}
dict[key] = updatedDict;
}
}
}
}
internal sealed class TemplateTraceWriter : ObjectTemplating.ITraceWriter
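
The removed GetExpressionValues / UpdatePathsInExpressionValues / ResolvePathsInExpressionValuesDictionary extensions above clone the expression values and translate host paths into container paths before a container step runs. A simplified sketch of the same recursive rewrite over plain nested dictionaries, standing in for DictionaryContextData and ContainerStepHost:

using System;
using System.Collections.Generic;

static class PathRewriteSketch
{
    // Apply a path translation (e.g. "/home/runner/work" -> "/__w") to every
    // string value in a nested dictionary, recursing into child dictionaries.
    public static void Rewrite(IDictionary<string, object> values, Func<string, string> translate)
    {
        foreach (var key in new List<string>(values.Keys))
        {
            if (values[key] is string s && !string.IsNullOrEmpty(s))
            {
                values[key] = translate(s);
            }
            else if (values[key] is IDictionary<string, object> nested)
            {
                Rewrite(nested, translate);
            }
        }
    }
}
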

View File

@@ -7,7 +7,6 @@ using GitHub.Runner.Sdk;
using System.Reflection;
using System.Threading;
using System.Collections.Generic;
using GitHub.Runner.Common.Util;
namespace GitHub.Runner.Worker.Expressions
{
@@ -63,7 +62,7 @@ namespace GitHub.Runner.Worker.Expressions
string binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
string runnerRoot = new DirectoryInfo(binDir).Parent.FullName;
string node = Path.Combine(runnerRoot, "externals", NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
string node = Path.Combine(runnerRoot, "externals", "node12", "bin", $"node{IOUtil.ExeExtension}");
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;
var p = new ProcessInvoker(new HashFilesTrace(context.Trace));
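
Both versions of the hashFiles expression function shell out to the bundled Node runtime to run the hashFiles script; this hunk only changes whether the externals folder is hard-coded to node12 or resolved through NodeUtil. A rough sketch of that invocation with System.Diagnostics.Process, assuming (as the `environment: env` argument in the listener hunk suggests) that the glob patterns reach the script through an environment variable:

using System.Diagnostics;

static class HashFilesSketch
{
    // Run "node <hashFilesScript>" and return whatever hash the script prints to stdout.
    public static string Run(string nodePath, string hashFilesScript, string patterns, string workingDirectory)
    {
        var psi = new ProcessStartInfo
        {
            FileName = nodePath,
            Arguments = $"\"{hashFilesScript}\"",
            WorkingDirectory = workingDirectory,
            RedirectStandardOutput = true,
            UseShellExecute = false,
        };
        psi.Environment["patterns"] = patterns; // assumed input channel for the script

        using var process = Process.Start(psi);
        string output = process.StandardOutput.ReadToEnd().Trim();
        process.WaitForExit();
        return output;
    }
}
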

View File

@@ -15,7 +15,7 @@ namespace GitHub.Runner.Worker
{
void InitializeFiles(IExecutionContext context, ContainerInfo container);
void ProcessFiles(IExecutionContext context, ContainerInfo container);
}
public sealed class FileCommandManager : RunnerService, IFileCommandManager
@@ -57,7 +57,7 @@ namespace GitHub.Runner.Worker
TryDeleteFile(newPath);
File.Create(newPath).Dispose();
var pathToSet = container != null ? container.TranslateToContainerPath(newPath) : newPath;
var pathToSet = container != null ? container.TranslateToContainerPath(newPath) : newPath;
context.SetGitHubContext(fileCommand.ContextName, pathToSet);
}
}
@@ -66,7 +66,7 @@ namespace GitHub.Runner.Worker
{
foreach (var fileCommand in _commandExtensions)
{
try
try
{
fileCommand.ProcessCommand(context, Path.Combine(_fileCommandDirectory, fileCommand.FilePrefix + _fileSuffix),container);
}
@@ -181,10 +181,6 @@ namespace GitHub.Runner.Worker
{
throw new Exception($"Invalid environment variable value. Matching delimiter not found '{delimiter}'");
}
if (newline == null)
{
throw new Exception($"Invalid environment variable value. EOF marker missing new line.");
}
endIndex = index - newline.Length;
tempLine = ReadLine(text, ref index, out newline);
}
@@ -263,72 +259,4 @@ namespace GitHub.Runner.Worker
return text.Substring(originalIndex, lfIndex - originalIndex);
}
}
public sealed class CreateStepSummaryCommand : RunnerService, IFileCommandExtension
{
public const int AttachmentSizeLimit = 1024 * 1024;
public string ContextName => "step_summary";
public string FilePrefix => "step_summary_";
public Type ExtensionType => typeof(IFileCommandExtension);
public void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container)
{
if (!context.Global.Variables.GetBoolean("DistributedTask.UploadStepSummary") ?? true)
{
Trace.Info("Step Summary is disabled; skipping attachment upload");
return;
}
if (String.IsNullOrEmpty(filePath) || !File.Exists(filePath))
{
Trace.Info($"Step Summary file ({filePath}) does not exist; skipping attachment upload");
return;
}
try
{
var fileSize = new FileInfo(filePath).Length;
if (fileSize == 0)
{
Trace.Info($"Step Summary file ({filePath}) is empty; skipping attachment upload");
return;
}
if (fileSize > AttachmentSizeLimit)
{
context.Error(String.Format(Constants.Runner.UnsupportedSummarySize, AttachmentSizeLimit / 1024, fileSize / 1024));
Trace.Info($"Step Summary file ({filePath}) is too large ({fileSize} bytes); skipping attachment upload");
return;
}
Trace.Verbose($"Step Summary file exists: {filePath} and has a file size of {fileSize} bytes");
var scrubbedFilePath = filePath + "-scrubbed";
using (var streamReader = new StreamReader(filePath))
using (var streamWriter = new StreamWriter(scrubbedFilePath))
{
string line;
while ((line = streamReader.ReadLine()) != null)
{
var maskedLine = HostContext.SecretMasker.MaskSecrets(line);
streamWriter.WriteLine(maskedLine);
}
}
var attachmentName = context.Id.ToString();
Trace.Info($"Queueing file ({filePath}) for attachment upload ({attachmentName})");
// Attachments must be added to the parent context (job), not the current context (step)
context.Root.QueueAttachFile(ChecksAttachmentType.StepSummary, attachmentName, scrubbedFilePath);
}
catch (Exception e)
{
Trace.Error($"Error while processing file ({filePath}): {e}");
context.Error($"Failed to create step summary using 'GITHUB_STEP_SUMMARY': {e.Message}");
}
}
}
}
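
CreateStepSummaryCommand, removed in this diff, uploads GITHUB_STEP_SUMMARY as a check attachment only after masking secrets line by line and enforcing a 1 MiB cap. A minimal sketch of that scrub-and-limit step, with the masker passed in as a delegate instead of HostContext.SecretMasker:

using System;
using System.IO;

static class StepSummarySketch
{
    public const long SizeLimitBytes = 1024 * 1024;

    // Returns the path of a scrubbed copy, or null if the file is missing, empty, or too large.
    public static string Scrub(string summaryPath, Func<string, string> maskSecrets)
    {
        if (string.IsNullOrEmpty(summaryPath) || !File.Exists(summaryPath)) return null;
        long size = new FileInfo(summaryPath).Length;
        if (size == 0 || size > SizeLimitBytes) return null;

        string scrubbedPath = summaryPath + "-scrubbed";
        using var reader = new StreamReader(summaryPath);
        using var writer = new StreamWriter(scrubbedPath);
        string line;
        while ((line = reader.ReadLine()) != null)
        {
            writer.WriteLine(maskSecrets(line)); // mask registered secrets line by line
        }
        return scrubbedPath;
    }
}
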

View File

@@ -8,10 +8,10 @@ namespace GitHub.Runner.Worker
{
private readonly HashSet<string> _contextEnvAllowlist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"action",
"action_path",
"action_ref",
"action_repository",
"action",
"actor",
"api_url",
"base_ref",
@@ -22,20 +22,18 @@ namespace GitHub.Runner.Worker
"head_ref",
"job",
"path",
"ref",
"ref_name",
"ref_protected",
"ref_type",
"ref",
"repository_owner",
"repository",
"repository_owner",
"retention_days",
"run_attempt",
"run_id",
"run_number",
"server_url",
"sha",
"step_summary",
"triggering_actor",
"workflow",
"workspace",
};

View File

@@ -14,8 +14,6 @@ namespace GitHub.Runner.Worker
public PlanFeatures Features { get; set; }
public IList<String> FileTable { get; set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; set; }
public List<ActionsStepTelemetry> StepsTelemetry { get; set; }
public List<JobTelemetry> JobTelemetry { get; set; }
public TaskOrchestrationPlanReference Plan { get; set; }
public List<string> PrependPath { get; set; }
public List<ContainerInfo> ServiceContainers { get; set; }

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Expressions2;
@@ -11,6 +13,7 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Expressions;
using Pipelines = GitHub.DistributedTask.Pipelines;
@@ -39,7 +42,7 @@ namespace GitHub.Runner.Worker.Handlers
{
ArgUtil.NotNull(Data.PreSteps, nameof(Data.PreSteps));
steps = Data.PreSteps;
}
}
else if (stage == ActionRunStage.Post)
{
ArgUtil.NotNull(Data.PostSteps, nameof(Data.PostSteps));
@@ -57,14 +60,14 @@ namespace GitHub.Runner.Worker.Handlers
Trace.Info($"Skipping executing post step id: {step.Id}, name: ${step.DisplayName}");
}
}
}
}
else
{
ArgUtil.NotNull(Data.Steps, nameof(Data.Steps));
steps = Data.Steps;
}
// Set extra telemetry based on the current context.
// Add Telemetry to JobContext to send with JobCompleteMessage
if (stage == ActionRunStage.Main)
{
var hasRunsStep = false;
@@ -80,16 +83,20 @@ namespace GitHub.Runner.Worker.Handlers
hasUsesStep = true;
}
}
ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre;
ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost;
ExecutionContext.StepTelemetry.HasRunsStep = hasRunsStep;
ExecutionContext.StepTelemetry.HasUsesStep = hasUsesStep;
ExecutionContext.StepTelemetry.StepCount = steps.Count;
var pathReference = Action as Pipelines.RepositoryPathReference;
var telemetry = new ActionsStepTelemetry {
Ref = GetActionRef(),
HasPreStep = Data.HasPre,
HasPostStep = Data.HasPost,
IsEmbedded = ExecutionContext.IsEmbedded,
Type = "composite",
HasRunsStep = hasRunsStep,
HasUsesStep = hasUsesStep,
StepCount = steps.Count
};
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
}
ExecutionContext.StepTelemetry.Type = "composite";
try
{
// Inputs of the composite step
@@ -110,7 +117,7 @@ namespace GitHub.Runner.Worker.Handlers
// Create embedded steps
var embeddedSteps = new List<IStep>();
// If we need to setup containers beforehand, do it
// If we need to setup containers beforehand, do it
// only relevant for local composite actions that need to JIT download/setup containers
if (LocalActionContainerSetupSteps != null && LocalActionContainerSetupSteps.Count > 0)
{
@@ -145,7 +152,7 @@ namespace GitHub.Runner.Worker.Handlers
}
else
{
step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(childScopeName);
step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(childScopeName);
}
// Shallow copy github context
@@ -296,13 +303,13 @@ namespace GitHub.Runner.Worker.Handlers
// Register the job cancellation callback only if the job cancellation token has not been fired before each step runs
if (!ExecutionContext.Root.CancellationToken.IsCancellationRequested)
{
// Test the condition again. The job was cancelled after the condition was originally evaluated.
// Test the condition again. The job was canceled after the condition was originally evaluated.
jobCancelRegister = ExecutionContext.Root.CancellationToken.Register(() =>
{
// Mark job as cancelled
ExecutionContext.Root.Result = TaskResult.Canceled;
ExecutionContext.Root.JobContext.Status = ExecutionContext.Root.Result?.ToActionResult();
step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
var conditionReTestTraceWriter = new ConditionTraceWriter(Trace, null); // host tracing only
var conditionReTestResult = false;
@@ -386,7 +393,7 @@ namespace GitHub.Runner.Worker.Handlers
{
await RunStepAsync(step);
}
}
finally
{
@@ -396,7 +403,7 @@ namespace GitHub.Runner.Worker.Handlers
jobCancelRegister = null;
}
}
// Check failed or cancelled
// Check failed or canceled
if (step.ExecutionContext.Result == TaskResult.Failed || step.ExecutionContext.Result == TaskResult.Canceled)
{
Trace.Info($"Update job result with current composite step result '{step.ExecutionContext.Result}'.");
@@ -404,7 +411,7 @@ namespace GitHub.Runner.Worker.Handlers
}
// Update context
step.ExecutionContext.UpdateGlobalStepsContext();
SetStepsContext(step);
}
}
@@ -449,17 +456,23 @@ namespace GitHub.Runner.Worker.Handlers
SetStepConclusion(step, Common.Util.TaskResultUtil.MergeTaskResults(step.ExecutionContext.Result, step.ExecutionContext.CommandResult.Value));
}
step.ExecutionContext.ApplyContinueOnError(step.ContinueOnError);
Trace.Info($"Step result: {step.ExecutionContext.Result}");
step.ExecutionContext.Debug($"Finished: {step.DisplayName}");
step.ExecutionContext.PublishStepTelemetry();
}
private void SetStepConclusion(IStep step, TaskResult result)
{
step.ExecutionContext.Result = result;
step.ExecutionContext.UpdateGlobalStepsContext();
SetStepsContext(step);
}
private void SetStepsContext(IStep step)
{
if (!string.IsNullOrEmpty(step.ExecutionContext.ContextName) && !step.ExecutionContext.ContextName.StartsWith("__", StringComparison.Ordinal))
{
// TODO: when we support continue on error, we may need to do logic here to change conclusion based on the continue on error result
step.ExecutionContext.Global.StepsContext.SetOutcome(step.ExecutionContext.ScopeName, step.ExecutionContext.ContextName, (step.ExecutionContext.Result ?? TaskResult.Succeeded).ToActionResult());
step.ExecutionContext.Global.StepsContext.SetConclusion(step.ExecutionContext.ScopeName, step.ExecutionContext.ContextName, (step.ExecutionContext.Result ?? TaskResult.Succeeded).ToActionResult());
}
}
}
}
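
The two variants above differ in how the steps context is updated: UpdateGlobalStepsContext records `Outcome ?? Result` as the outcome, while SetStepsContext writes the same value to both outcome and conclusion (see its TODO). The split exists for continue-on-error, sketched below with illustrative names:

enum StepResult { Succeeded, Failed, Canceled, Skipped }

static class StepsContextSketch
{
    // continue-on-error: the outcome keeps the real result, the conclusion is forced to success,
    // so later `if:` conditions treat the step as having passed while the raw failure stays visible.
    public static (StepResult Outcome, StepResult Conclusion) Finalize(StepResult result, bool continueOnError)
    {
        if (result == StepResult.Failed && continueOnError)
        {
            return (StepResult.Failed, StepResult.Succeeded);
        }
        return (result, result);
    }
}
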

View File

@@ -1,14 +1,14 @@
using System;
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System;
using GitHub.Runner.Worker.Container;
using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.DistributedTask.WebApi;
using GitHub.DistributedTask.Pipelines.ContextData;
using System.Linq;
namespace GitHub.Runner.Worker.Handlers
{
@@ -70,13 +70,18 @@ namespace GitHub.Runner.Worker.Handlers
}
string type = Action.Type == Pipelines.ActionSourceType.Repository ? "Dockerfile" : "DockerHub";
// Set extra telemetry based on the current context.
// Add Telemetry to JobContext to send with JobCompleteMessage
if (stage == ActionRunStage.Main)
{
ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre;
ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost;
var telemetry = new ActionsStepTelemetry {
Ref = GetActionRef(),
HasPreStep = Data.HasPre,
HasPostStep = Data.HasPost,
IsEmbedded = ExecutionContext.IsEmbedded,
Type = type
};
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
}
ExecutionContext.StepTelemetry.Type = type;
// run container
var container = new ContainerInfo(HostContext)

View File

@@ -1,13 +1,13 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using System.Linq;
using System.IO;
using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Worker.Handlers
{
@@ -22,7 +22,7 @@ namespace GitHub.Runner.Worker.Handlers
string ActionDirectory { get; set; }
List<JobExtensionRunner> LocalActionContainerSetupSteps { get; set; }
Task RunAsync(ActionRunStage stage);
void PrepareExecution(ActionRunStage stage);
void PrintActionDetails(ActionRunStage stage);
}
public abstract class Handler : RunnerService
@@ -36,7 +36,6 @@ namespace GitHub.Runner.Worker.Handlers
protected IActionCommandManager ActionCommandManager { get; private set; }
public Pipelines.ActionStepDefinitionReference Action { get; set; }
public bool IsActionStep => Action != null;
public Dictionary<string, string> Environment { get; set; }
public Variables RuntimeVariables { get; set; }
public IExecutionContext ExecutionContext { get; set; }
@@ -45,50 +44,29 @@ namespace GitHub.Runner.Worker.Handlers
public string ActionDirectory { get; set; }
public List<JobExtensionRunner> LocalActionContainerSetupSteps { get; set; }
public void PrepareExecution(ActionRunStage stage)
public virtual string GetActionRef()
{
// Print out action details
PrintActionDetails(stage);
// Get telemetry for the action
PopulateActionTelemetry(stage);
}
protected void PopulateActionTelemetry(ActionRunStage stage)
{
if (!IsActionStep)
if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
{
ExecutionContext.StepTelemetry.Type = "runner";
ExecutionContext.StepTelemetry.Action = $"{stage} Job Hook";
}
else if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
{
ExecutionContext.StepTelemetry.Type = "docker";
var registryAction = Action as Pipelines.ContainerRegistryReference;
ExecutionContext.StepTelemetry.Action = registryAction.Image;
}
else if (Action.Type == Pipelines.ActionSourceType.Script)
{
ExecutionContext.StepTelemetry.Type = "run";
return registryAction.Image;
}
else if (Action.Type == Pipelines.ActionSourceType.Repository)
{
ExecutionContext.StepTelemetry.Type = "repository";
var repoAction = Action as Pipelines.RepositoryPathReference;
if (string.Equals(repoAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
ExecutionContext.StepTelemetry.Action = repoAction.Path;
return repoAction.Path;
}
else
{
ExecutionContext.StepTelemetry.Ref = repoAction.Ref;
if (string.IsNullOrEmpty(repoAction.Path))
{
ExecutionContext.StepTelemetry.Action = repoAction.Name;
return $"{repoAction.Name}@{repoAction.Ref}";
}
else
{
ExecutionContext.StepTelemetry.Action = $"{repoAction.Name}/{repoAction.Path}";
return $"{repoAction.Name}/{repoAction.Path}@{repoAction.Ref}";
}
}
}
@@ -97,11 +75,11 @@ namespace GitHub.Runner.Worker.Handlers
// this should never happen
Trace.Error($"Can't generate ref for {Action.Type.ToString()}");
}
return "";
}
protected virtual void PrintActionDetails(ActionRunStage stage)
public virtual void PrintActionDetails(ActionRunStage stage)
{
if (stage == ActionRunStage.Post)
{
ExecutionContext.Output($"Post job cleanup.");

View File

@@ -55,23 +55,7 @@ namespace GitHub.Runner.Worker.Handlers
else if (data.ExecutionType == ActionExecutionType.NodeJS)
{
handler = HostContext.CreateService<INodeScriptActionHandler>();
var nodeData = data as NodeJSActionExecutionData;
// With node12 EoL in 04/2022, we want to be able to uniformly upgrade all JS actions to node16 from the server
if (string.Equals(nodeData.NodeVersion, "node12", StringComparison.InvariantCultureIgnoreCase) &&
(executionContext.Global.Variables.GetBoolean("DistributedTask.ForceGithubJavascriptActionsToNode16") ?? false))
{
// The user can opt out of this behaviour by setting this variable to true, either via 'env' in their workflow or as an environment variable on their machine
executionContext.Global.EnvironmentVariables.TryGetValue(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, out var workflowOptOut);
var isWorkflowOptOutSet = !string.IsNullOrEmpty(workflowOptOut);
var isLocalOptOut = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion));
bool isOptOut = isWorkflowOptOutSet ? StringUtil.ConvertToBoolean(workflowOptOut) : isLocalOptOut;
if (!isOptOut)
{
nodeData.NodeVersion = "node16";
}
}
(handler as INodeScriptActionHandler).Data = nodeData;
(handler as INodeScriptActionHandler).Data = data as NodeJSActionExecutionData;
}
else if (data.ExecutionType == ActionExecutionType.Script)
{
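
The removed block above force-upgrades node12 JavaScript actions to node16 when the DistributedTask.ForceGithubJavascriptActionsToNode16 flag is set, unless the workflow or the machine opts out. A compact sketch of that decision, with the flag and opt-out values passed in as plain parameters instead of being read from variables and Constants:

using System;

static class NodeUpgradeSketch
{
    // forceNode16: server-side feature flag; workflowOptOut / machineOptOut: user escape hatches.
    public static string Resolve(string declaredVersion, bool forceNode16, string workflowOptOut, bool machineOptOut)
    {
        if (!string.Equals(declaredVersion, "node12", StringComparison.OrdinalIgnoreCase) || !forceNode16)
        {
            return declaredVersion;
        }

        // A workflow-level setting wins over the machine-level environment variable.
        // Simplified truthiness check; the runner uses StringUtil.ConvertToBoolean.
        bool optOut = !string.IsNullOrEmpty(workflowOptOut)
            ? string.Equals(workflowOptOut, "true", StringComparison.OrdinalIgnoreCase)
            : machineOptOut;
        return optOut ? declaredVersion : "node16";
    }
}
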

View File

@@ -1,13 +1,12 @@
using System;
using System.IO;
using System.Linq;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using System;
using System.Linq;
namespace GitHub.Runner.Worker.Handlers
{
@@ -75,13 +74,19 @@ namespace GitHub.Runner.Worker.Handlers
target = Data.Post;
}
// Set extra telemetry based on the current context.
// Add Telemetry to JobContext to send with JobCompleteMessage
if (stage == ActionRunStage.Main)
{
ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre;
ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost;
var telemetry = new ActionsStepTelemetry
{
Ref = GetActionRef(),
HasPreStep = Data.HasPre,
HasPostStep = Data.HasPost,
IsEmbedded = ExecutionContext.IsEmbedded,
Type = Data.NodeVersion
};
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
}
ExecutionContext.StepTelemetry.Type = Data.NodeVersion;
ArgUtil.NotNullOrEmpty(target, nameof(target));
target = Path.Combine(ActionDirectory, target);
@@ -94,14 +99,6 @@ namespace GitHub.Runner.Worker.Handlers
workingDirectory = HostContext.GetDirectory(WellKnownDirectory.Work);
}
#if OS_OSX
if (string.Equals(Data.NodeVersion, "node12", StringComparison.OrdinalIgnoreCase) &&
Constants.Runner.PlatformArchitecture.Equals(Constants.Architecture.Arm64))
{
ExecutionContext.Output($"The node12 is not supported on macOS ARM64 platform. Use node16 instead.");
Data.NodeVersion = "node16";
}
#endif
var nodeRuntimeVersion = await StepHost.DetermineNodeRuntimeVersion(ExecutionContext, Data.NodeVersion);
string file = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), nodeRuntimeVersion, "bin", $"node{IOUtil.ExeExtension}");
@@ -122,17 +119,6 @@ namespace GitHub.Runner.Worker.Handlers
// Remove environment variable that may cause conflicts with the node within the runner.
Environment.Remove("NODE_ICU_DATA"); // https://github.com/actions/runner/issues/795
if (Data.NodeVersion == "node12" && (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.Node12Warning) ?? false))
{
if (!ExecutionContext.JobContext.ContainsKey("Node12ActionsWarnings"))
{
ExecutionContext.JobContext["Node12ActionsWarnings"] = new ArrayContextData();
}
var repoAction = Action as RepositoryPathReference;
var actionDisplayName = new StringContextData(repoAction.Name ?? repoAction.Path); // local actions don't have a 'Name'
ExecutionContext.JobContext["Node12ActionsWarnings"].AssertArray("Node12ActionsWarnings").Add(actionDisplayName);
}
using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager))
using (var stderrManager = new OutputManager(ExecutionContext, ActionCommandManager))
{

View File

@@ -151,11 +151,6 @@ namespace GitHub.Runner.Worker.Handlers
}
}
if (line.Contains("fatal: unsafe repository", StringComparison.OrdinalIgnoreCase))
{
_executionContext.StepTelemetry.ErrorMessages.Add(line);
}
// Regular output
_executionContext.Output(line);
}

View File

@@ -1,7 +1,7 @@
using System;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using System.Threading.Tasks;
using System;
using GitHub.Runner.Sdk;
using GitHub.Runner.Common;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker.Handlers
@@ -35,8 +35,6 @@ namespace GitHub.Runner.Worker.Handlers
}
ArgUtil.NotNullOrEmpty(plugin, nameof(plugin));
// Set extra telemetry based on the current context.
ExecutionContext.StepTelemetry.Type = plugin;
// Update the env dictionary.
AddPrependPathToEnvironment();

View File

@@ -1,13 +1,12 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Linq;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker.Handlers
@@ -22,24 +21,18 @@ namespace GitHub.Runner.Worker.Handlers
{
public ScriptActionExecutionData Data { get; set; }
protected override void PrintActionDetails(ActionRunStage stage)
public override void PrintActionDetails(ActionRunStage stage)
{
// if we're executing a Job Extension, we won't have an 'Action'
if (!IsActionStep)
if (stage == ActionRunStage.Post)
{
if (Inputs.TryGetValue("path", out var path))
{
ExecutionContext.Output($"##[group]Run '{path}'");
}
else
{
throw new InvalidOperationException("Inputs 'path' must be set for job extensions");
}
throw new NotSupportedException("Script action should not have 'Post' job action.");
}
else if (Action.Type == Pipelines.ActionSourceType.Script)
Inputs.TryGetValue("script", out string contents);
contents = contents ?? string.Empty;
if (Action.Type == Pipelines.ActionSourceType.Script)
{
Inputs.TryGetValue("script", out string contents);
contents = contents ?? string.Empty;
var firstLine = contents.TrimStart(' ', '\t', '\r', '\n');
var firstNewLine = firstLine.IndexOfAny(new[] { '\r', '\n' });
if (firstNewLine >= 0)
@@ -48,16 +41,17 @@ namespace GitHub.Runner.Worker.Handlers
}
ExecutionContext.Output($"##[group]Run {firstLine}");
var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)
{
// Bright Cyan color
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
}
}
else
{
throw new InvalidOperationException($"Invalid action type {Action?.Type} for {nameof(ScriptHandler)}");
throw new InvalidOperationException($"Invalid action type {Action.Type} for {nameof(ScriptHandler)}");
}
var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)
{
// Bright Cyan color
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
}
string argFormat;
@@ -137,6 +131,11 @@ namespace GitHub.Runner.Worker.Handlers
public async Task RunAsync(ActionRunStage stage)
{
if (stage == ActionRunStage.Post)
{
throw new NotSupportedException("Script action should not have 'Post' job action.");
}
// Validate args
Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
@@ -145,6 +144,17 @@ namespace GitHub.Runner.Worker.Handlers
var githubContext = ExecutionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(githubContext, nameof(githubContext));
// Add Telemetry to JobContext to send with JobCompleteMessage
if (stage == ActionRunStage.Main)
{
var telemetry = new ActionsStepTelemetry
{
IsEmbedded = ExecutionContext.IsEmbedded,
Type = "run",
};
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
}
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
Inputs.TryGetValue("script", out var contents);
@@ -153,8 +163,7 @@ namespace GitHub.Runner.Worker.Handlers
string workingDirectory = null;
if (!Inputs.TryGetValue("workingDirectory", out workingDirectory))
{
// Don't use job level working directories for hooks
if (IsActionStep && string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.Global.JobDefaults.TryGetValue("run", out var runDefaults))
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.Global.JobDefaults.TryGetValue("run", out var runDefaults))
{
if (runDefaults.TryGetValue("working-directory", out workingDirectory))
{
@@ -213,35 +222,15 @@ namespace GitHub.Runner.Worker.Handlers
}
}
// Don't override runner telemetry here
if (!string.IsNullOrEmpty(shellCommand) && IsActionStep)
{
ExecutionContext.StepTelemetry.Action = shellCommand;
}
// No arg format was given, shell must be a built-in
if (string.IsNullOrEmpty(argFormat) || !argFormat.Contains("{0}"))
{
throw new ArgumentException("Invalid shell option. Shell must be a valid built-in (bash, sh, cmd, powershell, pwsh) or a format string containing '{0}'");
}
string scriptFilePath, resolvedScriptPath;
if (IsActionStep)
{
// We do not know the full path until we know what shell is being used, so that we can determine the file extension
scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}");
resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}";
}
else
{
// JobExtensionRunners run a script file, we load that from the inputs here
if (!Inputs.ContainsKey("path"))
{
throw new ArgumentException("Expected 'path' input to be set");
}
scriptFilePath = Inputs["path"];
ArgUtil.NotNullOrEmpty(scriptFilePath, "path");
resolvedScriptPath = Inputs["path"].Replace("\"", "\\\"");
}
// We do not know the full path until we know what shell is being used, so that we can determine the file extension
var scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}");
var resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}";
// Format arg string with script path
var arguments = string.Format(argFormat, resolvedScriptPath);
@@ -257,12 +246,9 @@ namespace GitHub.Runner.Worker.Handlers
#else
// Don't add a BOM. It causes the script to fail on some operating systems (e.g. on Ubuntu 14).
var encoding = new UTF8Encoding(false);
#endif
if (IsActionStep)
{
// Script is written to local path (ie host) but executed relative to the StepHost, which may be a container
File.WriteAllText(scriptFilePath, contents, encoding);
}
#endif
// Script is written to local path (ie host) but executed relative to the StepHost, which may be a container
File.WriteAllText(scriptFilePath, contents, encoding);
// Prepend PATH
AddPrependPathToEnvironment();
@@ -285,10 +271,10 @@ namespace GitHub.Runner.Worker.Handlers
if (Environment.ContainsKey("DYLD_INSERT_LIBRARIES")) // We don't check `isContainerStepHost` because we don't support container on macOS
{
// launch `node macOSRunInvoker.js shell args` instead of `shell args` to avoid macOS SIP removing `DYLD_INSERT_LIBRARIES` when launching the process
string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
string node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
string macOSRunInvoker = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "macos-run-invoker.js");
arguments = $"\"{macOSRunInvoker.Replace("\"", "\\\"")}\" \"{fileName.Replace("\"", "\\\"")}\" {arguments}";
fileName = node;
fileName = node12;
}
#endif
var systemConnection = ExecutionContext.Global.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));

View File

@@ -1,8 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Worker.Handlers
{
@@ -81,22 +79,5 @@ namespace GitHub.Runner.Worker.Handlers
throw new ArgumentException($"Failed to parse COMMAND [..ARGS] from {shellOption}");
}
}
internal static string GetDefaultShellForScript(string path, Common.Tracing trace, string prependPath)
{
var format = "{0} {1}";
switch (Path.GetExtension(path))
{
case ".sh":
// use 'sh' args but prefer bash
var pathToShell = WhichUtil.Which("bash", false, trace, prependPath) ?? WhichUtil.Which("sh", true, trace, prependPath);
return string.Format(format, pathToShell, _defaultArguments["sh"]);
case ".ps1":
var pathToPowershell = WhichUtil.Which("pwsh", false, trace, prependPath) ?? WhichUtil.Which("powershell", true, trace, prependPath);
return string.Format(format, pathToPowershell, _defaultArguments["powershell"]);
default:
throw new ArgumentException($"{path} is not a valid path to a script. Make sure it ends in '.sh' or '.ps1'.");
}
}
}
}
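
The removed GetDefaultShellForScript helper above picks a shell command line for a hook script by extension, preferring bash over sh and pwsh over powershell via WhichUtil.Which. A self-contained sketch with a simple PATH probe standing in for WhichUtil; the argument formats mirror the documented default shell invocations and are otherwise assumptions:

using System;
using System.IO;
using System.Linq;

static class DefaultShellSketch
{
    public static string ForScript(string path)
    {
        switch (Path.GetExtension(path))
        {
            case ".sh":
                // prefer bash, fall back to sh; "{0}" is later replaced with the script path
                return $"{Which("bash") ?? Which("sh")} -e {{0}}";
            case ".ps1":
                // prefer pwsh, fall back to Windows PowerShell
                return $"{Which("pwsh") ?? Which("powershell")} -command \". '{{0}}'\"";
            default:
                throw new ArgumentException($"{path} must end in '.sh' or '.ps1'.");
        }
    }

    // Minimal PATH probe (no .exe handling); the runner uses WhichUtil.Which with tracing instead.
    static string Which(string name) =>
        (Environment.GetEnvironmentVariable("PATH") ?? string.Empty)
            .Split(Path.PathSeparator)
            .Select(dir => Path.Combine(dir, name))
            .FirstOrDefault(File.Exists);
}
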

View File

@@ -1,13 +1,18 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Worker.Container;
using GitHub.Services.WebApi;
using Newtonsoft.Json;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System.Linq;
using GitHub.DistributedTask.Pipelines.ContextData;
namespace GitHub.Runner.Worker.Handlers
{

View File

@@ -15,7 +15,6 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
@@ -57,8 +56,6 @@ namespace GitHub.Runner.Worker
// Create a new timeline record for 'Set up job'
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Set up job", $"{nameof(JobExtension)}_Init", null, null, ActionRunStage.Pre);
context.StepTelemetry.Type = "runner";
context.StepTelemetry.Action = "setup_job";
List<IStep> preJobSteps = new List<IStep>();
List<IStep> jobSteps = new List<IStep>();
@@ -249,19 +246,6 @@ namespace GitHub.Runner.Worker
Trace.Info("Downloading actions");
var actionManager = HostContext.GetService<IActionManager>();
var prepareResult = await actionManager.PrepareActionsAsync(context, message.Steps);
// add hook to preJobSteps
var startedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED");
if (!string.IsNullOrEmpty(startedHookPath))
{
var hookProvider = HostContext.GetService<IJobHookProvider>();
var jobHookData = new JobHookData(ActionRunStage.Pre, startedHookPath);
preJobSteps.Add(new JobExtensionRunner(runAsync: hookProvider.RunHook,
condition: $"{PipelineTemplateConstants.Always}()",
displayName: Constants.Hooks.JobStartedStepName,
data: (object)jobHookData));
}
preJobSteps.AddRange(prepareResult.ContainerSetupSteps);
// Add start-container steps, record and stop-container steps
@@ -329,8 +313,6 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNull(extensionStep, extensionStep.DisplayName);
Guid stepId = Guid.NewGuid();
extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, stepId.ToString("N"), null, stepId.ToString("N"), ActionRunStage.Pre);
extensionStep.ExecutionContext.StepTelemetry.Type = "runner";
extensionStep.ExecutionContext.StepTelemetry.Action = extensionStep.DisplayName.ToLowerInvariant().Replace(' ', '_');
}
else if (step is IActionRunner actionStep)
{
@@ -351,18 +333,6 @@ namespace GitHub.Runner.Worker
}
}
// Register Job Completed hook if the variable is set
var completedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED");
if (!string.IsNullOrEmpty(completedHookPath))
{
var hookProvider = HostContext.GetService<IJobHookProvider>();
var jobHookData = new JobHookData(ActionRunStage.Post, completedHookPath);
jobContext.RegisterPostJobStep(new JobExtensionRunner(runAsync: hookProvider.RunHook,
condition: $"{PipelineTemplateConstants.Always}()",
displayName: Constants.Hooks.JobCompletedStepName,
data: (object)jobHookData));
}
List<IStep> steps = new List<IStep>();
steps.AddRange(preJobSteps);
steps.AddRange(jobSteps);
@@ -431,8 +401,6 @@ namespace GitHub.Runner.Worker
// create a new timeline record node for 'Finalize job'
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Complete job", $"{nameof(JobExtension)}_Final", null, null, ActionRunStage.Post);
context.StepTelemetry.Type = "runner";
context.StepTelemetry.Action = "complete_job";
using (var register = jobContext.CancellationToken.Register(() => { context.CancelToken(); }))
{
try

View File

@@ -1,95 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using System.Linq;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Handlers;
namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(JobHookProvider))]
public interface IJobHookProvider : IRunnerService
{
Task RunHook(IExecutionContext executionContext, object data);
}
public class JobHookData
{
public string Path {get; private set;}
public ActionRunStage Stage {get; private set;}
public JobHookData(ActionRunStage stage, string path)
{
Path = path;
Stage = stage;
}
}
public class JobHookProvider : RunnerService, IJobHookProvider
{
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
}
public async Task RunHook(IExecutionContext executionContext, object data)
{
// Get Inputs
var hookData = data as JobHookData;
ArgUtil.NotNull(hookData, nameof(JobHookData));
var displayName = hookData.Stage == ActionRunStage.Pre ? "job started hook" : "job completed hook";
// Log to users so that they know how this step was injected
executionContext.Output($"A {displayName} has been configured by the self-hosted runner administrator");
// Validate script file.
if (!File.Exists(hookData.Path))
{
throw new FileNotFoundException("File doesn't exist");
}
executionContext.WriteWebhookPayload();
// Create the handler data.
var scriptDirectory = Path.GetDirectoryName(hookData.Path);
var stepHost = HostContext.CreateService<IDefaultStepHost>();
var prependPath = string.Join(Path.PathSeparator.ToString(), executionContext.Global.PrependPath.Reverse<string>());
Dictionary<string, string> inputs = new()
{
["path"] = hookData.Path,
["shell"] = ScriptHandlerHelpers.GetDefaultShellForScript(hookData.Path, Trace, prependPath)
};
// Create the handler
var handlerFactory = HostContext.GetService<IHandlerFactory>();
var handler = handlerFactory.Create(
executionContext,
action: null,
stepHost,
new ScriptActionExecutionData(),
inputs,
environment: new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
executionContext.Global.Variables,
actionDirectory: scriptDirectory,
localActionContainerSetupSteps: null);
handler.PrepareExecution(hookData.Stage);
// Set up file commands
var fileCommandManager = HostContext.CreateService<IFileCommandManager>();
fileCommandManager.InitializeFiles(executionContext, null);
// Run the step and process the file commands
try
{
await handler.RunAsync(hookData.Stage);
}
finally
{
fileCommandManager.ProcessFiles(executionContext, executionContext.Global.Container);
}
}
}
}
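The hook wiring shown in the JobExtension hunks above is driven entirely by two environment variables on the runner host. A minimal sketch of that decision follows; the variable names and step display names come from the code above, while the printed lines merely describe what the runner would inject rather than performing the injection:
using System;
// Sketch only: mirrors the env-var checks in JobExtension, not the real step registration.
var startedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED");
var completedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED");
if (!string.IsNullOrEmpty(startedHookPath))
    Console.WriteLine($"Pre-job step 'job started hook' would run: {startedHookPath}");
if (!string.IsNullOrEmpty(completedHookPath))
    Console.WriteLine($"Post-job step 'job completed hook' would run: {completedHookPath}");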

View File

@@ -6,7 +6,6 @@ using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
@@ -132,9 +131,9 @@ namespace GitHub.Runner.Worker
}
catch (OperationCanceledException ex) when (jobContext.CancellationToken.IsCancellationRequested)
{
// set the job to cancelled
// set the job to canceled
// don't log an error issue to the job ExecutionContext, since the server owns the job-level issue
Trace.Error($"Job is cancelled during initialize.");
Trace.Error($"Job is canceled during initialize.");
Trace.Error($"Caught exception: {ex}");
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Canceled);
}
@@ -258,12 +257,6 @@ namespace GitHub.Runner.Worker
}
}
if (jobContext.JobContext.ContainsKey("Node12ActionsWarnings"))
{
var actions = string.Join(", ", jobContext.JobContext["Node12ActionsWarnings"].AssertArray("Node12ActionsWarnings").Select(action => action.ToString()));
jobContext.Warning(string.Format(Constants.Runner.Node12DetectedAfterEndOfLife, actions));
}
try
{
await ShutdownQueue(throwOnFailure: true);
@@ -286,13 +279,14 @@ namespace GitHub.Runner.Worker
}
// Load any upgrade telemetry
LoadFromTelemetryFile(jobContext.Global.JobTelemetry);
LoadFromTelemetryFile(jobContext.JobTelemetry);
// Make sure we don't submit secrets as telemetry
MaskTelemetrySecrets(jobContext.Global.JobTelemetry);
MaskTelemetrySecrets(jobContext.JobTelemetry);
Trace.Info("Raising job completed event.");
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result, jobContext.JobOutputs, jobContext.ActionsEnvironment, jobContext.ActionsStepsTelemetry, jobContext.JobTelemetry);
Trace.Info($"Raising job completed event");
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result, jobContext.JobOutputs, jobContext.ActionsEnvironment, jobContext.Global.StepsTelemetry, jobContext.Global.JobTelemetry);
var completeJobRetryLimit = 5;
var exceptions = new List<Exception>();

View File

@@ -3,7 +3,7 @@
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<OutputType>Exe</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64</RuntimeIdentifiers>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
@@ -131,13 +130,11 @@ namespace GitHub.Runner.Worker
// Register the job cancellation callback only if the job cancellation token has not been fired before each step runs
if (!jobContext.CancellationToken.IsCancellationRequested)
{
// Test the condition again. The job was cancelled after the condition was originally evaluated.
// Test the condition again. The job was canceled after the condition was originally evaluated.
jobCancelRegister = jobContext.CancellationToken.Register(() =>
{
// Mark job as Cancelled or Failed depending on HostContext shutdown token's cancellation
jobContext.Result = HostContext.RunnerShutdownToken.IsCancellationRequested
? TaskResult.Failed
: TaskResult.Canceled;
// Mark job as cancelled
jobContext.Result = TaskResult.Canceled;
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
@@ -175,10 +172,8 @@ namespace GitHub.Runner.Worker
{
if (jobContext.Result != TaskResult.Canceled)
{
// Mark job as Cancelled or Failed depending on HostContext shutdown token's cancellation
jobContext.Result = HostContext.RunnerShutdownToken.IsCancellationRequested
? TaskResult.Failed
: TaskResult.Canceled;
// Mark job as cancelled
jobContext.Result = TaskResult.Canceled;
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
}
}
@@ -324,8 +319,29 @@ namespace GitHub.Runner.Worker
step.ExecutionContext.Result = TaskResultUtil.MergeTaskResults(step.ExecutionContext.Result, step.ExecutionContext.CommandResult.Value);
}
step.ExecutionContext.ApplyContinueOnError(step.ContinueOnError);
// Fixup the step result if ContinueOnError
if (step.ExecutionContext.Result == TaskResult.Failed)
{
var continueOnError = false;
try
{
continueOnError = templateEvaluator.EvaluateStepContinueOnError(step.ContinueOnError, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions);
}
catch (Exception ex)
{
Trace.Info("The step failed and an error occurred when attempting to determine whether to continue on error.");
Trace.Error(ex);
step.ExecutionContext.Error("The step failed and an error occurred when attempting to determine whether to continue on error.");
step.ExecutionContext.Error(ex);
}
if (continueOnError)
{
step.ExecutionContext.Outcome = step.ExecutionContext.Result;
step.ExecutionContext.Result = TaskResult.Succeeded;
Trace.Info($"Updated step result (continue on error)");
}
}
Trace.Info($"Step result: {step.ExecutionContext.Result}");
// Complete the step context
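In plain terms, the fixup above preserves the real result in Outcome and reports Succeeded as Result when continue-on-error evaluates to true, which is what lets later steps keep running. A minimal sketch of that state transition, with simplified stand-in types rather than the runner's own:
using System;
enum TaskResult { Succeeded, Failed, Canceled }
class StepState
{
    public TaskResult Result;
    public TaskResult? Outcome;
}
class ContinueOnErrorDemo
{
    static void Main()
    {
        var step = new StepState { Result = TaskResult.Failed };
        bool continueOnError = true;  // would come from evaluating the step's continue-on-error expression
        if (continueOnError && step.Result == TaskResult.Failed)
        {
            step.Outcome = step.Result;          // keeps Failed for later inspection
            step.Result = TaskResult.Succeeded;  // lets the job continue
        }
        Console.WriteLine($"outcome={step.Outcome}, result={step.Result}");  // outcome=Failed, result=Succeeded
    }
}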

View File

@@ -129,10 +129,9 @@
"required": true
},
"env": "step-env",
"continue-on-error": "boolean-steps-context",
"working-directory": "string-steps-context",
"shell": {
"type": "string-steps-context",
"type": "non-empty-string",
"required": true
}
}
@@ -148,7 +147,6 @@
"type": "non-empty-string",
"required": true
},
"continue-on-error": "boolean-steps-context",
"with": "step-with",
"env": "step-env"
}
@@ -203,20 +201,6 @@
],
"string": {}
},
"boolean-steps-context": {
"context": [
"github",
"inputs",
"strategy",
"matrix",
"steps",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"boolean": {}
},
"step-env": {
"context": [
"github",

View File

@@ -23,6 +23,7 @@ namespace GitHub.Services.Common.ClientStorage
private readonly string m_filePath;
private readonly VssFileStorageReader m_reader;
private readonly IVssClientStorageWriter m_writer;
private const char c_defaultPathSeparator = '\\';
private const bool c_defaultIgnoreCaseInPaths = false;
@@ -191,7 +192,7 @@ namespace GitHub.Services.Common.ClientStorage
// Windows Impersonation is being used.
// Check to see if we can find the user's local application data directory.
string subDir = Path.Combine("GitHub", "ActionsService");
string subDir = "GitHub\\ActionsService";
string path = Environment.GetEnvironmentVariable("localappdata");
SafeGetFolderPath(Environment.SpecialFolder.LocalApplicationData);
if (string.IsNullOrEmpty(path))
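The change in this hunk swaps a hard-coded backslash path for Path.Combine. A one-line illustration of why that matters off Windows:
using System;
using System.IO;
// On Windows this prints GitHub\ActionsService; on Linux/macOS it prints GitHub/ActionsService,
// whereas the literal "GitHub\\ActionsService" embeds a backslash on every platform.
Console.WriteLine(Path.Combine("GitHub", "ActionsService"));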

View File

@@ -1,4 +1,4 @@
/*
/*
* ---------------------------------------------------------
* Copyright(C) Microsoft Corporation. All rights reserved.
* ---------------------------------------------------------
@@ -324,7 +324,6 @@ namespace GitHub.DistributedTask.WebApi
/// <param name="scopeIdentifier">The project GUID to scope the request</param>
/// <param name="hubName">The name of the server hub: "build" for the Build server or "rm" for the Release Management server</param>
/// <param name="planId"></param>
/// <param name="jobId"></param>
/// <param name="actionReferenceList"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
@@ -332,7 +331,6 @@ namespace GitHub.DistributedTask.WebApi
Guid scopeIdentifier,
string hubName,
Guid planId,
Guid jobId,
ActionReferenceList actionReferenceList,
object userState = null,
CancellationToken cancellationToken = default)
@@ -341,15 +339,12 @@ namespace GitHub.DistributedTask.WebApi
Guid locationId = new Guid("27d7f831-88c1-4719-8ca1-6a061dad90eb");
object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId };
HttpContent content = new ObjectContent<ActionReferenceList>(actionReferenceList, new VssJsonMediaTypeFormatter(true));
List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
queryParams.Add("jobId", jobId);
return SendAsync<ActionDownloadInfoCollection>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
queryParameters: queryParams,
userState: userState,
cancellationToken: cancellationToken,
content: content);

View File

@@ -38,6 +38,20 @@ namespace GitHub.DistributedTask.Logging
return Base64StringEscapeShift(value, 2);
}
public static String BashComparand(String value)
{
var result = new StringBuilder();
foreach (var c in value)
{
if (!char.IsLowSurrogate(c))
{
result.Append('\\');
}
result.Append(c);
}
return result.ToString();
}
// Used when we pass environment variables to docker to escape " with \"
public static String CommandLineArgumentEscape(String value)
{
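To make the escaping in BashComparand concrete, here is a small standalone sketch; the method body is copied from the hunk above, the sample input is made up, and the stated purpose (matching the value literally from bash without shell interpretation) is an assumption based on the method's name:
using System;
using System.Text;
class BashComparandDemo
{
    // Copied from the hunk above: prefix every character (except low surrogates) with a backslash.
    static string BashComparand(string value)
    {
        var result = new StringBuilder();
        foreach (var c in value)
        {
            if (!char.IsLowSurrogate(c))
            {
                result.Append('\\');
            }
            result.Append(c);
        }
        return result.ToString();
    }
    static void Main()
    {
        Console.WriteLine(BashComparand("p@ss w0rd"));  // prints: \p\@\s\s\ \w\0\r\d
    }
}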

View File

@@ -1,6 +1,4 @@
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
@@ -10,13 +8,6 @@ namespace GitHub.DistributedTask.WebApi
[DataContract]
public class ActionsStepTelemetry
{
public ActionsStepTelemetry()
{
this.ErrorMessages = new List<string>();
}
[DataMember(EmitDefaultValue = false)]
public string Action { get; set; }
[DataMember(EmitDefaultValue = false)]
public string Ref { get; set; }
@@ -24,15 +15,9 @@ namespace GitHub.DistributedTask.WebApi
[DataMember(EmitDefaultValue = false)]
public string Type { get; set; }
[DataMember(EmitDefaultValue = false)]
public string Stage { get; set; }
[DataMember(EmitDefaultValue = false)]
public Guid StepId { get; set; }
[DataMember(EmitDefaultValue = false)]
public bool? HasRunsStep { get; set; }
[DataMember(EmitDefaultValue = false)]
public bool? HasUsesStep { get; set; }
@@ -47,14 +32,5 @@ namespace GitHub.DistributedTask.WebApi
[DataMember(EmitDefaultValue = false)]
public int? StepCount { get; set; }
[DataMember(EmitDefaultValue = false)]
public TaskResult? Result { get; set; }
[DataMember(EmitDefaultValue = false)]
public List<string> ErrorMessages { get; set; }
[DataMember(EmitDefaultValue = false)]
public int? ExecutionTimeInSeconds { get; set; }
}
}

View File

@@ -102,10 +102,4 @@ namespace GitHub.DistributedTask.WebApi
public static readonly String FileAttachment = "DistributedTask.Core.FileAttachment";
public static readonly String DiagnosticLog = "DistributedTask.Core.DiagnosticLog";
}
[GenerateAllConstants]
public class ChecksAttachmentType
{
public static readonly String StepSummary = "Checks.Step.Summary";
}
}

View File

@@ -3,7 +3,7 @@
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64</RuntimeIdentifiers>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>

View File

@@ -19,8 +19,7 @@ namespace GitHub.Runner.Common.Tests
"linux-x64",
"linux-arm",
"linux-arm64",
"osx-x64",
"osx-arm64"
"osx-x64"
};
Assert.True(BuildConstants.Source.CommitHash.Length == 40, $"CommitHash should be SHA-1 hash {BuildConstants.Source.CommitHash}");

View File

@@ -708,85 +708,33 @@ namespace GitHub.Runner.Common.Tests
}
}
[Theory]
[InlineData("configure", "once")]
[InlineData("remove", "disableupdate")]
[InlineData("remove", "ephemeral")]
[InlineData("remove", "once")]
[InlineData("remove", "replace")]
[InlineData("remove", "runasservice")]
[InlineData("remove", "unattended")]
[InlineData("run", "disableupdate")]
[InlineData("run", "ephemeral")]
[InlineData("run", "replace")]
[InlineData("run", "runasservice")]
[InlineData("run", "unattended")]
[InlineData("warmup", "disableupdate")]
[InlineData("warmup", "ephemeral")]
[InlineData("warmup", "once")]
[InlineData("warmup", "replace")]
[InlineData("warmup", "runasservice")]
[InlineData("warmup", "unattended")]
[Fact]
[Trait("Level", "L0")]
[Trait("Category", nameof(CommandSettings))]
public void ValidateInvalidFlagCommandCombination(string validCommand, string flag)
public void ValidateFlags()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange.
var command = new CommandSettings(hc, args: new string[] { validCommand, $"--{flag}" });
var command = new CommandSettings(hc, args: new string[] { "--badflag" });
// Assert.
Assert.Contains(flag, command.Validate());
Assert.Contains("badflag", command.Validate());
}
}
[Theory]
[InlineData("remove", "auth", "bar arg value")]
[InlineData("remove", "labels", "bar arg value")]
[InlineData("remove", "monitorsocketaddress", "bar arg value")]
[InlineData("remove", "name", "bar arg value")]
[InlineData("remove", "runnergroup", "bar arg value")]
[InlineData("remove", "url", "bar arg value")]
[InlineData("remove", "username", "bar arg value")]
[InlineData("remove", "windowslogonaccount", "bar arg value")]
[InlineData("remove", "windowslogonpassword", "bar arg value")]
[InlineData("remove", "work", "bar arg value")]
[InlineData("run", "auth", "bad arg value")]
[InlineData("run", "labels", "bad arg value")]
[InlineData("run", "monitorsocketaddress", "bad arg value")]
[InlineData("run", "name", "bad arg value")]
[InlineData("run", "pat", "bad arg value")]
[InlineData("run", "runnergroup", "bad arg value")]
[InlineData("run", "token", "bad arg value")]
[InlineData("run", "url", "bad arg value")]
[InlineData("run", "username", "bad arg value")]
[InlineData("run", "windowslogonaccount", "bad arg value")]
[InlineData("run", "windowslogonpassword", "bad arg value")]
[InlineData("run", "work", "bad arg value")]
[InlineData("warmup", "auth", "bad arg value")]
[InlineData("warmup", "labels", "bad arg value")]
[InlineData("warmup", "monitorsocketaddress", "bad arg value")]
[InlineData("warmup", "name", "bad arg value")]
[InlineData("warmup", "pat", "bad arg value")]
[InlineData("warmup", "runnergroup", "bad arg value")]
[InlineData("warmup", "token", "bad arg value")]
[InlineData("warmup", "url", "bad arg value")]
[InlineData("warmup", "username", "bad arg value")]
[InlineData("warmup", "windowslogonaccount", "bad arg value")]
[InlineData("warmup", "windowslogonpassword", "bad arg value")]
[InlineData("warmup", "work", "bad arg value")]
[Fact]
[Trait("Level", "L0")]
[Trait("Category", nameof(CommandSettings))]
public void ValidateInvalidArgCommandCombination(string validCommand, string arg, string argValue)
public void ValidateArgs()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange.
var command = new CommandSettings(hc, args: new string[] { validCommand, $"--{arg}", argValue });
var command = new CommandSettings(hc, args: new string[] { "--badargname", "bad arg value" });
// Assert.
Assert.Contains(arg, command.Validate());
Assert.Contains("badargname", command.Validate());
}
}
@@ -810,73 +758,6 @@ namespace GitHub.Runner.Common.Tests
}
}
[Theory]
[InlineData("configure", "help")]
[InlineData("configure", "version")]
[InlineData("configure", "commit")]
[InlineData("configure", "check")]
[InlineData("configure", "disableupdate")]
[InlineData("configure", "ephemeral")]
[InlineData("configure", "replace")]
[InlineData("configure", "runasservice")]
[InlineData("configure", "unattended")]
[InlineData("remove", "help")]
[InlineData("remove", "version")]
[InlineData("remove", "commit")]
[InlineData("remove", "check")]
[InlineData("run", "help")]
[InlineData("run", "version")]
[InlineData("run", "commit")]
[InlineData("run", "check")]
[InlineData("run", "once")]
[InlineData("warmup", "help")]
[InlineData("warmup", "version")]
[InlineData("warmup", "commit")]
[InlineData("warmup", "check")]
[Trait("Level", "L0")]
[Trait("Category", nameof(CommandSettings))]
public void ValidateGoodFlagCommandCombination(string validCommand, string flag)
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange.
var command = new CommandSettings(hc, args: new string[] { validCommand, $"--{flag}" });
// Assert.
Assert.True(command.Validate().Count == 0);
}
}
[Theory]
[InlineData("configure", "auth", "good arg value")]
[InlineData("configure", "labels", "good arg value")]
[InlineData("configure", "monitorsocketaddress", "good arg value")]
[InlineData("configure", "name", "good arg value")]
[InlineData("configure", "pat", "good arg value")]
[InlineData("configure", "runnergroup", "good arg value")]
[InlineData("configure", "token", "good arg value")]
[InlineData("configure", "url", "good arg value")]
[InlineData("configure", "username", "good arg value")]
[InlineData("configure", "windowslogonaccount", "good arg value")]
[InlineData("configure", "windowslogonpassword", "good arg value")]
[InlineData("configure", "work", "good arg value")]
[InlineData("remove", "token", "good arg value")]
[InlineData("remove", "pat", "good arg value")]
[InlineData("run", "startuptype", "good arg value")]
[Trait("Level", "L0")]
[Trait("Category", nameof(CommandSettings))]
public void ValidateGoodArgCommandCombination(string validCommand, string arg, string argValue)
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange.
var command = new CommandSettings(hc, args: new string[] { validCommand, $"--{arg}", argValue });
// Assert.
Assert.True(command.Validate().Count == 0);
}
}
private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
{
TestHostContext hc = new TestHostContext(this, testName);

View File

@@ -1,18 +1,18 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Listener;
using GitHub.Runner.Listener.Configuration;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using GitHub.Runner.Listener;
using GitHub.Runner.Listener.Configuration;
using Moq;
using System;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
using Xunit;
using System.Threading;
using System.Reflection;
using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography;
namespace GitHub.Runner.Common.Tests.Listener
{
@@ -256,67 +256,5 @@ namespace GitHub.Runner.Common.Tests.Listener
tokenSource.Token), Times.Once());
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void SkipDeleteSession_WhenGetNextMessageGetTaskAgentAccessTokenExpiredException()
{
using (TestHostContext tc = CreateTestContext())
using (var tokenSource = new CancellationTokenSource())
{
Tracing trace = tc.GetTrace();
// Arrange.
var expectedSession = new TaskAgentSession();
PropertyInfo sessionIdProperty = expectedSession.GetType().GetProperty("SessionId", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
Assert.NotNull(sessionIdProperty);
sessionIdProperty.SetValue(expectedSession, Guid.NewGuid());
_runnerServer
.Setup(x => x.CreateAgentSessionAsync(
_settings.PoolId,
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token))
.Returns(Task.FromResult(expectedSession));
_credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials());
_store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken });
_store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData));
// Act.
MessageListener listener = new MessageListener();
listener.Initialize(tc);
bool result = await listener.CreateSessionAsync(tokenSource.Token);
Assert.True(result);
_runnerServer
.Setup(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), tokenSource.Token))
.Throws(new TaskAgentAccessTokenExpiredException("test"));
try
{
await listener.GetNextMessageAsync(tokenSource.Token);
}
catch (TaskAgentAccessTokenExpiredException)
{
//expected
}
finally
{
await listener.DeleteSessionAsync();
}
//Assert
_runnerServer
.Verify(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), tokenSource.Token), Times.Once);
_runnerServer
.Verify(x => x.DeleteAgentSessionAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<CancellationToken>()), Times.Never);
}
}
}
}

View File

@@ -1,5 +1,4 @@
#if !(OS_OSX && ARM64)
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -51,9 +50,9 @@ namespace GitHub.Runner.Common.Tests.Listener
var response = await client.SendAsync(new HttpRequestMessage(HttpMethod.Get, "https://github.com/actions/runner/releases/latest"));
if (response.StatusCode == System.Net.HttpStatusCode.Redirect)
{
var redirectUrl = response.Headers.Location.ToString();
var redirect = await response.Content.ReadAsStringAsync();
Regex regex = new Regex(@"/runner/releases/tag/v(?<version>\d+\.\d+\.\d+)");
var match = regex.Match(redirectUrl);
var match = regex.Match(redirect);
if (match.Success)
{
latestVersion = match.Groups["version"].Value;
@@ -64,10 +63,6 @@ namespace GitHub.Runner.Common.Tests.Listener
_packageUrl = $"https://github.com/actions/runner/releases/download/v{latestVersion}/actions-runner-{BuildConstants.RunnerPackage.PackageName}-{latestVersion}.zip";
#endif
}
else
{
throw new Exception("The latest runner version could not be determined so a download URL could not be generated for it. Please check the location header of the redirect response of 'https://github.com/actions/runner/releases/latest'");
}
}
}
@@ -779,14 +774,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var traceFile = Path.GetTempFileName();
File.Copy(hc.TraceFileName, traceFile, true);
if (File.ReadAllText(traceFile).Contains("Use trimmed (runtime+externals) package"))
{
Assert.Contains("Something wrong with the trimmed runner package, failback to use the full package for runner updates", File.ReadAllText(traceFile));
}
else
{
hc.GetTrace().Warning("Skipping the 'TestSelfUpdateAsync_FallbackToFullPackage' test, as the `externals` or `runtime` hashes have been updated");
}
Assert.Contains("Something wrong with the trimmed runner package, failback to use the full package for runner updates", File.ReadAllText(traceFile));
}
}
finally
@@ -796,4 +784,3 @@ namespace GitHub.Runner.Common.Tests.Listener
}
}
}
#endif

Some files were not shown because too many files have changed in this diff.