Mirror of https://github.com/actions/runner.git (synced 2025-12-10 20:36:49 +00:00)

Compare commits: v2.289.0...fhammerl/2 (3 commits)
| Author | SHA1 | Date |
|---|---|---|
| | f4a2c808eb | |
| | 86122a034a | |
| | f1ddeb0d06 | |
16  .github/workflows/build.yml (vendored)
@@ -42,7 +42,7 @@ jobs:

runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v1

# Build runner layout
- name: Build & Layout Release
@@ -50,6 +50,13 @@ jobs:
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
working-directory: src

# Run tests
- name: L0
run: |
${{ matrix.devScript }} test
working-directory: src
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'

# Check runtime/externals hash
- name: Compute/Compare runtime and externals Hash
shell: bash
@@ -73,13 +80,6 @@ jobs:
DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}

# Run tests
- name: L0
run: |
${{ matrix.devScript }} test
working-directory: src
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'

# Create runner package tar.gz/zip
- name: Package Release
if: github.event_name != 'pull_request'

15  .github/workflows/release.yml (vendored)
@@ -92,7 +92,7 @@ jobs:

runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v1

# Build runner layout
- name: Build & Layout Release
@@ -260,17 +260,6 @@ jobs:
console.log(releaseNote)
core.setOutput('version', runnerVersion);
core.setOutput('note', releaseNote);

- name: Validate Packages HASH
working-directory: _package
run: |
ls -l
echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
echo "${{needs.build.outputs.osx-x64-sha}} actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-x64-sha}} actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-arm-sha}} actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-arm64-sha}} actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c

# Create GitHub release
- uses: actions/create-release@master
id: createRelease
@@ -536,4 +525,4 @@ jobs:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
asset_content_type: application/octet-stream

6  .vscode/launch.json (vendored)
@@ -13,7 +13,6 @@
"cwd": "${workspaceFolder}/src",
"console": "integratedTerminal",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Run",
@@ -26,7 +25,6 @@
"cwd": "${workspaceFolder}/src",
"console": "integratedTerminal",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Configure",
@@ -40,7 +38,6 @@
"cwd": "${workspaceFolder}/src",
"console": "integratedTerminal",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Debug Worker",
@@ -48,7 +45,6 @@
"request": "attach",
"processName": "Runner.Worker",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Attach Debugger",
@@ -56,8 +52,6 @@
"request": "attach",
"processId": "${command:pickProcess}",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
],
}

@@ -5,6 +5,7 @@
# GitHub Actions Runner

[](https://github.com/actions/runner/actions)
[](https://github.com/actions-canary/actions-runner-e2e/actions/workflows/runner_e2etest.yml)

The runner is the application that runs a job from a GitHub Actions workflow. It is used by GitHub Actions in the [hosted virtual environments](https://github.com/actions/virtual-environments), or you can [self-host the runner](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners) in your own environment.

@@ -10,7 +10,7 @@ Compilation failures during a CI build should surface good error messages.

For example, the actual compile errors from the typescript compiler should bubble as issues in the UI. And not simply "tsc exited with exit code 1".

VSCode has an extensible model for solving this type of problem. VSCode allows users to configure which [problems matchers](https://code.visualstudio.com/docs/editor/tasks#_defining-a-problem-matcher) to use, when scanning output. For example, a user can apply the `tsc` problem matcher to receive a rich error output experience in VSCode, when compiling their typescript project.
VSCode has an extensible model for solving this type of problem. VSCode allows users to configure which problems matchers to use, when scanning output. For example, a user can apply the `tsc` problem matcher to receive a rich error output experience in VSCode, when compiling their typescript project.

The problem-matcher concept fits well with "setup" actions. For example, the `setup-nodejs` action will download node.js, add it to the PATH, and register the `tsc` problem matcher. For the duration of the job, the `tsc` problem matcher will be applied against the output.

@@ -18,23 +18,21 @@ The problem-matcher concept fits well with "setup" actions. For example, the `se

### Registration

#### Using `::` command
#### Using `##` command

`::add-matcher::path-to-problem-matcher-config.json`
`##[add-matcher]path-to-problem-matcher-config.json`

Using a `::` command allows for flexibility:
Using a `##` command allows for flexibility:
- Ad hoc scripts can register problem matchers
- Allows problem matchers to be conditionally registered

Note, if a matcher with the same name is registered a second time, it will clobber the first instance.

Note, at some point the syntax changed from `##` to `::`.

#### Unregister using `::` command
#### Unregister using `##` command

A way out for rare cases where scoping is a problem.

`::remove-matcher::owner`
`##[remove-matcher]owner`

For this to be usable, the `owner` needs to be discoverable. Therefore, debug print the owner on registration.

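(Editor's illustration.) A minimal sketch of what ad hoc registration from a step script could look like under the `::` syntax described above. The owner name `my-tsc`, the temp file path, and the regular expression are invented for this example, and the matcher config schema shown here is an assumption to verify against the runner's problem-matcher documentation.

```
# Write a matcher config, register it, and later unregister it (illustrative values).
cat > /tmp/my-matcher.json <<'EOF'
{
  "problemMatcher": [
    {
      "owner": "my-tsc",
      "pattern": [
        {
          "regexp": "^(.+)\\((\\d+),(\\d+)\\): (error|warning) (TS\\d+): (.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "severity": 4,
          "code": 5,
          "message": 6
        }
      ]
    }
  ]
}
EOF
echo "::add-matcher::/tmp/my-matcher.json"   # subsequent output is scanned with this matcher
npx tsc --noEmit || true                     # compiler errors now surface as annotations
echo "::remove-matcher::my-tsc"              # unregister, per the syntax above
```
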
@@ -106,7 +104,7 @@ message: ; expected
fromPath: C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj
```

Additionally the line will appear red in the web UI (prefix with `::error`).
Additionally the line will appear red in the web UI (prefix with `##[error]`).

Note, an error does not imply task failure. Exit codes communicate failure.

@@ -24,7 +24,7 @@ The runner will look for a file `.setup_info` under the runner's root directory,
}
]
```
The runner will use `::group` and `::endgroup` to fold all detail info into an expandable group.
The runner will use `##[group]` and `##[endgroup]` to fold all detail info into an expandable group.

Both [virtual-environments](https://github.com/actions/virtual-environments) and self-hosted runners can use this mechanism to add extra logging info to the `Set up job` step's log.

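(Editor's illustration.) A sketch of what a `.setup_info` file dropped under the runner root might look like; the key names (`group`, `detail`) and all values are assumptions for illustration, not taken from this diff.

```
# Write a .setup_info file that the runner folds into the "Set up job" log
# (key names and values are placeholders; verify against the runner docs).
cat > /path/to/runner/.setup_info <<'EOF'
[
  {
    "group": "OS Details",
    "detail": "Ubuntu 20.04 (placeholder)"
  },
  {
    "group": "Installed Software",
    "detail": "node 16.x, python 3.x (placeholder)"
  }
]
EOF
```
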
@@ -6,35 +6,13 @@
Make sure the runner has access to actions service for GitHub.com or GitHub Enterprise Server

- For GitHub.com
  - The runner needs to access `https://api.github.com` for downloading actions.
  - The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
  - The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.

These can be tested by running the following `curl` commands from your self-hosted runner machine:

```
curl -v https://api.github.com/api/v3/zen
curl -v https://vstoken.actions.githubusercontent.com/_apis/health
curl -v https://pipelines.actions.githubusercontent.com/_apis/health
```

- The runner needs to access https://api.github.com for downloading actions.
- The runner needs to access https://vstoken.actions.githubusercontent.com/_apis/.../ for requesting an access token.
- The runner needs to access https://pipelines.actions.githubusercontent.com/_apis/.../ for receiving workflow jobs.
- For GitHub Enterprise Server
  - The runner needs to access `https://[hostname]/api/v3` for downloading actions.
  - The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token.
  - The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs.

These can be tested by running the following `curl` commands from your self-hosted runner machine, replacing `[hostname]` with the hostname of your appliance, for instance `github.example.com`:

```
curl -v https://[hostname]/api/v3/zen
curl -v https://[hostname]/_services/vstoken/_apis/health
curl -v https://[hostname]/_services/pipelines/_apis/health
```

A common cause of these connectivity issues is if your GitHub Enterprise Server appliance is using [the self-signed certificate that is enabled the first time](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls) your appliance is started. As self-signed certificates are not trusted by web browsers and Git clients, these clients (including the GitHub Actions runner) will report certificate warnings.

We recommend [uploading a certificate signed by a trusted authority](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls) to GitHub Enterprise Server, or enabling the built-in [Let's Encrypt support](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls).

- The runner needs to access https://myGHES.com/api/v3 for downloading actions.
- The runner needs to access https://myGHES.com/_services/vstoken/_apis/.../ for requesting an access token.
- The runner needs to access https://myGHES.com/_services/pipelines/_apis/.../ for receiving workflow jobs.

## What is checked?

@@ -64,4 +42,4 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

## Still not working?

Contact [GitHub Support](https://support.github.com) if you have further questions, or log an issue at https://github.com/actions/runner if you think it's a runner issue.
Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.

@@ -4,9 +4,9 @@

Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.

The runner carries its own copy of node.js executable under `<runner_root>/externals/node16/`.
The runner carries it's own copy of node.js executable under `<runner_root>/externals/node12/`.

All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node16/`.
All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node12/`.

> Not the `node` from `$PATH`

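(Editor's illustration.) A quick way to confirm that the bundled node, rather than whatever `node` is on `$PATH`, can reach GitHub; the runner path is a placeholder, and the `node16`/`node12` folder name depends on which side of this change you are running.

```
# Run the runner's own node and attempt an HTTPS request to api.github.com.
/path/to/runner/externals/node16/bin/node -e \
  "require('https').get('https://api.github.com', res => { console.log('HTTP', res.statusCode); }).on('error', err => { console.error(err.message); process.exit(1); })"
```
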
@@ -23,10 +23,6 @@ An ADR is an Architectural Decision Record. This allows consensus on the direct

  Git for Windows and Linux [Install Here](https://git-scm.com/downloads) (needed for dev sh script)

 cURL [Install here](https://curl.se/download.html) (needed for external sh script)

 Visual Studio 2017 or newer [Install here](https://visualstudio.microsoft.com) (needed for dev sh script)

## Quickstart: Run a job from a real repository

If you just want to get from building the sourcecode to using it to execute an action, you will need:

@@ -1,18 +1,14 @@
## Features

- Added `github.triggering_actor` to the `github` context (#1726)
- Save step information when creating annotations (#1744)
- Improved performance of live log streaming (#1730, #1755)
- Added Beta support for job started and completed hooks (#1737)
- N/A

## Bugs

- Fix breaking change in dotnet 6 around globalization-invariant. (#1609)

## Misc

- Made some minor job telemetry improvements (#1747)
- Added repository name and workflow file name to `run.sh` output (#1761)

- N/A

## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.

@@ -1 +1 @@
2.289.0
2.286.1

@@ -13,7 +13,7 @@ set -e

flags_found=false

while getopts 's:g:n:r:u:l:' opt; do
while getopts 's:g:n:u:l:' opt; do
flags_found=true

case $opt in
@@ -26,9 +26,6 @@ while getopts 's:g:n:r:u:l:' opt; do
n)
runner_name=$OPTARG
;;
r)
runner_group=$OPTARG
;;
u)
svc_user=$OPTARG
;;
@@ -47,7 +44,6 @@ Usage:
-s required scope: repo (:owner/:repo) or org (:organization)
-g optional ghe_hostname: the fully qualified domain name of your GitHub Enterprise Server deployment
-n optional name of the runner, defaults to hostname
-r optional name of the runner group to add the runner to, defaults to the Default group
-u optional user svc will run as, defaults to current
-l optional list of labels (split by comma) applied on the runner"
exit 0
@@ -63,7 +59,6 @@ if ! "$flags_found"; then
runner_name=${3:-$(hostname)}
svc_user=${4:-$USER}
labels=${5}
runner_group=${6}
fi

# apply defaults
@@ -169,8 +164,8 @@ fi

echo
echo "Configuring ${runner_name} @ $runner_url"
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup \"$runner_group\"}"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup "$runner_group"}
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name --labels $labels"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name --labels $labels

#---------------------------------------
# Configuring as a service

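(Editor's illustration.) Based on the usage text above, an invocation could look like the following; the script name is a placeholder (the file name is not shown in this excerpt), the repo, runner, and user names are made up, and any token environment variable the script requires is omitted. Note that the `-r` runner-group flag exists only on one side of this change.

```
# Configure and install the latest runner as a service for a repository,
# with a custom runner name, service user, and labels (illustrative values).
./create-latest-svc.sh -s myorg/myrepo -n my-runner -u runner-user -l self-hosted,linux,x64
```
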
2  src/Misc/contentHash/externals/linux-arm (vendored)
@@ -1 +1 @@
6ed30a2c1ee403a610d63e82bb230b9ba846a9c25cec9e4ea8672fb6ed4e1a51
6ca4a0e1c50b7079ead05321dcf5835c1c25f23dc632add8c1c4667d416d103e

2  src/Misc/contentHash/externals/linux-arm64 (vendored)
@@ -1 +1 @@
711c30c51ec52c9b7a9a2eb399d6ab2ab5ee1dc72de11879f2f36f919f163d78
b5951dc607d782d9c7571a7224e940eb0975bb23c54ff25c7afdbf959a417081

2  src/Misc/contentHash/externals/linux-x64 (vendored)
@@ -1 +1 @@
a49479ca4b4988a06c097e8d22c51fd08a11c13f40807366236213d0e008cf6a
af819e92011cc9cbca90e8299f9f7651f2cf6bf45b42920f9a4ca22795486147

2  src/Misc/contentHash/externals/osx-x64 (vendored)
@@ -1 +1 @@
8e97df75230b843462a9b4c578ccec604ee4b4a1066120c85b04374317fa372b
aa0e6bf4bfaabf48c962ea3b262dca042629ab332005f73d282faec908847036

2  src/Misc/contentHash/externals/win-x64 (vendored)
@@ -1 +1 @@
f75a671e5a188c76680739689aa75331a2c09d483dce9c80023518c48fd67a18
40328cff2b8229f9b578f32739183bd8f6aab481c21dadc052b09f1c7e8e4665

3374  src/Misc/expressionFunc/hashFiles/package-lock.json (generated)
File diff suppressed because it is too large.

@@ -45,7 +45,7 @@ async function run(): Promise<void> {
result.end()

if (hasMatch) {
console.log(`Found ${count} files to hash.`)
console.log(`Find ${count} files to hash.`)
console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`)
} else {
console.error(`__OUTPUT____OUTPUT__`)
@@ -53,11 +53,3 @@ async function run(): Promise<void> {
}

run()
.then(out => {
console.log(out)
process.exit(0)
})
.catch(err => {
console.error(err)
process.exit(1)
})

@@ -3,7 +3,7 @@ PACKAGERUNTIME=$1
PRECACHE=$2

NODE_URL=https://nodejs.org/dist
NODE12_VERSION="12.22.7"
NODE12_VERSION="12.13.1"
NODE16_VERSION="16.13.0"

get_abs_path() {
@@ -143,7 +143,7 @@ fi
# Download the external tools for Linux PACKAGERUNTIMEs.
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-v${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
fi

@@ -3,135 +3,94 @@
|
||||
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
|
||||
|
||||
var childProcess = require("child_process");
|
||||
var path = require("path");
|
||||
var path = require("path")
|
||||
|
||||
var supported = ["linux", "darwin"];
|
||||
var supported = ['linux', 'darwin']
|
||||
|
||||
if (supported.indexOf(process.platform) == -1) {
|
||||
console.log("Unsupported platform: " + process.platform);
|
||||
console.log("Supported platforms are: " + supported.toString());
|
||||
process.exit(1);
|
||||
console.log('Unsupported platform: ' + process.platform);
|
||||
console.log('Supported platforms are: ' + supported.toString());
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
var stopping = false;
|
||||
var listener = null;
|
||||
|
||||
var exitServiceAfterNFailures = Number(
|
||||
process.env.GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES
|
||||
);
|
||||
var runService = function () {
|
||||
var listenerExePath = path.join(__dirname, '../bin/Runner.Listener');
|
||||
var interactive = process.argv[2] === "interactive";
|
||||
|
||||
if (exitServiceAfterNFailures <= 0) {
|
||||
exitServiceAfterNFailures = NaN;
|
||||
if (!stopping) {
|
||||
try {
|
||||
if (interactive) {
|
||||
console.log('Starting Runner listener interactively');
|
||||
listener = childProcess.spawn(listenerExePath, ['run'], { env: process.env });
|
||||
} else {
|
||||
console.log('Starting Runner listener with startup type: service');
|
||||
listener = childProcess.spawn(listenerExePath, ['run', '--startuptype', 'service'], { env: process.env });
|
||||
}
|
||||
|
||||
console.log(`Started listener process, pid: ${listener.pid}`);
|
||||
|
||||
listener.stdout.on('data', (data) => {
|
||||
process.stdout.write(data.toString('utf8'));
|
||||
});
|
||||
|
||||
listener.stderr.on('data', (data) => {
|
||||
process.stdout.write(data.toString('utf8'));
|
||||
});
|
||||
|
||||
listener.on("error", (err) => {
|
||||
console.log(`Runner listener fail to start with error ${err.message}`);
|
||||
});
|
||||
|
||||
listener.on('close', (code) => {
|
||||
console.log(`Runner listener exited with error code ${code}`);
|
||||
|
||||
if (code === 0) {
|
||||
console.log('Runner listener exit with 0 return code, stop the service, no retry needed.');
|
||||
stopping = true;
|
||||
} else if (code === 1) {
|
||||
console.log('Runner listener exit with terminated error, stop the service, no retry needed.');
|
||||
stopping = true;
|
||||
} else if (code === 2) {
|
||||
console.log('Runner listener exit with retryable error, re-launch runner in 5 seconds.');
|
||||
} else if (code === 3) {
|
||||
console.log('Runner listener exit because of updating, re-launch runner in 5 seconds.');
|
||||
} else {
|
||||
console.log('Runner listener exit with undefined return code, re-launch runner in 5 seconds.');
|
||||
}
|
||||
|
||||
if (!stopping) {
|
||||
setTimeout(runService, 5000);
|
||||
}
|
||||
});
|
||||
|
||||
} catch (ex) {
|
||||
console.log(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var consecutiveFailureCount = 0;
|
||||
|
||||
var gracefulShutdown = function () {
|
||||
console.log("Shutting down runner listener");
|
||||
stopping = true;
|
||||
if (listener) {
|
||||
console.log("Sending SIGINT to runner listener to stop");
|
||||
listener.kill("SIGINT");
|
||||
|
||||
console.log("Sending SIGKILL to runner listener");
|
||||
setTimeout(() => listener.kill("SIGKILL"), 30000).unref();
|
||||
}
|
||||
};
|
||||
|
||||
var runService = function () {
|
||||
var listenerExePath = path.join(__dirname, "../bin/Runner.Listener");
|
||||
var interactive = process.argv[2] === "interactive";
|
||||
|
||||
if (!stopping) {
|
||||
try {
|
||||
if (interactive) {
|
||||
console.log("Starting Runner listener interactively");
|
||||
listener = childProcess.spawn(listenerExePath, ["run"], {
|
||||
env: process.env,
|
||||
});
|
||||
} else {
|
||||
console.log("Starting Runner listener with startup type: service");
|
||||
listener = childProcess.spawn(
|
||||
listenerExePath,
|
||||
["run", "--startuptype", "service"],
|
||||
{ env: process.env }
|
||||
);
|
||||
}
|
||||
|
||||
console.log(`Started listener process, pid: ${listener.pid}`);
|
||||
|
||||
listener.stdout.on("data", (data) => {
|
||||
if (data.toString("utf8").includes("Listening for Jobs")) {
|
||||
consecutiveFailureCount = 0;
|
||||
}
|
||||
process.stdout.write(data.toString("utf8"));
|
||||
});
|
||||
|
||||
listener.stderr.on("data", (data) => {
|
||||
process.stdout.write(data.toString("utf8"));
|
||||
});
|
||||
|
||||
listener.on("error", (err) => {
|
||||
console.log(`Runner listener fail to start with error ${err.message}`);
|
||||
});
|
||||
|
||||
listener.on("close", (code) => {
|
||||
console.log(`Runner listener exited with error code ${code}`);
|
||||
|
||||
if (code === 0) {
|
||||
console.log(
|
||||
"Runner listener exit with 0 return code, stop the service, no retry needed."
|
||||
);
|
||||
stopping = true;
|
||||
} else if (code === 1) {
|
||||
console.log(
|
||||
"Runner listener exit with terminated error, stop the service, no retry needed."
|
||||
);
|
||||
stopping = true;
|
||||
} else if (code === 2) {
|
||||
console.log(
|
||||
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
|
||||
);
|
||||
consecutiveFailureCount = 0;
|
||||
} else if (code === 3 || code === 4) {
|
||||
console.log(
|
||||
"Runner listener exit because of updating, re-launch runner in 5 seconds."
|
||||
);
|
||||
consecutiveFailureCount = 0;
|
||||
} else {
|
||||
var messagePrefix = "Runner listener exit with undefined return code";
|
||||
consecutiveFailureCount++;
|
||||
if (
|
||||
!isNaN(exitServiceAfterNFailures) &&
|
||||
consecutiveFailureCount >= exitServiceAfterNFailures
|
||||
) {
|
||||
console.error(
|
||||
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
|
||||
);
|
||||
gracefulShutdown();
|
||||
return;
|
||||
} else {
|
||||
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
|
||||
}
|
||||
}
|
||||
|
||||
if (!stopping) {
|
||||
setTimeout(runService, 5000);
|
||||
}
|
||||
});
|
||||
} catch (ex) {
|
||||
console.log(ex);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
runService();
|
||||
console.log("Started running service");
|
||||
console.log('Started running service');
|
||||
|
||||
process.on("SIGINT", () => {
|
||||
gracefulShutdown();
|
||||
var gracefulShutdown = function (code) {
|
||||
console.log('Shutting down runner listener');
|
||||
stopping = true;
|
||||
if (listener) {
|
||||
console.log('Sending SIGINT to runner listener to stop');
|
||||
listener.kill('SIGINT');
|
||||
|
||||
console.log('Sending SIGKILL to runner listener');
|
||||
setTimeout(() => listener.kill('SIGKILL'), 30000);
|
||||
}
|
||||
}
|
||||
|
||||
process.on('SIGINT', () => {
|
||||
gracefulShutdown(0);
|
||||
});
|
||||
|
||||
process.on("SIGTERM", () => {
|
||||
gracefulShutdown();
|
||||
process.on('SIGTERM', () => {
|
||||
gracefulShutdown(0);
|
||||
});
|
||||
|
||||
@@ -17,13 +17,7 @@ RUNNER_ROOT=`pwd`
|
||||
|
||||
LAUNCH_PATH="${HOME}/Library/LaunchAgents"
|
||||
PLIST_PATH="${LAUNCH_PATH}/${SVC_NAME}.plist"
|
||||
TEMPLATE_PATH=$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE
|
||||
IS_CUSTOM_TEMPLATE=0
|
||||
if [[ -z $TEMPLATE_PATH ]]; then
|
||||
TEMPLATE_PATH=./bin/actions.runner.plist.template
|
||||
else
|
||||
IS_CUSTOM_TEMPLATE=1
|
||||
fi
|
||||
TEMPLATE_PATH=./bin/actions.runner.plist.template
|
||||
TEMP_PATH=./bin/actions.runner.plist.temp
|
||||
CONFIG_PATH=.service
|
||||
|
||||
@@ -35,11 +29,7 @@ function failed()
|
||||
}
|
||||
|
||||
if [ ! -f "${TEMPLATE_PATH}" ]; then
|
||||
if [[ $IS_CUSTOM_TEMPLATE = 0 ]]; then
|
||||
failed "Must run from runner root or install is corrupt"
|
||||
else
|
||||
failed "Service file at '$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE' using GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE env variable is not found"
|
||||
fi
|
||||
failed "Must run from runner root or install is corrupt"
|
||||
fi
|
||||
|
||||
function install()
|
||||
@@ -63,7 +53,7 @@ function install()
|
||||
mkdir -p "${log_path}" || failed "failed to create ${log_path}"
|
||||
|
||||
echo Creating ${PLIST_PATH}
|
||||
sed "s/{{User}}/${USER:-$SUDO_USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
|
||||
sed "s/{{User}}/${SUDO_USER:-$USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
|
||||
mv "${TEMP_PATH}" "${PLIST_PATH}" || failed "failed to copy plist"
|
||||
|
||||
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
|
||||
|
||||
@@ -43,32 +43,6 @@ module.exports =
|
||||
/************************************************************************/
|
||||
/******/ ({
|
||||
|
||||
/***/ 82:
|
||||
/***/ (function(__unusedmodule, exports) {
|
||||
|
||||
"use strict";
|
||||
|
||||
// We use any as a valid input type
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
/**
|
||||
* Sanitizes an input into a string so it can be passed into issueCommand safely
|
||||
* @param input input to sanitize into a string
|
||||
*/
|
||||
function toCommandValue(input) {
|
||||
if (input === null || input === undefined) {
|
||||
return '';
|
||||
}
|
||||
else if (typeof input === 'string' || input instanceof String) {
|
||||
return input;
|
||||
}
|
||||
return JSON.stringify(input);
|
||||
}
|
||||
exports.toCommandValue = toCommandValue;
|
||||
//# sourceMappingURL=utils.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 87:
|
||||
/***/ (function(module) {
|
||||
|
||||
@@ -1004,42 +978,6 @@ function regExpEscape (s) {
|
||||
}
|
||||
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 102:
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
// For internal use, subject to change.
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
// We use any as a valid input type
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
const fs = __importStar(__webpack_require__(747));
|
||||
const os = __importStar(__webpack_require__(87));
|
||||
const utils_1 = __webpack_require__(82);
|
||||
function issueCommand(command, message) {
|
||||
const filePath = process.env[`GITHUB_${command}`];
|
||||
if (!filePath) {
|
||||
throw new Error(`Unable to find environment variable for file command ${command}`);
|
||||
}
|
||||
if (!fs.existsSync(filePath)) {
|
||||
throw new Error(`Missing file at path: ${filePath}`);
|
||||
}
|
||||
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
|
||||
encoding: 'utf8'
|
||||
});
|
||||
}
|
||||
exports.issueCommand = issueCommand;
|
||||
//# sourceMappingURL=file-command.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 281:
|
||||
@@ -1613,7 +1551,7 @@ function run() {
|
||||
}
|
||||
result.end();
|
||||
if (hasMatch) {
|
||||
console.log(`Found ${count} files to hash.`);
|
||||
console.log(`Find ${count} files to hash.`);
|
||||
console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`);
|
||||
}
|
||||
else {
|
||||
@@ -1621,15 +1559,7 @@ function run() {
|
||||
}
|
||||
});
|
||||
}
|
||||
run()
|
||||
.then(out => {
|
||||
console.log(out);
|
||||
process.exit(0);
|
||||
})
|
||||
.catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
run();
|
||||
|
||||
|
||||
/***/ }),
|
||||
@@ -1757,25 +1687,17 @@ module.exports = require("crypto");
|
||||
|
||||
"use strict";
|
||||
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const os = __importStar(__webpack_require__(87));
|
||||
const utils_1 = __webpack_require__(82);
|
||||
const os = __webpack_require__(87);
|
||||
/**
|
||||
* Commands
|
||||
*
|
||||
* Command Format:
|
||||
* ::name key=value,key=value::message
|
||||
* ##[name key=value;key=value]message
|
||||
*
|
||||
* Examples:
|
||||
* ::warning::This is the message
|
||||
* ::set-env name=MY_VAR::some value
|
||||
* ##[warning]This is the user warning message
|
||||
* ##[set-secret name=mypassword]definitelyNotAPassword!
|
||||
*/
|
||||
function issueCommand(command, properties, message) {
|
||||
const cmd = new Command(command, properties, message);
|
||||
@@ -1800,39 +1722,34 @@ class Command {
|
||||
let cmdStr = CMD_STRING + this.command;
|
||||
if (this.properties && Object.keys(this.properties).length > 0) {
|
||||
cmdStr += ' ';
|
||||
let first = true;
|
||||
for (const key in this.properties) {
|
||||
if (this.properties.hasOwnProperty(key)) {
|
||||
const val = this.properties[key];
|
||||
if (val) {
|
||||
if (first) {
|
||||
first = false;
|
||||
}
|
||||
else {
|
||||
cmdStr += ',';
|
||||
}
|
||||
cmdStr += `${key}=${escapeProperty(val)}`;
|
||||
// safely append the val - avoid blowing up when attempting to
|
||||
// call .replace() if message is not a string for some reason
|
||||
cmdStr += `${key}=${escape(`${val || ''}`)},`;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
|
||||
cmdStr += CMD_STRING;
|
||||
// safely append the message - avoid blowing up when attempting to
|
||||
// call .replace() if message is not a string for some reason
|
||||
const message = `${this.message || ''}`;
|
||||
cmdStr += escapeData(message);
|
||||
return cmdStr;
|
||||
}
|
||||
}
|
||||
function escapeData(s) {
|
||||
return utils_1.toCommandValue(s)
|
||||
.replace(/%/g, '%25')
|
||||
.replace(/\r/g, '%0D')
|
||||
.replace(/\n/g, '%0A');
|
||||
return s.replace(/\r/g, '%0D').replace(/\n/g, '%0A');
|
||||
}
|
||||
function escapeProperty(s) {
|
||||
return utils_1.toCommandValue(s)
|
||||
.replace(/%/g, '%25')
|
||||
function escape(s) {
|
||||
return s
|
||||
.replace(/\r/g, '%0D')
|
||||
.replace(/\n/g, '%0A')
|
||||
.replace(/:/g, '%3A')
|
||||
.replace(/,/g, '%2C');
|
||||
.replace(/]/g, '%5D')
|
||||
.replace(/;/g, '%3B');
|
||||
}
|
||||
//# sourceMappingURL=command.js.map
|
||||
|
||||
@@ -1852,19 +1769,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const command_1 = __webpack_require__(431);
|
||||
const file_command_1 = __webpack_require__(102);
|
||||
const utils_1 = __webpack_require__(82);
|
||||
const os = __importStar(__webpack_require__(87));
|
||||
const path = __importStar(__webpack_require__(622));
|
||||
const os = __webpack_require__(87);
|
||||
const path = __webpack_require__(622);
|
||||
/**
|
||||
* The code to exit an action
|
||||
*/
|
||||
@@ -1885,21 +1793,11 @@ var ExitCode;
|
||||
/**
|
||||
* Sets env variable for this action and future actions in the job
|
||||
* @param name the name of the variable to set
|
||||
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
|
||||
* @param val the value of the variable
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function exportVariable(name, val) {
|
||||
const convertedVal = utils_1.toCommandValue(val);
|
||||
process.env[name] = convertedVal;
|
||||
const filePath = process.env['GITHUB_ENV'] || '';
|
||||
if (filePath) {
|
||||
const delimiter = '_GitHubActionsFileCommandDelimeter_';
|
||||
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
|
||||
file_command_1.issueCommand('ENV', commandValue);
|
||||
}
|
||||
else {
|
||||
command_1.issueCommand('set-env', { name }, convertedVal);
|
||||
}
|
||||
process.env[name] = val;
|
||||
command_1.issueCommand('set-env', { name }, val);
|
||||
}
|
||||
exports.exportVariable = exportVariable;
|
||||
/**
|
||||
@@ -1915,13 +1813,7 @@ exports.setSecret = setSecret;
|
||||
* @param inputPath
|
||||
*/
|
||||
function addPath(inputPath) {
|
||||
const filePath = process.env['GITHUB_PATH'] || '';
|
||||
if (filePath) {
|
||||
file_command_1.issueCommand('PATH', inputPath);
|
||||
}
|
||||
else {
|
||||
command_1.issueCommand('add-path', {}, inputPath);
|
||||
}
|
||||
command_1.issueCommand('add-path', {}, inputPath);
|
||||
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
|
||||
}
|
||||
exports.addPath = addPath;
|
||||
@@ -1944,22 +1836,12 @@ exports.getInput = getInput;
|
||||
* Sets the value of an output.
|
||||
*
|
||||
* @param name name of the output to set
|
||||
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
||||
* @param value value to store
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function setOutput(name, value) {
|
||||
command_1.issueCommand('set-output', { name }, value);
|
||||
}
|
||||
exports.setOutput = setOutput;
|
||||
/**
|
||||
* Enables or disables the echoing of commands into stdout for the rest of the step.
|
||||
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
|
||||
*
|
||||
*/
|
||||
function setCommandEcho(enabled) {
|
||||
command_1.issue('echo', enabled ? 'on' : 'off');
|
||||
}
|
||||
exports.setCommandEcho = setCommandEcho;
|
||||
//-----------------------------------------------------------------------
|
||||
// Results
|
||||
//-----------------------------------------------------------------------
|
||||
@@ -1976,13 +1858,6 @@ exports.setFailed = setFailed;
|
||||
//-----------------------------------------------------------------------
|
||||
// Logging Commands
|
||||
//-----------------------------------------------------------------------
|
||||
/**
|
||||
* Gets whether Actions Step Debug is on or not
|
||||
*/
|
||||
function isDebug() {
|
||||
return process.env['RUNNER_DEBUG'] === '1';
|
||||
}
|
||||
exports.isDebug = isDebug;
|
||||
/**
|
||||
* Writes debug message to user log
|
||||
* @param message debug message
|
||||
@@ -1993,18 +1868,18 @@ function debug(message) {
|
||||
exports.debug = debug;
|
||||
/**
|
||||
* Adds an error issue
|
||||
* @param message error issue message. Errors will be converted to string via toString()
|
||||
* @param message error issue message
|
||||
*/
|
||||
function error(message) {
|
||||
command_1.issue('error', message instanceof Error ? message.toString() : message);
|
||||
command_1.issue('error', message);
|
||||
}
|
||||
exports.error = error;
|
||||
/**
|
||||
* Adds an warning issue
|
||||
* @param message warning issue message. Errors will be converted to string via toString()
|
||||
* @param message warning issue message
|
||||
*/
|
||||
function warning(message) {
|
||||
command_1.issue('warning', message instanceof Error ? message.toString() : message);
|
||||
command_1.issue('warning', message);
|
||||
}
|
||||
exports.warning = warning;
|
||||
/**
|
||||
@@ -2062,9 +1937,8 @@ exports.group = group;
|
||||
* Saves state for current action, the state can only be retrieved by this action's post job execution.
|
||||
*
|
||||
* @param name name of the state to store
|
||||
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
||||
* @param value value to store
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function saveState(name, value) {
|
||||
command_1.issueCommand('save-state', { name }, value);
|
||||
}
|
||||
|
||||
@@ -10,11 +10,10 @@ if [ -f ".path" ]; then
|
||||
echo ".path=${PATH}"
|
||||
fi
|
||||
|
||||
nodever=${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node16}
|
||||
|
||||
# insert anything to setup env when running as a service
|
||||
|
||||
# run the host process which keep the listener alive
|
||||
./externals/$nodever/bin/node ./bin/RunnerService.js &
|
||||
./externals/node12/bin/node ./bin/RunnerService.js &
|
||||
PID=$!
|
||||
wait $PID
|
||||
trap - TERM INT
|
||||
|
||||
@@ -10,13 +10,7 @@ arg_2=${2}
|
||||
RUNNER_ROOT=`pwd`
|
||||
|
||||
UNIT_PATH=/etc/systemd/system/${SVC_NAME}
|
||||
TEMPLATE_PATH=$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE
|
||||
IS_CUSTOM_TEMPLATE=0
|
||||
if [[ -z $TEMPLATE_PATH ]]; then
|
||||
TEMPLATE_PATH=./bin/actions.runner.service.template
|
||||
else
|
||||
IS_CUSTOM_TEMPLATE=1
|
||||
fi
|
||||
TEMPLATE_PATH=./bin/actions.runner.service.template
|
||||
TEMP_PATH=./bin/actions.runner.service.temp
|
||||
CONFIG_PATH=.service
|
||||
|
||||
@@ -37,11 +31,7 @@ function failed()
|
||||
}
|
||||
|
||||
if [ ! -f "${TEMPLATE_PATH}" ]; then
|
||||
if [[ $IS_CUSTOM_TEMPLATE = 0 ]]; then
|
||||
failed "Must run from runner root or install is corrupt"
|
||||
else
|
||||
failed "Service file at '$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE' using GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE env variable is not found"
|
||||
fi
|
||||
failed "Must run from runner root or install is corrupt"
|
||||
fi
|
||||
|
||||
#check if we run as root
|
||||
|
||||
@@ -30,13 +30,13 @@ date "+[%F %T-%4N] Waiting for $runnerprocessname ($runnerpid) to complete" >> "
|
||||
while [ -e /proc/$runnerpid ]
|
||||
do
|
||||
date "+[%F %T-%4N] Process $runnerpid still running" >> "$logfile" 2>&1
|
||||
"$rootfolder"/safe_sleep.sh 2
|
||||
sleep 2
|
||||
done
|
||||
date "+[%F %T-%4N] Process $runnerpid finished running" >> "$logfile" 2>&1
|
||||
|
||||
# start re-organize folders
|
||||
date "+[%F %T-%4N] Sleep 1 more second to make sure process exited" >> "$logfile" 2>&1
|
||||
"$rootfolder"/safe_sleep.sh 1
|
||||
sleep 1
|
||||
|
||||
# the folder structure under runner root will be
|
||||
# ./bin -> bin.2.100.0 (junction folder)
|
||||
@@ -125,7 +125,7 @@ attemptedtargetedfix=0
|
||||
currentplatform=$(uname | awk '{print tolower($0)}')
|
||||
if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
|
||||
# We needed a fix for https://github.com/actions/runner/issues/743
|
||||
# We will recreate the ./externals/nodeXY/bin/node of the past runner version that launched the runnerlistener service
|
||||
# We will recreate the ./externals/node12/bin/node of the past runner version that launched the runnerlistener service
|
||||
# Otherwise mac gatekeeper kills the processes we spawn on creation as we are running a process with no backing file
|
||||
|
||||
# We need the pid for the nodejs loop, get that here, its the parent of the runner C# pid
|
||||
@@ -135,13 +135,7 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
|
||||
then
|
||||
# inspect the open file handles to find the node process
|
||||
# we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks
|
||||
nodever="node16"
|
||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
|
||||
then
|
||||
nodever="node12"
|
||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||
fi
|
||||
path=$(lsof -a -g "$procgroup" -F n | grep node12/bin/node | grep externals | tail -1 | cut -c2-)
|
||||
if [[ $? -eq 0 && -n "$path" ]]
|
||||
then
|
||||
# trim the last 5 characters of the path '/node'
|
||||
@@ -154,7 +148,7 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
|
||||
then
|
||||
date "+[%F %T-%4N] Creating fallback node at path $path" >> "$logfile" 2>&1
|
||||
mkdir -p "$trimmedpath"
|
||||
cp "$rootfolder/externals/$nodever/bin/node" "$path"
|
||||
cp "$rootfolder/externals/node12/bin/node" "$path"
|
||||
else
|
||||
date "+[%F %T-%4N] Path for fallback node exists, skipping creating $path" >> "$logfile" 2>&1
|
||||
fi
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
@echo off
|
||||
|
||||
"%~dp0\bin\Runner.Listener.exe" run %*
|
||||
|
||||
rem using `if %ERRORLEVEL% EQU N` insterad of `if ERRORLEVEL N`
|
||||
rem `if ERRORLEVEL N` means: error level is N or MORE
|
||||
|
||||
if %ERRORLEVEL% EQU 0 (
|
||||
echo "Runner listener exit with 0 return code, stop the service, no retry needed."
|
||||
exit /b 0
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 1 (
|
||||
echo "Runner listener exit with terminated error, stop the service, no retry needed."
|
||||
exit /b 0
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 2 (
|
||||
echo "Runner listener exit with retryable error, re-launch runner in 5 seconds."
|
||||
ping 127.0.0.1 -n 6 -w 1000 >NUL
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 3 (
|
||||
rem Sleep 5 seconds to wait for the runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
|
||||
ping 127.0.0.1 -n 6 -w 1000 >NUL
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 4 (
|
||||
rem Sleep 5 seconds to wait for the ephemeral runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
|
||||
ping 127.0.0.1 -n 6 -w 1000 >NUL
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
echo "Exiting after unknown error code: %ERRORLEVEL%"
|
||||
exit /b 0
|
||||
@@ -1,46 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Validate not sudo
|
||||
user_id=`id -u`
|
||||
if [ $user_id -eq 0 -a -z "$RUNNER_ALLOW_RUNASROOT" ]; then
|
||||
echo "Must not run interactively with sudo"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Run
|
||||
shopt -s nocasematch
|
||||
|
||||
SOURCE="${BASH_SOURCE[0]}"
|
||||
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
|
||||
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
|
||||
SOURCE="$(readlink "$SOURCE")"
|
||||
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
|
||||
done
|
||||
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
|
||||
"$DIR"/bin/Runner.Listener run $*
|
||||
|
||||
returnCode=$?
|
||||
if [[ $returnCode == 0 ]]; then
|
||||
echo "Runner listener exit with 0 return code, stop the service, no retry needed."
|
||||
exit 0
|
||||
elif [[ $returnCode == 1 ]]; then
|
||||
echo "Runner listener exit with terminated error, stop the service, no retry needed."
|
||||
exit 0
|
||||
elif [[ $returnCode == 2 ]]; then
|
||||
echo "Runner listener exit with retryable error, re-launch runner in 5 seconds."
|
||||
"$DIR"/safe_sleep.sh 5
|
||||
exit 2
|
||||
elif [[ $returnCode == 3 ]]; then
|
||||
# Sleep 5 seconds to wait for the runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
|
||||
"$DIR"/safe_sleep.sh 5
|
||||
exit 2
|
||||
elif [[ $returnCode == 4 ]]; then
|
||||
# Sleep 5 seconds to wait for the ephemeral runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
|
||||
"$DIR"/safe_sleep.sh 5
|
||||
exit 2
|
||||
else
|
||||
echo "Exiting with unknown error code: ${returnCode}"
|
||||
exit 0
|
||||
fi
|
||||
@@ -13,19 +13,21 @@ if defined VERBOSE_ARG (
|
||||
rem Unblock files in the root of the layout folder. E.g. .cmd files.
|
||||
powershell.exe -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "$VerbosePreference = %VERBOSE_ARG% ; Get-ChildItem -LiteralPath '%~dp0' | ForEach-Object { Write-Verbose ('Unblock: {0}' -f $_.FullName) ; $_ } | Unblock-File | Out-Null"
|
||||
|
||||
if /i "%~1" equ "localRun" (
|
||||
rem ********************************************************************************
|
||||
rem Local run.
|
||||
rem ********************************************************************************
|
||||
"%~dp0bin\Runner.Listener.exe" %*
|
||||
) else (
|
||||
rem ********************************************************************************
|
||||
rem Run.
|
||||
rem ********************************************************************************
|
||||
"%~dp0bin\Runner.Listener.exe" run %*
|
||||
|
||||
rem ********************************************************************************
|
||||
rem Run.
|
||||
rem ********************************************************************************
|
||||
|
||||
:launch_helper
|
||||
copy "%~dp0run-helper.cmd.template" "%~dp0run-helper.cmd" /Y
|
||||
call "%~dp0run-helper.cmd" %*
|
||||
|
||||
if %ERRORLEVEL% EQU 1 (
|
||||
echo "Restarting runner..."
|
||||
goto :launch_helper
|
||||
) else (
|
||||
echo "Exiting runner..."
|
||||
exit /b 0
|
||||
rem Return code 4 means the run once runner received an update message.
|
||||
rem Sleep 5 seconds to wait for the update process finish and run the runner again.
|
||||
if ERRORLEVEL 4 (
|
||||
timeout /t 5 /nobreak > NUL
|
||||
"%~dp0bin\Runner.Listener.exe" run %*
|
||||
)
|
||||
)
|
||||
|
||||
@@ -1,24 +1,64 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Validate not sudo
|
||||
user_id=`id -u`
|
||||
if [ $user_id -eq 0 -a -z "$RUNNER_ALLOW_RUNASROOT" ]; then
|
||||
echo "Must not run interactively with sudo"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Change directory to the script root directory
|
||||
# https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within
|
||||
SOURCE="${BASH_SOURCE[0]}"
|
||||
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
|
||||
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
|
||||
SOURCE="$(readlink "$SOURCE")"
|
||||
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
|
||||
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
|
||||
SOURCE="$(readlink "$SOURCE")"
|
||||
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
|
||||
done
|
||||
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
|
||||
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
|
||||
# run the helper process which keep the listener alive
|
||||
while :;
|
||||
do
|
||||
"$DIR"/run-helper.sh $*
|
||||
|
||||
# Do not "cd $DIR". For localRun, the current directory is expected to be the repo location on disk.
|
||||
|
||||
# Run
|
||||
shopt -s nocasematch
|
||||
if [[ "$1" == "localRun" ]]; then
|
||||
"$DIR"/bin/Runner.Listener $*
|
||||
else
|
||||
"$DIR"/bin/Runner.Listener run $*
|
||||
|
||||
# Return code 3 means the run once runner received an update message.
|
||||
# Sleep 5 seconds to wait for the update process finish
|
||||
returnCode=$?
|
||||
if [[ $returnCode -eq 2 ]]; then
|
||||
echo "Restarting runner..."
|
||||
if [[ $returnCode == 3 ]]; then
|
||||
if [ ! -x "$(command -v sleep)" ]; then
|
||||
if [ ! -x "$(command -v ping)" ]; then
|
||||
COUNT="0"
|
||||
while [[ $COUNT != 5000 ]]; do
|
||||
echo "SLEEP" > /dev/null
|
||||
COUNT=$[$COUNT+1]
|
||||
done
|
||||
else
|
||||
ping -c 5 127.0.0.1 > /dev/null
|
||||
fi
|
||||
else
|
||||
sleep 5
|
||||
fi
|
||||
elif [[ $returnCode == 4 ]]; then
|
||||
if [ ! -x "$(command -v sleep)" ]; then
|
||||
if [ ! -x "$(command -v ping)" ]; then
|
||||
COUNT="0"
|
||||
while [[ $COUNT != 5000 ]]; do
|
||||
echo "SLEEP" > /dev/null
|
||||
COUNT=$[$COUNT+1]
|
||||
done
|
||||
else
|
||||
ping -c 5 127.0.0.1 > /dev/null
|
||||
fi
|
||||
else
|
||||
sleep 5
|
||||
fi
|
||||
"$DIR"/bin/Runner.Listener run $*
|
||||
else
|
||||
echo "Exiting runner..."
|
||||
exit 0
|
||||
exit $returnCode
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
SECONDS=0
|
||||
while [[ $SECONDS != $1 ]]; do
|
||||
:
|
||||
done
|
||||
@@ -33,9 +33,6 @@ namespace GitHub.Runner.Common
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string PoolName { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public bool DisableUpdate { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public bool Ephemeral { get; set; }
|
||||
|
||||
|
||||
@@ -129,7 +129,6 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string Ephemeral = "ephemeral";
|
||||
public static readonly string Help = "help";
|
||||
public static readonly string Replace = "replace";
|
||||
public static readonly string DisableUpdate = "disableupdate";
|
||||
public static readonly string Once = "once"; // Keep this around since customers still relies on it
|
||||
public static readonly string RunAsService = "runasservice";
|
||||
public static readonly string Unattended = "unattended";
|
||||
@@ -156,8 +155,7 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string LowDiskSpace = "LOW_DISK_SPACE";
|
||||
public static readonly string UnsupportedCommand = "UNSUPPORTED_COMMAND";
|
||||
public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/";
|
||||
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
|
||||
public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
|
||||
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
|
||||
}
|
||||
|
||||
public static class RunnerEvent
|
||||
@@ -189,12 +187,6 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string Success = "success";
|
||||
}
|
||||
|
||||
public static class Hooks
|
||||
{
|
||||
public static readonly string JobStartedStepName = "Set up runner";
|
||||
public static readonly string JobCompletedStepName = "Complete runner";
|
||||
}
|
||||
|
||||
public static class Path
|
||||
{
|
||||
public static readonly string ActionsDirectory = "_actions";
|
||||
@@ -226,15 +218,11 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string AllowUnsupportedStopCommandTokens = "ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS";
|
||||
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
|
||||
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
|
||||
public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
|
||||
}
|
||||
|
||||
public static class Agent
|
||||
{
|
||||
public static readonly string ToolsDirectory = "agent.ToolsDirectory";
|
||||
|
||||
// Set this env var to "node12" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
|
||||
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
|
||||
}
|
||||
|
||||
public static class System
|
||||
|
||||
@@ -60,7 +60,6 @@ namespace GitHub.Runner.Common
|
||||
case "GitHub.Runner.Worker.IFileCommandExtension":
|
||||
Add<T>(extensions, "GitHub.Runner.Worker.AddPathFileCommand, Runner.Worker");
|
||||
Add<T>(extensions, "GitHub.Runner.Worker.SetEnvFileCommand, Runner.Worker");
|
||||
Add<T>(extensions, "GitHub.Runner.Worker.CreateStepSummaryCommand, Runner.Worker");
|
||||
break;
|
||||
case "GitHub.Runner.Listener.Check.ICheckExtension":
|
||||
Add<T>(extensions, "GitHub.Runner.Listener.Check.InternetCheck, Runner.Listener");
|
||||
|
||||
@@ -193,11 +193,6 @@ namespace GitHub.Runner.Common
|
||||
_trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
|
||||
}
|
||||
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
||||
{
|
||||
_trace.Warning($"Runner is running under insecure mode: HTTPS server certifcate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
|
||||
}
|
||||
|
||||
var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
|
||||
if (File.Exists(credFile))
|
||||
{
|
||||
@@ -216,8 +211,6 @@ namespace GitHub.Runner.Common
|
||||
_userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture)));
|
||||
_userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture)));
|
||||
}
|
||||
|
||||
_userAgents.Add(new ProductInfoHeaderValue("CommitSHA", BuildConstants.Source.CommitHash));
|
||||
}
|
||||
|
||||
public string GetDirectory(WellKnownDirectory directory)
|
||||
@@ -357,7 +350,7 @@ namespace GitHub.Runner.Common
|
||||
GetDirectory(WellKnownDirectory.Root),
|
||||
".setup_info");
|
||||
break;
|
||||
|
||||
|
||||
case WellKnownConfigFile.Telemetry:
|
||||
path = Path.Combine(
|
||||
GetDirectory(WellKnownDirectory.Diag),
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
using System;
|
||||
using System.Net.Http;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
@@ -14,14 +13,7 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
public HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy)
|
||||
{
|
||||
var client = new HttpClientHandler() { Proxy = webProxy };
|
||||
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
||||
{
|
||||
client.ServerCertificateCustomValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
|
||||
}
|
||||
|
||||
return client;
|
||||
return new HttpClientHandler() { Proxy = webProxy };
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,36 +1,32 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Net.Http;
|
||||
using System.Net.WebSockets;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
using Newtonsoft.Json;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
{
|
||||
[ServiceLocator(Default = typeof(JobServer))]
|
||||
public interface IJobServer : IRunnerService, IAsyncDisposable
|
||||
public interface IJobServer : IRunnerService
|
||||
{
|
||||
Task ConnectAsync(VssConnection jobConnection);
|
||||
|
||||
void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
|
||||
|
||||
// logging and console
|
||||
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
|
||||
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
|
||||
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken);
|
||||
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken);
|
||||
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
|
||||
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
|
||||
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
|
||||
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
|
||||
Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent;
|
||||
Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
|
||||
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken);
|
||||
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
public sealed class JobServer : RunnerService, IJobServer
|
||||
@@ -38,20 +34,6 @@ namespace GitHub.Runner.Common
|
||||
private bool _hasConnection;
|
||||
private VssConnection _connection;
|
||||
private TaskHttpClient _taskClient;
|
||||
private ClientWebSocket _websocketClient;
|
||||
|
||||
private ServiceEndpoint _serviceEndpoint;
|
||||
|
||||
private int totalBatchedLinesAttemptedByWebsocket = 0;
|
||||
private int failedAttemptsToPostBatchedLinesByWebsocket = 0;
|
||||
|
||||
|
||||
private static readonly TimeSpan _minDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(100);
|
||||
private static readonly TimeSpan _maxDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(500);
|
||||
private static readonly int _minWebsocketFailurePercentageAllowed = 50;
|
||||
private static readonly int _minWebsocketBatchedLinesCountToConsider = 5;
|
||||
|
||||
private Task _websocketConnectTask;
|
||||
|
||||
public async Task ConnectAsync(VssConnection jobConnection)
|
||||
{
|
||||
@@ -60,7 +42,7 @@ namespace GitHub.Runner.Common
|
||||
int attemptCount = totalAttempts;
|
||||
var configurationStore = HostContext.GetService<IConfigurationStore>();
|
||||
var runnerSettings = configurationStore.GetSettings();
|
||||
|
||||
|
||||
while (!_connection.HasAuthenticated && attemptCount-- > 0)
|
||||
{
|
||||
try
|
||||
@@ -135,19 +117,6 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
public void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint)
|
||||
{
|
||||
this._serviceEndpoint = serviceEndpoint;
|
||||
InitializeWebsocketClient(TimeSpan.Zero);
|
||||
}
|
||||
|
||||
public ValueTask DisposeAsync()
|
||||
{
|
||||
_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, "Shutdown", CancellationToken.None);
|
||||
GC.SuppressFinalize(this);
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
private void CheckConnection()
|
||||
{
|
||||
if (!_hasConnection)
|
||||
@@ -156,48 +125,6 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
private void InitializeWebsocketClient(TimeSpan delay)
|
||||
{
|
||||
if (_serviceEndpoint.Authorization != null &&
|
||||
_serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out var accessToken) &&
|
||||
!string.IsNullOrEmpty(accessToken))
|
||||
{
|
||||
if (_serviceEndpoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl) && !string.IsNullOrEmpty(feedStreamUrl))
|
||||
{
|
||||
// let's ensure we use the right scheme
|
||||
feedStreamUrl = feedStreamUrl.Replace("https://", "wss://").Replace("http://", "ws://");
|
||||
Trace.Info($"Creating websocket client ..." + feedStreamUrl);
|
||||
this._websocketClient = new ClientWebSocket();
|
||||
this._websocketClient.Options.SetRequestHeader("Authorization", $"Bearer {accessToken}");
|
||||
this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay);
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"No FeedStreamUrl found, so we will use Rest API calls for sending feed data");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"No access token from the service endpoint");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task ConnectWebSocketClient(string feedStreamUrl, TimeSpan delay)
|
||||
{
|
||||
try
|
||||
{
|
||||
Trace.Info($"Attempting to start websocket client with delay {delay}.");
|
||||
await Task.Delay(delay);
|
||||
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), default(CancellationToken));
|
||||
Trace.Info($"Successfully started websocket client.");
|
||||
}
|
||||
catch(Exception ex)
|
||||
{
|
||||
Trace.Info("Exception caught during websocket client connect, fallback of HTTP would be used now instead of websocket.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------
|
||||
// Feedback: WebConsole, TimelineRecords and Logs
|
||||
//-----------------------------------------------------------------
|
||||
@@ -208,72 +135,16 @@ namespace GitHub.Runner.Common
|
||||
return _taskClient.AppendLogContentAsync(scopeIdentifier, hubName, planId, logId, uploadStream, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public async Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken)
|
||||
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
var pushedLinesViaWebsocket = false;
|
||||
if (_websocketConnectTask != null)
|
||||
{
|
||||
await _websocketConnectTask;
|
||||
}
|
||||
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
// "_websocketClient != null" implies either: We have a successful connection OR we have to attempt sending again and then reconnect
|
||||
// ...in other words, if websocket client is null, we will skip sending to websocket and just use rest api calls to send data
|
||||
if (_websocketClient != null)
|
||||
{
|
||||
var linesWrapper = startLine.HasValue? new TimelineRecordFeedLinesWrapper(stepId, lines, startLine.Value): new TimelineRecordFeedLinesWrapper(stepId, lines);
|
||||
var jsonData = StringUtil.ConvertToJson(linesWrapper);
|
||||
try
|
||||
{
|
||||
totalBatchedLinesAttemptedByWebsocket++;
|
||||
var jsonDataBytes = Encoding.UTF8.GetBytes(jsonData);
|
||||
// break the message into chunks of 1024 bytes
|
||||
for (var i = 0; i < jsonDataBytes.Length; i += 1 * 1024)
|
||||
{
|
||||
var lastChunk = i + (1 * 1024) >= jsonDataBytes.Length;
|
||||
var chunk = new ArraySegment<byte>(jsonDataBytes, i, Math.Min(1 * 1024, jsonDataBytes.Length - i));
|
||||
await _websocketClient.SendAsync(chunk, WebSocketMessageType.Text, endOfMessage:lastChunk, cancellationToken);
|
||||
}
|
||||
|
||||
pushedLinesViaWebsocket = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
failedAttemptsToPostBatchedLinesByWebsocket++;
|
||||
Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
|
||||
Trace.Error(ex);
|
||||
if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
|
||||
{
|
||||
// let's consider failure percentage
|
||||
if (failedAttemptsToPostBatchedLinesByWebsocket * 100 / totalBatchedLinesAttemptedByWebsocket > _minWebsocketFailurePercentageAllowed)
|
||||
{
|
||||
Trace.Info($"Exhausted websocket allowed retries, we will not attempt websocket connection for this job to post lines again.");
|
||||
_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.InternalServerError, "Shutdown due to failures", cancellationToken);
|
||||
// By setting it to null, we will ensure that we never try websocket path again for this job
|
||||
_websocketClient = null;
|
||||
}
|
||||
}
|
||||
|
||||
if (_websocketClient != null)
|
||||
{
|
||||
var delay = BackoffTimerHelper.GetRandomBackoff(_minDelayForWebsocketReconnect, _maxDelayForWebsocketReconnect);
|
||||
Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}).");
|
||||
InitializeWebsocketClient(delay);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!pushedLinesViaWebsocket)
|
||||
{
|
||||
if (startLine.HasValue)
|
||||
{
|
||||
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine.Value, cancellationToken: cancellationToken);
|
||||
}
|
||||
else
|
||||
{
|
||||
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
|
||||
}
|
||||
}
|
||||
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken)
|
||||
@@ -315,10 +186,10 @@ namespace GitHub.Runner.Common
|
||||
//-----------------------------------------------------------------
|
||||
// Action download info
|
||||
//-----------------------------------------------------------------
|
||||
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken)
|
||||
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, jobId, actions, cancellationToken: cancellationToken);
|
||||
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, actions, cancellationToken: cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Concurrent;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Sdk;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
{
|
||||
@@ -71,11 +72,10 @@ namespace GitHub.Runner.Common
|
||||
|
||||
// Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
|
||||
// Then the dequeue will happen every 500ms.
|
||||
// In this way, customer still can get instance live console output on job start,
|
||||
// In this way, customer still can get instance live console output on job start,
|
||||
// at the same time we can cut the load to server after the build run for more than 60s
|
||||
private int _webConsoleLineAggressiveDequeueCount = 0;
|
||||
private const int _webConsoleLineAggressiveDequeueLimit = 4 * 60;
|
||||
private const int _webConsoleLineQueueSizeLimit = 1024;
|
||||
private bool _webConsoleLineAggressiveDequeue = true;
|
||||
private bool _firstConsoleOutputs = true;
|
||||
|
||||
@@ -89,10 +89,6 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
Trace.Entering();
|
||||
|
||||
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
_jobServer.InitializeWebsocketClient(serviceEndPoint);
|
||||
|
||||
if (_queueInProcess)
|
||||
{
|
||||
Trace.Info("No-opt, all queue process tasks are running.");
|
||||
@@ -160,28 +156,13 @@ namespace GitHub.Runner.Common
|
||||
await ProcessTimelinesUpdateQueueAsync(runOnce: true);
|
||||
Trace.Info("Timeline update queue drained.");
|
||||
|
||||
Trace.Info($"Disposing job server ...");
|
||||
await _jobServer.DisposeAsync();
|
||||
|
||||
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
|
||||
}
|
||||
|
||||
public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
|
||||
{
|
||||
// We only process 500 lines of the queue everytime.
|
||||
// If the queue is backing up due to slow Http request or flood of output from step,
|
||||
// we will drop the output to avoid extra memory consumption from the runner since the live console feed is best effort.
|
||||
if (!string.IsNullOrEmpty(line) && _webConsoleLineQueue.Count < _webConsoleLineQueueSizeLimit)
|
||||
{
|
||||
Trace.Verbose("Enqueue web console line queue: {0}", line);
|
||||
if (line.Length > 1024)
|
||||
{
|
||||
Trace.Verbose("Web console line is more than 1024 chars, truncate to first 1024 chars");
|
||||
line = $"{line.Substring(0, 1024)}...";
|
||||
}
|
||||
|
||||
_webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line, lineNumber));
|
||||
}
|
||||
Trace.Verbose("Enqueue web console line queue: {0}", line);
|
||||
_webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line, lineNumber));
|
||||
}
|
||||
|
||||
public void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource)
|
||||
@@ -249,6 +230,12 @@ namespace GitHub.Runner.Common
|
||||
stepRecordIds.Add(lineInfo.StepRecordId);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(lineInfo.Line) && lineInfo.Line.Length > 1024)
|
||||
{
|
||||
Trace.Verbose("Web console line is more than 1024 chars, truncate to first 1024 chars");
|
||||
lineInfo.Line = $"{lineInfo.Line.Substring(0, 1024)}...";
|
||||
}
|
||||
|
||||
stepsConsoleLines[lineInfo.StepRecordId].Add(new TimelineRecordLogLine(lineInfo.Line, lineInfo.LineNumber));
|
||||
linesCounter++;
|
||||
|
||||
@@ -299,7 +286,15 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
try
|
||||
{
|
||||
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, default(CancellationToken));
|
||||
// we will not requeue failed batch, since the web console lines are time sensitive.
|
||||
if (batch[0].LineNumber.HasValue)
|
||||
{
|
||||
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber.Value, default(CancellationToken));
|
||||
}
|
||||
else
|
||||
{
|
||||
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), default(CancellationToken));
|
||||
}
|
||||
|
||||
if (_firstConsoleOutputs)
|
||||
{
|
||||
@@ -488,8 +483,8 @@ namespace GitHub.Runner.Common
|
||||
|
||||
if (runOnce)
|
||||
{
|
||||
// continue process timeline records update,
|
||||
// we might have more records need update,
|
||||
// continue process timeline records update,
|
||||
// we might have more records need update,
|
||||
// since we just create a new sub-timeline
|
||||
if (pendingSubtimelineUpdate)
|
||||
{
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.ObjectModel;
|
||||
|
||||
namespace GitHub.Runner.Common.Util
|
||||
{
|
||||
public static class NodeUtil
|
||||
{
|
||||
private const string _defaultNodeVersion = "node16";
|
||||
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] {"node12", "node16"});
|
||||
public static string GetInternalNodeVersion()
|
||||
{
|
||||
var forcedNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion);
|
||||
return !string.IsNullOrEmpty(forcedNodeVersion) && BuiltInNodeVersions.Contains(forcedNodeVersion) ? forcedNodeVersion : _defaultNodeVersion;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -10,7 +10,6 @@ using System.Net.NetworkInformation;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
|
||||
@@ -315,12 +314,12 @@ namespace GitHub.Runner.Listener.Check
|
||||
});
|
||||
|
||||
var downloadCertScript = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "downloadCert");
|
||||
var node = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node} \"{downloadCertScript}\"' ");
|
||||
var node12 = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{downloadCertScript}\"' ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
|
||||
await processInvoker.ExecuteAsync(
|
||||
hostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
node,
|
||||
node12,
|
||||
$"\"{downloadCertScript}\"",
|
||||
env,
|
||||
true,
|
||||
|
||||
@@ -6,7 +6,6 @@ using System.Net;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
@@ -145,12 +144,12 @@ namespace GitHub.Runner.Listener.Check
|
||||
});
|
||||
|
||||
var makeWebRequestScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "makeWebRequest.js");
|
||||
var node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node} \"{makeWebRequestScript}\"' ");
|
||||
var node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{makeWebRequestScript}\"' ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
|
||||
await processInvoker.ExecuteAsync(
|
||||
HostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
node,
|
||||
node12,
|
||||
$"\"{makeWebRequestScript}\"",
|
||||
env,
|
||||
true,
|
||||
|
||||
@@ -29,7 +29,6 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
Constants.Runner.CommandLine.Flags.Check,
|
||||
Constants.Runner.CommandLine.Flags.Commit,
|
||||
Constants.Runner.CommandLine.Flags.DisableUpdate,
|
||||
Constants.Runner.CommandLine.Flags.Ephemeral,
|
||||
Constants.Runner.CommandLine.Flags.Help,
|
||||
Constants.Runner.CommandLine.Flags.Once,
|
||||
@@ -69,7 +68,6 @@ namespace GitHub.Runner.Listener
|
||||
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
|
||||
public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version);
|
||||
public bool Ephemeral => TestFlag(Constants.Runner.CommandLine.Flags.Ephemeral);
|
||||
public bool DisableUpdate => TestFlag(Constants.Runner.CommandLine.Flags.DisableUpdate);
|
||||
|
||||
// Keep this around since customers still relies on it
|
||||
public bool RunOnce => TestFlag(Constants.Runner.CommandLine.Flags.Once);
|
||||
|
||||
@@ -54,7 +54,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
Trace.Info(nameof(LoadSettings));
|
||||
if (!IsConfigured())
|
||||
{
|
||||
throw new NonRetryableException("Not configured. Run config.(sh/cmd) to configure the runner.");
|
||||
throw new InvalidOperationException("Not configured. Run config.(sh/cmd) to configure the runner.");
|
||||
}
|
||||
|
||||
RunnerSettings settings = _store.GetSettings();
|
||||
@@ -196,7 +196,6 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
TaskAgent agent;
|
||||
while (true)
|
||||
{
|
||||
runnerSettings.DisableUpdate = command.DisableUpdate;
|
||||
runnerSettings.Ephemeral = command.Ephemeral;
|
||||
runnerSettings.AgentName = command.GetRunnerName();
|
||||
|
||||
@@ -214,22 +213,11 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
if (command.GetReplace())
|
||||
{
|
||||
// Update existing agent with new PublicKey, agent version.
|
||||
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
|
||||
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral);
|
||||
|
||||
try
|
||||
{
|
||||
agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
|
||||
if (command.DisableUpdate &&
|
||||
command.DisableUpdate != agent.DisableUpdate)
|
||||
{
|
||||
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'DisableUpdate' flag.");
|
||||
}
|
||||
if (command.Ephemeral &&
|
||||
command.Ephemeral != agent.Ephemeral)
|
||||
{
|
||||
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'Ephemeral' flag.");
|
||||
}
|
||||
|
||||
_term.WriteSuccessMessage("Successfully replaced the runner");
|
||||
break;
|
||||
}
|
||||
@@ -248,22 +236,11 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
else
|
||||
{
|
||||
// Create a new agent.
|
||||
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
|
||||
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral);
|
||||
|
||||
try
|
||||
{
|
||||
agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
|
||||
if (command.DisableUpdate &&
|
||||
command.DisableUpdate != agent.DisableUpdate)
|
||||
{
|
||||
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'DisableUpdate' flag.");
|
||||
}
|
||||
if (command.Ephemeral &&
|
||||
command.Ephemeral != agent.Ephemeral)
|
||||
{
|
||||
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'Ephemeral' flag.");
|
||||
}
|
||||
|
||||
_term.WriteSuccessMessage("Runner successfully added");
|
||||
break;
|
||||
}
|
||||
@@ -489,7 +466,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
|
||||
|
||||
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
|
||||
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral)
|
||||
{
|
||||
ArgUtil.NotNull(agent, nameof(agent));
|
||||
agent.Authorization = new TaskAgentAuthorization
|
||||
@@ -501,7 +478,6 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
agent.Version = BuildConstants.RunnerPackage.Version;
|
||||
agent.OSDescription = RuntimeInformation.OSDescription;
|
||||
agent.Ephemeral = ephemeral;
|
||||
agent.DisableUpdate = disableUpdate;
|
||||
agent.MaxParallelism = 1;
|
||||
|
||||
agent.Labels.Clear();
|
||||
@@ -518,7 +494,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
return agent;
|
||||
}
|
||||
|
||||
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
|
||||
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral)
|
||||
{
|
||||
TaskAgent agent = new TaskAgent(agentName)
|
||||
{
|
||||
@@ -530,7 +506,6 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
Version = BuildConstants.RunnerPackage.Version,
|
||||
OSDescription = RuntimeInformation.OSDescription,
|
||||
Ephemeral = ephemeral,
|
||||
DisableUpdate = disableUpdate
|
||||
};
|
||||
|
||||
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
|
||||
@@ -613,52 +588,32 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
throw new ArgumentException($"'{githubUrl}' should point to an org or repository.");
|
||||
}
|
||||
|
||||
int retryCount = 0;
|
||||
while(retryCount < 3)
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
|
||||
try
|
||||
{
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var jsonResponse = await response.Content.ReadAsStringAsync();
|
||||
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
|
||||
}
|
||||
else if(response.StatusCode == System.Net.HttpStatusCode.NotFound)
|
||||
{
|
||||
// It doesn't make sense to retry in this case, so just stop
|
||||
break;
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var errorResponse = await response.Content.ReadAsStringAsync();
|
||||
_term.WriteError(errorResponse);
|
||||
response.EnsureSuccessStatusCode();
|
||||
}
|
||||
}
|
||||
catch(Exception ex) when (retryCount < 2)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Failed to get JIT runner token -- Atempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
Trace.Info("Retrying in 5 seconds");
|
||||
}
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var jsonResponse = await response.Content.ReadAsStringAsync();
|
||||
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var errorResponse = await response.Content.ReadAsStringAsync();
|
||||
_term.WriteError(errorResponse);
|
||||
response.EnsureSuccessStatusCode();
|
||||
return null;
|
||||
}
|
||||
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
|
||||
await Task.Delay(backOff);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
|
||||
@@ -674,57 +629,35 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runner-registration";
|
||||
}
|
||||
|
||||
int retryCount = 0;
|
||||
while (retryCount < 3)
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
|
||||
var bodyObject = new Dictionary<string, string>()
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
{"url", githubUrl},
|
||||
{"runner_event", runnerEvent}
|
||||
};
|
||||
|
||||
var bodyObject = new Dictionary<string, string>()
|
||||
{
|
||||
{"url", githubUrl},
|
||||
{"runner_event", runnerEvent}
|
||||
};
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
|
||||
|
||||
try
|
||||
{
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
|
||||
|
||||
if(response.IsSuccessStatusCode)
|
||||
{
|
||||
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var jsonResponse = await response.Content.ReadAsStringAsync();
|
||||
return StringUtil.ConvertFromJson<GitHubAuthResult>(jsonResponse);
|
||||
}
|
||||
else if(response.StatusCode == System.Net.HttpStatusCode.NotFound)
|
||||
{
|
||||
// It doesn't make sense to retry in this case, so just stop
|
||||
break;
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var errorResponse = await response.Content.ReadAsStringAsync();
|
||||
_term.WriteError(errorResponse);
|
||||
// Something else bad happened, let's go to our retry logic
|
||||
response.EnsureSuccessStatusCode();
|
||||
}
|
||||
}
|
||||
catch(Exception ex) when (retryCount < 2)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Failed to get tenant credentials -- Atempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
Trace.Info("Retrying in 5 seconds");
|
||||
}
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var jsonResponse = await response.Content.ReadAsStringAsync();
|
||||
return StringUtil.ConvertFromJson<GitHubAuthResult>(jsonResponse);
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var errorResponse = await response.Content.ReadAsStringAsync();
|
||||
_term.WriteError(errorResponse);
|
||||
response.EnsureSuccessStatusCode();
|
||||
return null;
|
||||
}
|
||||
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
|
||||
await Task.Delay(backOff);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -142,7 +142,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
Trace.Entering();
|
||||
LocalGroupInfo groupInfo = new LocalGroupInfo();
|
||||
groupInfo.Name = groupName;
|
||||
groupInfo.Comment = StringUtil.Format("Built-in group used by GitHub Actions Runner.");
|
||||
groupInfo.Comment = StringUtil.Format("Built-in group used by Team Foundation Server.");
|
||||
|
||||
int returnCode = NetLocalGroupAdd(null, // computer name
|
||||
1, // 1 means include comment
|
||||
|
||||
@@ -48,12 +48,13 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
string repoOrOrgName = regex.Replace(settings.RepoOrOrgName, "-");
|
||||
|
||||
serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgName, settings.AgentName);
|
||||
if (serviceName.Length > MaxServiceNameLength)
|
||||
|
||||
if (serviceName.Length > 80)
|
||||
{
|
||||
Trace.Verbose($"Calculated service name is too long (> {MaxServiceNameLength} chars). Trying again by calculating a shorter name.");
|
||||
// Add 5 to add -xxxx random number on the end
|
||||
int exceededCharLength = serviceName.Length - MaxServiceNameLength + 5;
|
||||
string repoOrOrgNameSubstring = StringUtil.SubstringPrefix(repoOrOrgName, MaxRepoOrgCharacters);
|
||||
Trace.Verbose($"Calculated service name is too long (> 80 chars). Trying again by calculating a shorter name.");
|
||||
|
||||
int exceededCharLength = serviceName.Length - 80;
|
||||
string repoOrOrgNameSubstring = StringUtil.SubstringPrefix(repoOrOrgName, 45);
|
||||
|
||||
exceededCharLength -= repoOrOrgName.Length - repoOrOrgNameSubstring.Length;
|
||||
|
||||
@@ -65,10 +66,6 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
runnerNameSubstring = StringUtil.SubstringPrefix(settings.AgentName, settings.AgentName.Length - exceededCharLength);
|
||||
}
|
||||
|
||||
// Lets add a suffix with a random number to reduce the chance of collisions between runner names once we truncate
|
||||
var random = new Random();
|
||||
var num = random.Next(1000, 9999).ToString();
|
||||
runnerNameSubstring +=$"-{num}";
|
||||
serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgNameSubstring, runnerNameSubstring);
|
||||
}
|
||||
|
||||
@@ -76,12 +73,5 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
Trace.Info($"Service name '{serviceName}' display name '{serviceDisplayName}' will be used for service configuration.");
|
||||
}
|
||||
#if (OS_LINUX || OS_OSX)
|
||||
const int MaxServiceNameLength = 150;
|
||||
const int MaxRepoOrgCharacters = 70;
|
||||
#elif OS_WINDOWS
|
||||
const int MaxServiceNameLength = 80;
|
||||
const int MaxRepoOrgCharacters = 45;
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,19 +2,17 @@ using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
using GitHub.Services.WebApi.Jwt;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using System.Linq;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.WebApi.Jwt;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
@@ -36,7 +34,6 @@ namespace GitHub.Runner.Listener
|
||||
// and the server will not send another job while this one is still running.
|
||||
public sealed class JobDispatcher : RunnerService, IJobDispatcher
|
||||
{
|
||||
private static Regex _invalidJsonRegex = new Regex(@"invalid\ Json\ at\ position\ '(\d+)':", RegexOptions.Compiled | RegexOptions.IgnoreCase);
|
||||
private readonly Lazy<Dictionary<long, TaskResult>> _localRunJobResult = new Lazy<Dictionary<long, TaskResult>>();
|
||||
private int _poolId;
|
||||
|
||||
@@ -285,7 +282,7 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
// at this point, the job execution might encounter some dead lock and even not able to be cancelled.
|
||||
// no need to localize the exception string should never happen.
|
||||
throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be cancelled within 45 seconds.");
|
||||
throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be canceled within 45 seconds.");
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -346,10 +343,7 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
|
||||
var term = HostContext.GetService<ITerminal>();
|
||||
|
||||
string workflowName = message.Variables["system.workflowFilePath"].Value.Split('/').LastOrDefault();
|
||||
string additionalInfo = string.IsNullOrEmpty(workflowName) ? $"(in repository \"{_runnerSettings.RepoOrOrgName}\")" : $"(workflow \"{workflowName}\" in repository \"{_runnerSettings.RepoOrOrgName}\")";
|
||||
term.WriteLine($"{DateTime.UtcNow:u}: Running job: \"{message.JobDisplayName}\" {additionalInfo}");
|
||||
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
|
||||
|
||||
// first job request renew succeed.
|
||||
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
|
||||
@@ -366,7 +360,7 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
|
||||
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
|
||||
|
||||
// wait till first renew succeed or job request is cancelled
|
||||
// wait till first renew succeed or job request is canceled
|
||||
// not even start worker if the first renew fail
|
||||
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
|
||||
|
||||
@@ -534,7 +528,7 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode);
|
||||
Trace.Info($"finish job request for job {message.JobId} with result: {result}");
|
||||
term.WriteLine($"{DateTime.UtcNow:u}: Job \"{message.JobDisplayName}\" {additionalInfo} completed with result: {result}");
|
||||
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {result}");
|
||||
|
||||
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
||||
// stop renew lock
|
||||
@@ -630,7 +624,7 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
|
||||
Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}");
|
||||
term.WriteLine($"{DateTime.UtcNow:u}: Job \"{message.JobDisplayName}\" {additionalInfo} completed with result: {resultOnAbandonOrCancel}");
|
||||
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {resultOnAbandonOrCancel}");
|
||||
// complete job request with cancel result, stop renew lock, job has finished.
|
||||
|
||||
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
||||
@@ -707,7 +701,7 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
// OperationCanceledException may caused by http timeout or _lockRenewalTokenSource.Cance();
|
||||
// Stop renew only on cancellation token fired.
|
||||
Trace.Info($"job renew has been cancelled, stop renew job request {requestId}.");
|
||||
Trace.Info($"job renew has been canceled, stop renew job request {requestId}.");
|
||||
return;
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -765,7 +759,7 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info($"job renew has been cancelled, stop renew job request {requestId}.");
|
||||
Trace.Info($"job renew has been canceled, stop renew job request {requestId}.");
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -970,30 +964,6 @@ namespace GitHub.Runner.Listener
|
||||
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
|
||||
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
|
||||
|
||||
try
|
||||
{
|
||||
if (!string.IsNullOrEmpty(errorMessage) &&
|
||||
message.Variables.TryGetValue("DistributedTask.EnableRunnerIPCDebug", out var enableRunnerIPCDebug) &&
|
||||
StringUtil.ConvertToBoolean(enableRunnerIPCDebug.Value))
|
||||
{
|
||||
// the trace should be best effort and not affect any job result
|
||||
var match = _invalidJsonRegex.Match(errorMessage);
|
||||
if (match.Success &&
|
||||
match.Groups.Count == 2)
|
||||
{
|
||||
var jsonPosition = int.Parse(match.Groups[1].Value);
|
||||
var serializedJobMessage = JsonUtility.ToString(message);
|
||||
var originalJson = serializedJobMessage.Substring(jsonPosition - 10, 20);
|
||||
errorMessage = $"Runner sent Json at position '{jsonPosition}': {originalJson} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(originalJson))})\n{errorMessage}";
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error(ex);
|
||||
errorMessage = $"Fail to check json IPC error: {ex.Message}\n{errorMessage}";
|
||||
}
|
||||
|
||||
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
|
||||
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
|
||||
jobRecord.ErrorCount++;
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Services.Common;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using System.Security.Cryptography;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using GitHub.Services.OAuth;
|
||||
using System.Diagnostics;
|
||||
using System.Runtime.InteropServices;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
@@ -33,7 +33,6 @@ namespace GitHub.Runner.Listener
|
||||
private IRunnerServer _runnerServer;
|
||||
private TaskAgentSession _session;
|
||||
private TimeSpan _getNextMessageRetryInterval;
|
||||
private bool _accessTokenRevoked = false;
|
||||
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
|
||||
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
|
||||
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
|
||||
@@ -112,7 +111,6 @@ namespace GitHub.Runner.Listener
|
||||
catch (TaskAgentAccessTokenExpiredException)
|
||||
{
|
||||
Trace.Info("Runner OAuth token has been revoked. Session creation failed.");
|
||||
_accessTokenRevoked = true;
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -156,16 +154,9 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
if (_session != null && _session.SessionId != Guid.Empty)
|
||||
{
|
||||
if (!_accessTokenRevoked)
|
||||
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
{
|
||||
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
{
|
||||
await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Warning("Runner OAuth token has been revoked. Skip deleting session.");
|
||||
await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -214,7 +205,6 @@ namespace GitHub.Runner.Listener
|
||||
catch (TaskAgentAccessTokenExpiredException)
|
||||
{
|
||||
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
|
||||
_accessTokenRevoked = true;
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
|
||||
@@ -12,7 +12,6 @@ using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Linq;
|
||||
using GitHub.Runner.Listener.Check;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
@@ -319,7 +318,7 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
IJobDispatcher jobDispatcher = null;
|
||||
CancellationTokenSource messageQueueLoopTokenSource = CancellationTokenSource.CreateLinkedTokenSource(HostContext.RunnerShutdownToken);
|
||||
|
||||
|
||||
// Should we try to cleanup ephemeral runners
|
||||
bool runOnceJobCompleted = false;
|
||||
try
|
||||
@@ -408,29 +407,8 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
autoUpdateInProgress = true;
|
||||
var runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
|
||||
#if DEBUG
|
||||
// Can mock the update for testing
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
|
||||
{
|
||||
|
||||
// The mock_update_messages.json file should be an object with keys being the current version and values being the targeted mock version object
|
||||
// Example: { "2.283.2": {"targetVersion":"2.284.1"}, "2.284.1": {"targetVersion":"2.285.0"}}
|
||||
var mockUpdatesPath = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), "mock_update_messages.json");
|
||||
if (File.Exists(mockUpdatesPath))
|
||||
{
|
||||
var mockUpdateMessages = JsonUtility.FromString<Dictionary<string, AgentRefreshMessage>>(File.ReadAllText(mockUpdatesPath));
|
||||
if (mockUpdateMessages.ContainsKey(BuildConstants.RunnerPackage.Version))
|
||||
{
|
||||
var mockTargetVersion = mockUpdateMessages[BuildConstants.RunnerPackage.Version].TargetVersion;
|
||||
_term.WriteLine($"Mocking update, using version {mockTargetVersion} instead of {runnerUpdateMessage.TargetVersion}");
|
||||
Trace.Info($"Mocking update, using version {mockTargetVersion} instead of {runnerUpdateMessage.TargetVersion}");
|
||||
runnerUpdateMessage = new AgentRefreshMessage(runnerUpdateMessage.AgentId, mockTargetVersion, runnerUpdateMessage.Timeout);
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
var selfUpdater = HostContext.GetService<ISelfUpdater>();
|
||||
selfUpdateTask = selfUpdater.SelfUpdate(runnerUpdateMessage, jobDispatcher, false, HostContext.RunnerShutdownToken);
|
||||
selfUpdateTask = selfUpdater.SelfUpdate(runnerUpdateMessage, jobDispatcher, !runOnce && HostContext.StartupType != StartupType.Service, HostContext.RunnerShutdownToken);
|
||||
Trace.Info("Refresh message received, kick-off selfupdate background process.");
|
||||
}
|
||||
else
|
||||
@@ -447,7 +425,6 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"Received job message of length {message.Body.Length} from service, with hash '{IOUtil.GetSha256Hash(message.Body)}'");
|
||||
var jobMessage = StringUtil.ConvertFromJson<Pipelines.AgentJobRequestMessage>(message.Body);
|
||||
jobDispatcher.Run(jobMessage, runOnce);
|
||||
if (runOnce)
|
||||
@@ -562,7 +539,6 @@ Config Options:
|
||||
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
|
||||
--replace Replace any existing runner with the same name (default false)
|
||||
--pat GitHub personal access token used for checking network connectivity when executing `.{separator}run.{ext} --check`
|
||||
--disableupdate Disable self-hosted runner automatic update to the latest released version`
|
||||
--ephemeral Configure the runner to only take one job and then let the service un-configure the runner after the job finishes (default false)");
|
||||
|
||||
#if OS_WINDOWS
|
||||
@@ -570,7 +546,7 @@ Config Options:
|
||||
_term.WriteLine($@" --windowslogonaccount string Account to run the service as. Requires runasservice");
|
||||
_term.WriteLine($@" --windowslogonpassword string Password for the service account. Requires runasservice");
|
||||
#endif
|
||||
_term.WriteLine($@"
|
||||
_term.WriteLine($@"
|
||||
Examples:
|
||||
Check GitHub server network connectivity:
|
||||
.{separator}run.{ext} --check --url <url> --pat <pat>
|
||||
|
||||
@@ -1,21 +1,21 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Reflection;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using System.Security.Cryptography;
|
||||
using GitHub.Services.WebApi;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Text;
|
||||
using System.Collections.Generic;
|
||||
using System.Reflection;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
@@ -30,19 +30,13 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
private static string _packageType = "agent";
|
||||
private static string _platform = BuildConstants.RunnerPackage.PackageName;
|
||||
private static string _dotnetRuntime = "dotnetRuntime";
|
||||
private static string _externals = "externals";
|
||||
private readonly Dictionary<string, string> _contentHashes = new Dictionary<string, string>();
|
||||
|
||||
private PackageMetadata _targetPackage;
|
||||
private ITerminal _terminal;
|
||||
private IRunnerServer _runnerServer;
|
||||
private int _poolId;
|
||||
private int _agentId;
|
||||
private readonly ConcurrentQueue<string> _updateTrace = new ConcurrentQueue<string>();
|
||||
private Task _cloneAndCalculateContentHashTask;
|
||||
private string _dotnetRuntimeCloneDirectory;
|
||||
private string _externalsCloneDirectory;
|
||||
private readonly List<string> _updateTrace = new List<string>();
|
||||
|
||||
public bool Busy { get; private set; }
|
||||
|
||||
@@ -56,8 +50,6 @@ namespace GitHub.Runner.Listener
|
||||
var settings = configStore.GetSettings();
|
||||
_poolId = settings.PoolId;
|
||||
_agentId = settings.AgentId;
|
||||
_dotnetRuntimeCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__dotnet_runtime__");
|
||||
_externalsCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals__");
|
||||
}
|
||||
|
||||
public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
|
||||
@@ -67,13 +59,6 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
var totalUpdateTime = Stopwatch.StartNew();
|
||||
|
||||
// Copy dotnet runtime and externals of current runner to a temp folder
|
||||
// So we can re-use them with trimmed runner package, if possible.
|
||||
// This process is best effort, if we can't use trimmed runner package,
|
||||
// we will just go with the full package.
|
||||
var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
|
||||
_cloneAndCalculateContentHashTask = CloneAndCalculateAssetsHash(_dotnetRuntimeCloneDirectory, _externalsCloneDirectory, linkedTokenSource.Token);
|
||||
|
||||
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
|
||||
{
|
||||
Trace.Info($"Can't find available update package.");
|
||||
@@ -81,31 +66,13 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
|
||||
Trace.Info($"An update is available.");
|
||||
_updateTrace.Enqueue($"RunnerPlatform: {_targetPackage.Platform}");
|
||||
_updateTrace.Add($"RunnerPlatform: {_targetPackage.Platform}");
|
||||
|
||||
// Print console line that warn user not shutdown runner.
|
||||
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
|
||||
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
|
||||
|
||||
if (_targetPackage.TrimmedPackages?.Count > 0)
|
||||
{
|
||||
// wait for cloning assets task to finish only if we have trimmed packages
|
||||
await _cloneAndCalculateContentHashTask;
|
||||
}
|
||||
else
|
||||
{
|
||||
linkedTokenSource.Cancel();
|
||||
try
|
||||
{
|
||||
await _cloneAndCalculateContentHashTask;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Info($"Ingore errors after cancelling cloning assets task: {ex}");
|
||||
}
|
||||
}
|
||||
|
||||
await DownloadLatestRunner(token, updateMessage.TargetVersion);
|
||||
await DownloadLatestRunner(token);
|
||||
Trace.Info($"Download latest runner and unzip into runner root.");
|
||||
|
||||
// wait till all running job finish
|
||||
@@ -121,39 +88,34 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Delete old version runner backup.");
|
||||
stopWatch.Stop();
|
||||
// generate update script from template
|
||||
_updateTrace.Enqueue($"DeleteRunnerBackupTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
_updateTrace.Add($"DeleteRunnerBackupTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
await UpdateRunnerUpdateStateAsync("Generate and execute update script.");
|
||||
|
||||
string updateScript = GenerateUpdateScript(restartInteractiveRunner);
|
||||
Trace.Info($"Generate update script into: {updateScript}");
|
||||
|
||||
|
||||
// For L0, we will skip execute update script.
|
||||
if (string.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_EXECUTE_UPDATE_SCRIPT")))
|
||||
{
|
||||
// kick off update script
|
||||
Process invokeScript = new Process();
|
||||
// kick off update script
|
||||
Process invokeScript = new Process();
|
||||
#if OS_WINDOWS
|
||||
invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace);
|
||||
invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\"";
|
||||
invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace);
|
||||
invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\"";
|
||||
#elif (OS_OSX || OS_LINUX)
|
||||
invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
|
||||
invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
|
||||
invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
|
||||
invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
|
||||
#endif
|
||||
invokeScript.Start();
|
||||
Trace.Info($"Update script start running");
|
||||
}
|
||||
invokeScript.Start();
|
||||
Trace.Info($"Update script start running");
|
||||
|
||||
totalUpdateTime.Stop();
|
||||
|
||||
_updateTrace.Enqueue($"TotalUpdateTime: {totalUpdateTime.ElapsedMilliseconds}ms");
|
||||
_updateTrace.Add($"TotalUpdateTime: {totalUpdateTime.ElapsedMilliseconds}ms");
|
||||
await UpdateRunnerUpdateStateAsync("Runner will exit shortly for update, should be back online within 10 seconds.");
|
||||
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_updateTrace.Enqueue(ex.ToString());
|
||||
_updateTrace.Add(ex.ToString());
|
||||
throw;
|
||||
}
|
||||
finally
|
||||
@@ -207,7 +169,7 @@ namespace GitHub.Runner.Listener
|
||||
/// </summary>
|
||||
/// <param name="token"></param>
|
||||
/// <returns></returns>
|
||||
private async Task DownloadLatestRunner(CancellationToken token, string targetVersion)
|
||||
private async Task DownloadLatestRunner(CancellationToken token)
|
||||
{
|
||||
string latestRunnerDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), Constants.Path.UpdateDirectory);
|
||||
IOUtil.DeleteDirectory(latestRunnerDirectory, token);
|
||||
@@ -216,117 +178,21 @@ namespace GitHub.Runner.Listener
|
||||
string archiveFile = null;
|
||||
var packageDownloadUrl = _targetPackage.DownloadUrl;
|
||||
var packageHashValue = _targetPackage.HashValue;
|
||||
var runtimeTrimmed = false;
|
||||
var externalsTrimmed = false;
|
||||
var fallbackToFullPackage = false;
|
||||
|
||||
// Only try trimmed package if sever sends them and we have calculated hash value of the current runtime/externals.
|
||||
if (_contentHashes.Count == 2 &&
|
||||
_contentHashes.ContainsKey(_dotnetRuntime) &&
|
||||
_contentHashes.ContainsKey(_externals) &&
|
||||
_targetPackage.TrimmedPackages?.Count > 0)
|
||||
{
|
||||
Trace.Info($"Current runner content hash: {StringUtil.ConvertToJson(_contentHashes)}");
|
||||
Trace.Info($"Trimmed packages info from service: {StringUtil.ConvertToJson(_targetPackage.TrimmedPackages)}");
|
||||
// Try to see whether we can use any size trimmed down package to speed up runner updates.
|
||||
foreach (var trimmedPackage in _targetPackage.TrimmedPackages)
|
||||
{
|
||||
if (trimmedPackage.TrimmedContents.Count == 2 &&
|
||||
trimmedPackage.TrimmedContents.TryGetValue(_dotnetRuntime, out var trimmedRuntimeHash) &&
|
||||
trimmedRuntimeHash == _contentHashes[_dotnetRuntime] &&
|
||||
trimmedPackage.TrimmedContents.TryGetValue(_externals, out var trimmedExternalsHash) &&
|
||||
trimmedExternalsHash == _contentHashes[_externals])
|
||||
{
|
||||
Trace.Info($"Use trimmed (runtime+externals) package '{trimmedPackage.DownloadUrl}' to update runner.");
|
||||
packageDownloadUrl = trimmedPackage.DownloadUrl;
|
||||
packageHashValue = trimmedPackage.HashValue;
|
||||
runtimeTrimmed = true;
|
||||
externalsTrimmed = true;
|
||||
break;
|
||||
}
|
||||
else if (trimmedPackage.TrimmedContents.Count == 1 &&
|
||||
trimmedPackage.TrimmedContents.TryGetValue(_externals, out trimmedExternalsHash) &&
|
||||
trimmedExternalsHash == _contentHashes[_externals])
|
||||
{
|
||||
Trace.Info($"Use trimmed (externals) package '{trimmedPackage.DownloadUrl}' to update runner.");
|
||||
packageDownloadUrl = trimmedPackage.DownloadUrl;
|
||||
packageHashValue = trimmedPackage.HashValue;
|
||||
externalsTrimmed = true;
|
||||
break;
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"Can't use trimmed package from '{trimmedPackage.DownloadUrl}' since the current runner does not carry those trimmed content (Hash mismatch).");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
|
||||
_updateTrace.Enqueue($"RuntimeTrimmed: {runtimeTrimmed}");
|
||||
_updateTrace.Enqueue($"ExternalsTrimmed: {externalsTrimmed}");
|
||||
_updateTrace.Add($"DownloadUrl: {packageDownloadUrl}");
|
||||
|
||||
try
|
||||
{
|
||||
#if DEBUG
|
||||
// Much of the update process (targetVersion, archive) is server-side, this is a way to control it from here for testing specific update scenarios
|
||||
// Add files like 'runner2.281.2.tar.gz' or 'runner2.283.0.zip' (depending on your platform) to your runner root folder
|
||||
// Note that runners still need to be older than the server's runner version in order to receive an 'AgentRefreshMessage' and trigger this update
|
||||
// Wrapped in #if DEBUG as this should not be in the RELEASE build
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
|
||||
{
|
||||
var waitForDebugger = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE_WAIT_FOR_DEBUGGER"));
|
||||
if (waitForDebugger)
|
||||
{
|
||||
int waitInSeconds = 20;
|
||||
while (!Debugger.IsAttached && waitInSeconds-- > 0)
|
||||
{
|
||||
await Task.Delay(1000);
|
||||
}
|
||||
Debugger.Break();
|
||||
}
|
||||
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token);
|
||||
|
||||
if (_targetPackage.Platform.StartsWith("win"))
|
||||
{
|
||||
archiveFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"runner{targetVersion}.zip");
|
||||
}
|
||||
else
|
||||
{
|
||||
archiveFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"runner{targetVersion}.tar.gz");
|
||||
}
|
||||
|
||||
if (File.Exists(archiveFile))
|
||||
{
|
||||
_updateTrace.Enqueue($"Mocking update with file: '{archiveFile}' and targetVersion: '{targetVersion}', nothing is downloaded");
|
||||
_terminal.WriteLine($"Mocking update with file: '{archiveFile}' and targetVersion: '{targetVersion}', nothing is downloaded");
|
||||
}
|
||||
else
|
||||
{
|
||||
archiveFile = null;
|
||||
_terminal.WriteLine($"Mock runner archive not found at {archiveFile} for target version {targetVersion}, proceeding with download instead");
|
||||
_updateTrace.Enqueue($"Mock runner archive not found at {archiveFile} for target version {targetVersion}, proceeding with download instead");
|
||||
}
|
||||
}
|
||||
#endif
|
||||
// archiveFile is not null only if we mocked it above
|
||||
if (string.IsNullOrEmpty(archiveFile))
|
||||
{
|
||||
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token);
|
||||
|
||||
if (string.IsNullOrEmpty(archiveFile))
|
||||
{
|
||||
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
|
||||
}
|
||||
await ValidateRunnerHash(archiveFile, packageHashValue);
|
||||
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
|
||||
}
|
||||
|
||||
await ValidateRunnerHash(archiveFile, packageHashValue);
|
||||
|
||||
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
|
||||
}
|
||||
catch (Exception ex) when (runtimeTrimmed || externalsTrimmed)
|
||||
{
|
||||
// if anything failed when we use trimmed package (download/validatehase/extract), try again with the full runner package.
|
||||
Trace.Error($"Fail to download latest runner using trimmed package: {ex}");
|
||||
fallbackToFullPackage = true;
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
@@ -345,74 +211,6 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
var trimmedPackageRestoreTasks = new List<Task<bool>>();
|
||||
if (!fallbackToFullPackage)
|
||||
{
|
||||
// Skip restoring externals and runtime if we are going to fullback to the full package.
|
||||
if (externalsTrimmed)
|
||||
{
|
||||
trimmedPackageRestoreTasks.Add(RestoreTrimmedExternals(latestRunnerDirectory, token));
|
||||
}
|
||||
if (runtimeTrimmed)
|
||||
{
|
||||
trimmedPackageRestoreTasks.Add(RestoreTrimmedDotnetRuntime(latestRunnerDirectory, token));
|
||||
}
|
||||
}
|
||||
|
||||
if (trimmedPackageRestoreTasks.Count > 0)
|
||||
{
|
||||
var restoreResults = await Task.WhenAll(trimmedPackageRestoreTasks);
|
||||
if (restoreResults.Any(x => x == false))
|
||||
{
|
||||
// if any of the restore failed, fallback to full package.
|
||||
fallbackToFullPackage = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (fallbackToFullPackage)
|
||||
{
|
||||
Trace.Error("Something wrong with the trimmed runner package, failback to use the full package for runner updates.");
|
||||
_updateTrace.Enqueue($"FallbackToFullPackage: {fallbackToFullPackage}");
|
||||
|
||||
IOUtil.DeleteDirectory(latestRunnerDirectory, token);
|
||||
Directory.CreateDirectory(latestRunnerDirectory);
|
||||
|
||||
packageDownloadUrl = _targetPackage.DownloadUrl;
|
||||
packageHashValue = _targetPackage.HashValue;
|
||||
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
|
||||
|
||||
try
|
||||
{
|
||||
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token);
|
||||
|
||||
if (string.IsNullOrEmpty(archiveFile))
|
||||
{
|
||||
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
|
||||
}
|
||||
|
||||
await ValidateRunnerHash(archiveFile, packageHashValue);
|
||||
|
||||
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
{
|
||||
// delete .zip file
|
||||
if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
|
||||
{
|
||||
Trace.Verbose("Deleting latest runner package zip: {0}", archiveFile);
|
||||
IOUtil.DeleteFile(archiveFile);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
//it is not critical if we fail to delete the .zip file
|
||||
Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await CopyLatestRunnerToRoot(latestRunnerDirectory, token);
|
||||
}
|
||||
|
||||
@@ -497,14 +295,14 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Download runner: finished download");
|
||||
downloadSucceeded = true;
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"PackageDownloadTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
_updateTrace.Enqueue($"Attempts: {attempt}");
|
||||
_updateTrace.Enqueue($"PackageSize: {downloadSize / 1024 / 1024}MB");
|
||||
_updateTrace.Add($"PackageDownloadTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
_updateTrace.Add($"Attempts: {attempt}");
|
||||
_updateTrace.Add($"PackageSize: {downloadSize / 1024 / 1024}MB");
|
||||
break;
|
||||
}
|
||||
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info($"Runner download has been cancelled.");
|
||||
Trace.Info($"Runner download has been canceled.");
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -544,12 +342,12 @@ namespace GitHub.Runner.Listener
|
||||
if (hash != packageHashValue)
|
||||
{
|
||||
// Hash did not match, we can't recover from this, just throw
|
||||
throw new Exception($"Computed runner hash {hash} did not match expected Runner Hash {packageHashValue} for {archiveFile}");
|
||||
throw new Exception($"Computed runner hash {hash} did not match expected Runner Hash {packageHashValue} for {_targetPackage.Filename}");
|
||||
}
|
||||
|
||||
stopWatch.Stop();
|
||||
Trace.Info($"Validated Runner Hash matches {archiveFile} : {packageHashValue}");
|
||||
_updateTrace.Enqueue($"ValidateHashTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
Trace.Info($"Validated Runner Hash matches {_targetPackage.Filename} : {packageHashValue}");
|
||||
_updateTrace.Add($"ValidateHashTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -605,7 +403,7 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
stopWatch.Stop();
|
||||
Trace.Info($"Finished getting latest runner package at: {extractDirectory}.");
|
||||
_updateTrace.Enqueue($"PackageExtractTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
_updateTrace.Add($"PackageExtractTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
|
||||
private Task CopyLatestRunnerToRoot(string latestRunnerDirectory, CancellationToken token)
|
||||
@@ -638,7 +436,7 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"CopyRunnerToRootTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
_updateTrace.Add($"CopyRunnerToRootTime: {stopWatch.ElapsedMilliseconds}ms");
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
@@ -763,15 +561,9 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
_terminal.WriteLine(currentState);
|
||||
|
||||
var traces = new List<string>();
|
||||
while (_updateTrace.TryDequeue(out var trace))
|
||||
if (_updateTrace.Count > 0)
|
||||
{
|
||||
traces.Add(trace);
|
||||
}
|
||||
|
||||
if (traces.Count > 0)
|
||||
{
|
||||
foreach (var trace in traces)
|
||||
foreach (var trace in _updateTrace)
|
||||
{
|
||||
Trace.Info(trace);
|
||||
}
|
||||
@@ -779,7 +571,7 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
try
|
||||
{
|
||||
await _runnerServer.UpdateAgentUpdateStateAsync(_poolId, _agentId, currentState, string.Join(Environment.NewLine, traces));
|
||||
await _runnerServer.UpdateAgentUpdateStateAsync(_poolId, _agentId, currentState, string.Join(Environment.NewLine, _updateTrace));
|
||||
_updateTrace.Clear();
|
||||
}
|
||||
catch (VssResourceNotFoundException)
|
||||
@@ -793,330 +585,5 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Catch exception during report update state, ignore this error and continue auto-update.");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> RestoreTrimmedExternals(string downloadDirectory, CancellationToken token)
|
||||
{
|
||||
// Copy the current runner's externals if we are using a externals trimmed package
|
||||
// Execute the node.js to make sure the copied externals is working.
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Copy {_externalsCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory)}.");
|
||||
IOUtil.CopyDirectory(_externalsCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory), token);
|
||||
|
||||
// try run node.js to see if current node.js works fine after copy over to new location.
|
||||
var nodeVersions = NodeUtil.BuiltInNodeVersions;
|
||||
foreach (var nodeVersion in nodeVersions)
|
||||
{
|
||||
var newNodeBinary = Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory, nodeVersion, "bin", $"node{IOUtil.ExeExtension}");
|
||||
if (File.Exists(newNodeBinary))
|
||||
{
|
||||
using (var p = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
var outputs = "";
|
||||
p.ErrorDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Error(data.Data);
|
||||
}
|
||||
};
|
||||
p.OutputDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Info(data.Data);
|
||||
outputs = data.Data;
|
||||
}
|
||||
};
|
||||
var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newNodeBinary, $"-e \"console.log('{nameof(RestoreTrimmedExternals)}')\"", null, token);
|
||||
if (exitCode != 0)
|
||||
{
|
||||
Trace.Error($"{newNodeBinary} -e \"console.log()\" failed with exit code {exitCode}");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(outputs, nameof(RestoreTrimmedExternals), StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Trace.Error($"{newNodeBinary} -e \"console.log()\" did not output expected content.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to restore externals for trimmed package: {ex}");
|
||||
return false;
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(RestoreTrimmedExternals)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> RestoreTrimmedDotnetRuntime(string downloadDirectory, CancellationToken token)
|
||||
{
|
||||
// Copy the current runner's dotnet runtime if we are using a dotnet runtime trimmed package
|
||||
// Execute the runner.listener to make sure the copied runtime is working.
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Copy {_dotnetRuntimeCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.BinDirectory)}.");
|
||||
IOUtil.CopyDirectory(_dotnetRuntimeCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.BinDirectory), token);
|
||||
|
||||
// try run the runner executable to see if current dotnet runtime + future runner binary works fine.
|
||||
var newRunnerBinary = Path.Combine(downloadDirectory, Constants.Path.BinDirectory, "Runner.Listener");
|
||||
using (var p = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
p.ErrorDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Error(data.Data);
|
||||
}
|
||||
};
|
||||
p.OutputDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Info(data.Data);
|
||||
}
|
||||
};
|
||||
var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newRunnerBinary, "--version", null, token);
|
||||
if (exitCode != 0)
|
||||
{
|
||||
Trace.Error($"{newRunnerBinary} --version failed with exit code {exitCode}");
|
||||
return false;
|
||||
}
|
||||
else
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to restore dotnet runtime for trimmed package: {ex}");
|
||||
return false;
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(RestoreTrimmedDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task CloneAndCalculateAssetsHash(string dotnetRuntimeCloneDirectory, string externalsCloneDirectory, CancellationToken token)
|
||||
{
|
||||
var runtimeCloneTask = CloneDotnetRuntime(dotnetRuntimeCloneDirectory, token);
|
||||
var externalsCloneTask = CloneExternals(externalsCloneDirectory, token);
|
||||
|
||||
var waitingTasks = new Dictionary<string, Task>()
|
||||
{
|
||||
{nameof(CloneDotnetRuntime), runtimeCloneTask},
|
||||
{nameof(CloneExternals),externalsCloneTask}
|
||||
};
|
||||
|
||||
while (waitingTasks.Count > 0)
|
||||
{
|
||||
Trace.Info($"Waiting for {waitingTasks.Count} tasks to complete.");
|
||||
var complatedTask = await Task.WhenAny(waitingTasks.Values);
|
||||
if (waitingTasks.ContainsKey(nameof(CloneExternals)) &&
|
||||
complatedTask == waitingTasks[nameof(CloneExternals)])
|
||||
{
|
||||
Trace.Info($"Externals clone finished.");
|
||||
waitingTasks.Remove(nameof(CloneExternals));
|
||||
try
|
||||
{
|
||||
if (await externalsCloneTask && !token.IsCancellationRequested)
|
||||
{
|
||||
var externalsHash = await HashFiles(externalsCloneDirectory, token);
|
||||
Trace.Info($"Externals content hash: {externalsHash}");
|
||||
_contentHashes[_externals] = externalsHash;
|
||||
_updateTrace.Enqueue($"ExternalsHash: {_contentHashes[_externals]}");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Error($"Skip compute hash since clone externals failed/cancelled.");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to hash externals content: {ex}");
|
||||
}
|
||||
}
|
||||
else if (waitingTasks.ContainsKey(nameof(CloneDotnetRuntime)) &&
|
||||
complatedTask == waitingTasks[nameof(CloneDotnetRuntime)])
|
||||
{
|
||||
Trace.Info($"Dotnet runtime clone finished.");
|
||||
waitingTasks.Remove(nameof(CloneDotnetRuntime));
|
||||
try
|
||||
{
|
||||
if (await runtimeCloneTask && !token.IsCancellationRequested)
|
||||
{
|
||||
var runtimeHash = await HashFiles(dotnetRuntimeCloneDirectory, token);
|
||||
Trace.Info($"Runtime content hash: {runtimeHash}");
|
||||
_contentHashes[_dotnetRuntime] = runtimeHash;
|
||||
_updateTrace.Enqueue($"DotnetRuntimeHash: {_contentHashes[_dotnetRuntime]}");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Error($"Skip compute hash since clone dotnet runtime failed/cancelled.");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to hash runtime content: {ex}");
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Still waiting for {waitingTasks.Count} tasks to complete.");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> CloneDotnetRuntime(string runtimeDir, CancellationToken token)
|
||||
{
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Cloning dotnet runtime to {runtimeDir}");
|
||||
IOUtil.DeleteDirectory(runtimeDir, CancellationToken.None);
|
||||
Directory.CreateDirectory(runtimeDir);
|
||||
|
||||
var assembly = Assembly.GetExecutingAssembly();
|
||||
var assetsContent = default(string);
|
||||
using (var stream = assembly.GetManifestResourceStream("GitHub.Runner.Listener.runnercoreassets"))
|
||||
using (var streamReader = new StreamReader(stream))
|
||||
{
|
||||
assetsContent = await streamReader.ReadToEndAsync();
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(assetsContent))
|
||||
{
|
||||
var runnerCoreAssets = assetsContent.Split(new[] { "\n", "\r\n" }, StringSplitOptions.RemoveEmptyEntries);
|
||||
if (runnerCoreAssets.Length > 0)
|
||||
{
|
||||
var binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
|
||||
IOUtil.CopyDirectory(binDir, runtimeDir, token);
|
||||
|
||||
var clonedFile = 0;
|
||||
foreach (var file in Directory.EnumerateFiles(runtimeDir, "*", SearchOption.AllDirectories))
|
||||
{
|
||||
token.ThrowIfCancellationRequested();
|
||||
if (runnerCoreAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x.Trim())))
|
||||
{
|
||||
Trace.Verbose($"{file} is part of the runner core, delete from cloned runtime directory.");
|
||||
IOUtil.DeleteFile(file);
|
||||
}
|
||||
else
|
||||
{
|
||||
clonedFile++;
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Successfully cloned dotnet runtime to {runtimeDir}. Total files: {clonedFile}");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to clone dotnet runtime to {runtimeDir}");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(CloneDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private Task<bool> CloneExternals(string externalsDir, CancellationToken token)
|
||||
{
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Cloning externals to {externalsDir}");
|
||||
IOUtil.DeleteDirectory(externalsDir, CancellationToken.None);
|
||||
Directory.CreateDirectory(externalsDir);
|
||||
IOUtil.CopyDirectory(HostContext.GetDirectory(WellKnownDirectory.Externals), externalsDir, token);
|
||||
Trace.Info($"Successfully cloned externals to {externalsDir}.");
|
||||
return Task.FromResult(true);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to clone externals to {externalsDir}");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(CloneExternals)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
|
||||
return Task.FromResult(false);
|
||||
}
|
||||
|
||||
private async Task<string> HashFiles(string fileFolder, CancellationToken token)
|
||||
{
|
||||
Trace.Info($"Calculating hash for {fileFolder}");
|
||||
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
string binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
|
||||
string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
|
||||
string hashFilesScript = Path.Combine(binDir, "hashFiles");
|
||||
var hashResult = string.Empty;
|
||||
|
||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.ErrorDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
|
||||
{
|
||||
hashResult = data.Data.Substring(10, data.Data.Length - 20);
|
||||
Trace.Info($"Hash result: '{hashResult}'");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info(data.Data);
|
||||
}
|
||||
};
|
||||
|
||||
processInvoker.OutputDataReceived += (_, data) =>
|
||||
{
|
||||
Trace.Verbose(data.Data);
|
||||
};
|
||||
|
||||
var env = new Dictionary<string, string>
|
||||
{
|
||||
["patterns"] = "**"
|
||||
};
|
||||
|
||||
int exitCode = await processInvoker.ExecuteAsync(workingDirectory: fileFolder,
|
||||
fileName: node,
|
||||
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
|
||||
environment: env,
|
||||
requireExitCodeZero: false,
|
||||
outputEncoding: null,
|
||||
killProcessOnCancel: true,
|
||||
cancellationToken: token);
|
||||
|
||||
if (exitCode != 0)
|
||||
{
|
||||
Trace.Error($"hashFiles returns '{exitCode}' failed. Fail to hash files under directory '{fileFolder}'");
|
||||
}
|
||||
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(HashFiles)}{Path.GetFileName(fileFolder)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
return hashResult;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -469,7 +469,7 @@ namespace GitHub.Runner.Plugins.Artifact
|
||||
try
|
||||
{
|
||||
uploadTimer.Restart();
|
||||
using (HttpResponseMessage response = await _fileContainerHttpClient.UploadFileAsync(_containerId, itemPath, fs, _projectId, cancellationToken: token))
|
||||
using (HttpResponseMessage response = await _fileContainerHttpClient.UploadFileAsync(_containerId, itemPath, fs, _projectId, cancellationToken: token, chunkSize: 4 * 1024 * 1024))
|
||||
{
|
||||
if (response == null || response.StatusCode != HttpStatusCode.Created)
|
||||
{
|
||||
|
||||
@@ -166,7 +166,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
|
||||
}
|
||||
else
|
||||
{
|
||||
// delete the index.lock file left by previous cancelled build or any operation cause git.exe crash last time.
|
||||
// delete the index.lock file left by previous canceled build or any operation cause git.exe crash last time.
|
||||
string lockFile = Path.Combine(targetPath, ".git\\index.lock");
|
||||
if (File.Exists(lockFile))
|
||||
{
|
||||
@@ -181,7 +181,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
|
||||
}
|
||||
}
|
||||
|
||||
// delete the shallow.lock file left by previous cancelled build or any operation cause git.exe crash last time.
|
||||
// delete the shallow.lock file left by previous canceled build or any operation cause git.exe crash last time.
|
||||
string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock");
|
||||
if (File.Exists(shallowLockFile))
|
||||
{
|
||||
|
||||
@@ -150,7 +150,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
|
||||
}
|
||||
else
|
||||
{
|
||||
// delete the index.lock file left by previous cancelled build or any operation cause git.exe crash last time.
|
||||
// delete the index.lock file left by previous canceled build or any operation cause git.exe crash last time.
|
||||
string lockFile = Path.Combine(targetPath, ".git\\index.lock");
|
||||
if (File.Exists(lockFile))
|
||||
{
|
||||
@@ -165,7 +165,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
|
||||
}
|
||||
}
|
||||
|
||||
// delete the shallow.lock file left by previous cancelled build or any operation cause git.exe crash last time.
|
||||
// delete the shallow.lock file left by previous canceled build or any operation cause git.exe crash last time.
|
||||
string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock");
|
||||
if (File.Exists(shallowLockFile))
|
||||
{
|
||||
|
||||
@@ -108,7 +108,7 @@ namespace GitHub.Runner.Sdk
|
||||
}
|
||||
|
||||
// Create a new token source for the parallel query. The parallel query should be
|
||||
// cancelled after the first error is encountered. Otherwise the number of exceptions
|
||||
// canceled after the first error is encountered. Otherwise the number of exceptions
|
||||
// could get out of control for a large directory with access denied on every file.
|
||||
using (var tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken))
|
||||
{
|
||||
|
||||
@@ -27,11 +27,6 @@ namespace GitHub.Runner.Sdk
|
||||
|
||||
VssClientHttpRequestSettings.Default.UserAgent = headerValues;
|
||||
VssHttpMessageHandler.DefaultWebProxy = proxy;
|
||||
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
||||
{
|
||||
VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
|
||||
}
|
||||
}
|
||||
|
||||
public static VssConnection CreateConnection(Uri serverUri, VssCredentials credentials, IEnumerable<DelegatingHandler> additionalDelegatingHandler = null, TimeSpan? timeout = null)
|
||||
|
||||
@@ -178,7 +178,7 @@ namespace GitHub.Runner.Worker
|
||||
Message = $"Invoked ::stopCommand:: with token: [{stopToken}]",
|
||||
Type = JobTelemetryType.ActionCommand
|
||||
};
|
||||
context.Global.JobTelemetry.Add(telemetry);
|
||||
context.JobTelemetry.Add(telemetry);
|
||||
}
|
||||
|
||||
if (isTokenInvalid && !allowUnsecureStopCommandTokens)
|
||||
|
||||
@@ -651,10 +651,10 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
try
|
||||
{
|
||||
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
|
||||
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
|
||||
break;
|
||||
}
|
||||
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is cancelled.
|
||||
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is canceled.
|
||||
{
|
||||
// UnresolvableActionDownloadInfoException is a 422 client error, don't retry
|
||||
// Some possible cases are:
|
||||
|
||||
@@ -1,17 +1,18 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.ObjectTemplating;
|
||||
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Handlers;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
{
|
||||
@@ -140,7 +141,21 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
|
||||
|
||||
ExecutionContext.WriteWebhookPayload();
|
||||
// Makes directory for event_path data
|
||||
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
|
||||
var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
|
||||
Directory.CreateDirectory(workflowDirectory);
|
||||
|
||||
var gitHubEvent = ExecutionContext.GetGitHubContext("event");
|
||||
|
||||
// adds the GitHub event path/file if the event exists
|
||||
if (gitHubEvent != null)
|
||||
{
|
||||
var workflowFile = Path.Combine(workflowDirectory, "event.json");
|
||||
Trace.Info($"Write event payload to {workflowFile}");
|
||||
File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
|
||||
ExecutionContext.SetGitHubContext("event_path", workflowFile);
|
||||
}
|
||||
|
||||
// Set GITHUB_ACTION_REPOSITORY if this Action is from a repository
|
||||
if (Action.Reference is Pipelines.RepositoryPathReference repoPathReferenceAction &&
|
||||
@@ -259,8 +274,8 @@ namespace GitHub.Runner.Worker
|
||||
actionDirectory: definition.Directory,
|
||||
localActionContainerSetupSteps: localActionContainerSetupSteps);
|
||||
|
||||
// Print out action details and log telemetry
|
||||
handler.PrepareExecution(Stage);
|
||||
// Print out action details
|
||||
handler.PrintActionDetails(Stage);
|
||||
|
||||
// Run the task.
|
||||
try
|
||||
|
||||
@@ -15,8 +15,8 @@ using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker.Container;
|
||||
using GitHub.Services.WebApi;
|
||||
@@ -52,7 +52,8 @@ namespace GitHub.Runner.Worker
|
||||
Dictionary<string, string> IntraActionState { get; }
|
||||
Dictionary<string, VariableValue> JobOutputs { get; }
|
||||
ActionsEnvironmentReference ActionsEnvironment { get; }
|
||||
ActionsStepTelemetry StepTelemetry { get; }
|
||||
List<ActionsStepTelemetry> ActionsStepsTelemetry { get; }
|
||||
List<JobTelemetry> JobTelemetry { get; }
|
||||
DictionaryContextData ExpressionValues { get; }
|
||||
IList<IFunctionInfo> ExpressionFunctions { get; }
|
||||
JobContext JobContext { get; }
|
||||
@@ -108,17 +109,12 @@ namespace GitHub.Runner.Worker
|
||||
// others
|
||||
void ForceTaskComplete();
|
||||
void RegisterPostJobStep(IStep step);
|
||||
void PublishStepTelemetry();
|
||||
void WriteWebhookPayload();
|
||||
}
|
||||
|
||||
public sealed class ExecutionContext : RunnerService, IExecutionContext
|
||||
{
|
||||
private const int _maxIssueCount = 10;
|
||||
private const int _throttlingDelayReportThreshold = 10 * 1000; // Don't report throttling with less than 10 seconds delay
|
||||
private const int _maxIssueMessageLength = 4096; // Don't send issue with huge message since we can't forward them from actions to check annotation.
|
||||
private const int _maxIssueCountInTelemetry = 3; // Only send the first 3 issues to telemetry
|
||||
private const int _maxIssueMessageLengthInTelemetry = 256; // Only send the first 256 characters of issue message to telemetry
|
||||
|
||||
private readonly TimelineRecord _record = new TimelineRecord();
|
||||
private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new Dictionary<Guid, TimelineRecord>();
|
||||
@@ -143,7 +139,6 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
// only job level ExecutionContext will track throttling delay.
|
||||
private long _totalThrottlingDelayInMilliseconds = 0;
|
||||
private bool _stepTelemetryPublished = false;
|
||||
|
||||
public Guid Id => _record.Id;
|
||||
public Guid EmbeddedId { get; private set; }
|
||||
@@ -157,7 +152,8 @@ namespace GitHub.Runner.Worker
|
||||
public Dictionary<string, VariableValue> JobOutputs { get; private set; }
|
||||
|
||||
public ActionsEnvironmentReference ActionsEnvironment { get; private set; }
|
||||
public ActionsStepTelemetry StepTelemetry { get; } = new ActionsStepTelemetry();
|
||||
public List<ActionsStepTelemetry> ActionsStepsTelemetry { get; private set; }
|
||||
public List<JobTelemetry> JobTelemetry { get; private set; }
|
||||
public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
|
||||
public IList<IFunctionInfo> ExpressionFunctions { get; } = new List<IFunctionInfo>();
|
||||
|
||||
@@ -277,9 +273,9 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
Trace.Info($"'post' of '{actionRunner.DisplayName}' already push to child post step stack.");
|
||||
}
|
||||
else
|
||||
else
|
||||
{
|
||||
Root.EmbeddedStepsWithPostRegistered[actionRunner.Action.Id] = actionRunner.Condition;
|
||||
Root.EmbeddedStepsWithPostRegistered[actionRunner.Action.Id] = actionRunner.Condition;
|
||||
}
|
||||
return;
|
||||
}
|
||||
@@ -298,20 +294,7 @@ namespace GitHub.Runner.Worker
|
||||
Root.PostJobSteps.Push(step);
|
||||
}
|
||||
|
||||
public IExecutionContext CreateChild(
|
||||
Guid recordId,
|
||||
string displayName,
|
||||
string refName,
|
||||
string scopeName,
|
||||
string contextName,
|
||||
ActionRunStage stage,
|
||||
Dictionary<string, string> intraActionState = null,
|
||||
int? recordOrder = null,
|
||||
IPagingLogger logger = null,
|
||||
bool isEmbedded = false,
|
||||
CancellationTokenSource cancellationTokenSource = null,
|
||||
Guid embeddedId = default(Guid),
|
||||
string siblingScopeName = null)
|
||||
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, ActionRunStage stage, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null)
|
||||
{
|
||||
Trace.Entering();
|
||||
|
||||
@@ -323,6 +306,7 @@ namespace GitHub.Runner.Worker
|
||||
child.Stage = stage;
|
||||
child.EmbeddedId = embeddedId;
|
||||
child.SiblingScopeName = siblingScopeName;
|
||||
child.JobTelemetry = JobTelemetry;
|
||||
if (intraActionState == null)
|
||||
{
|
||||
child.IntraActionState = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
@@ -362,9 +346,6 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
child.IsEmbedded = isEmbedded;
|
||||
child.StepTelemetry.StepId = recordId;
|
||||
child.StepTelemetry.Stage = stage.ToString();
|
||||
child.StepTelemetry.IsEmbedded = isEmbedded;
|
||||
|
||||
return child;
|
||||
}
|
||||
@@ -373,13 +354,7 @@ namespace GitHub.Runner.Worker
|
||||
/// An embedded execution context shares the same record ID, record name, logger,
|
||||
/// and a linked cancellation token.
|
||||
/// </summary>
|
||||
public IExecutionContext CreateEmbeddedChild(
|
||||
string scopeName,
|
||||
string contextName,
|
||||
Guid embeddedId,
|
||||
ActionRunStage stage,
|
||||
Dictionary<string, string> intraActionState = null,
|
||||
string siblingScopeName = null)
|
||||
public IExecutionContext CreateEmbeddedChild(string scopeName, string contextName, Guid embeddedId, ActionRunStage stage, Dictionary<string, string> intraActionState = null, string siblingScopeName = null)
|
||||
{
|
||||
return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token), intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName);
|
||||
}
|
||||
@@ -429,8 +404,6 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
PublishStepTelemetry();
|
||||
|
||||
if (Root != this)
|
||||
{
|
||||
// only dispose TokenSource for step level ExecutionContext
|
||||
@@ -546,20 +519,11 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
issue.Message = HostContext.SecretMasker.MaskSecrets(issue.Message);
|
||||
if (issue.Message.Length > _maxIssueMessageLength)
|
||||
{
|
||||
issue.Message = issue.Message[.._maxIssueMessageLength];
|
||||
}
|
||||
|
||||
// Tracking the line number (logFileLineNumber) and step number (stepNumber) for each issue that gets created
|
||||
// Actions UI from the run summary page use both values to easily link to an exact locations in logs where annotations originate from
|
||||
if (_record.Order != null)
|
||||
{
|
||||
issue.Data["stepNumber"] = _record.Order.ToString();
|
||||
}
|
||||
|
||||
if (issue.Type == IssueType.Error)
|
||||
{
|
||||
// tracking line number for each issue in log file
|
||||
// log UI use this to navigate from issue to log
|
||||
if (!string.IsNullOrEmpty(logMessage))
|
||||
{
|
||||
long logLineNumber = Write(WellKnownTags.Error, logMessage);
|
||||
@@ -575,6 +539,8 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
else if (issue.Type == IssueType.Warning)
|
||||
{
|
||||
// tracking line number for each issue in log file
|
||||
// log UI use this to navigate from issue to log
|
||||
if (!string.IsNullOrEmpty(logMessage))
|
||||
{
|
||||
long logLineNumber = Write(WellKnownTags.Warning, logMessage);
|
||||
@@ -590,6 +556,9 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
else if (issue.Type == IssueType.Notice)
|
||||
{
|
||||
|
||||
// tracking line number for each issue in log file
|
||||
// log UI use this to navigate from issue to log
|
||||
if (!string.IsNullOrEmpty(logMessage))
|
||||
{
|
||||
long logLineNumber = Write(WellKnownTags.Notice, logMessage);
|
||||
@@ -679,29 +648,22 @@ namespace GitHub.Runner.Worker
|
||||
// Variables
|
||||
Global.Variables = new Variables(HostContext, message.Variables);
|
||||
|
||||
if (Global.Variables.GetBoolean("DistributedTask.ForceInternalNodeVersionOnRunnerTo12") ?? false)
|
||||
{
|
||||
Environment.SetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion, "node12");
|
||||
}
|
||||
|
||||
// Environment variables shared across all actions
|
||||
Global.EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
|
||||
|
||||
// Job defaults shared across all actions
|
||||
Global.JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
// Job Telemetry
|
||||
Global.JobTelemetry = new List<JobTelemetry>();
|
||||
|
||||
// ActionsStepTelemetry for entire job
|
||||
Global.StepsTelemetry = new List<ActionsStepTelemetry>();
|
||||
|
||||
// Job Outputs
|
||||
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
// Actions environment
|
||||
ActionsEnvironment = message.ActionsEnvironment;
|
||||
|
||||
// ActionsStepTelemetry
|
||||
ActionsStepsTelemetry = new List<ActionsStepTelemetry>();
|
||||
|
||||
JobTelemetry = new List<JobTelemetry>();
|
||||
|
||||
// Service container info
|
||||
Global.ServiceContainers = new List<ContainerInfo>();
|
||||
@@ -933,83 +895,6 @@ namespace GitHub.Runner.Worker
|
||||
return Root._matchers ?? Array.Empty<IssueMatcherConfig>();
|
||||
}
|
||||
|
||||
public void PublishStepTelemetry()
|
||||
{
|
||||
if (!_stepTelemetryPublished)
|
||||
{
|
||||
// Add to the global steps telemetry only if we have something to log.
|
||||
if (!string.IsNullOrEmpty(StepTelemetry?.Type))
|
||||
{
|
||||
if (!IsEmbedded)
|
||||
{
|
||||
StepTelemetry.Result = _record.Result;
|
||||
}
|
||||
|
||||
if (!IsEmbedded &&
|
||||
_record.FinishTime != null &&
|
||||
_record.StartTime != null)
|
||||
{
|
||||
StepTelemetry.ExecutionTimeInSeconds = (int)Math.Ceiling((_record.FinishTime - _record.StartTime)?.TotalSeconds ?? 0);
|
||||
}
|
||||
|
||||
if (!IsEmbedded &&
|
||||
_record.Issues.Count > 0)
|
||||
{
|
||||
foreach (var issue in _record.Issues)
|
||||
{
|
||||
if ((issue.Type == IssueType.Error || issue.Type == IssueType.Warning) &&
|
||||
!string.IsNullOrEmpty(issue.Message))
|
||||
{
|
||||
string issueTelemetry;
|
||||
if (issue.Message.Length > _maxIssueMessageLengthInTelemetry)
|
||||
{
|
||||
issueTelemetry = $"{issue.Message[.._maxIssueMessageLengthInTelemetry]}";
|
||||
}
|
||||
else
|
||||
{
|
||||
issueTelemetry = issue.Message;
|
||||
}
|
||||
|
||||
StepTelemetry.ErrorMessages.Add(issueTelemetry);
|
||||
|
||||
// Only send over the first 3 issues to avoid sending too much data.
|
||||
if (StepTelemetry.ErrorMessages.Count >= _maxIssueCountInTelemetry)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Publish step telemetry for current step {StringUtil.ConvertToJson(StepTelemetry)}.");
|
||||
Global.StepsTelemetry.Add(StepTelemetry);
|
||||
_stepTelemetryPublished = true;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"Step telemetry has already been published.");
|
||||
}
|
||||
}
|
||||
|
||||
public void WriteWebhookPayload()
|
||||
{
|
||||
// Makes directory for event_path data
|
||||
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
|
||||
var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
|
||||
Directory.CreateDirectory(workflowDirectory);
|
||||
var gitHubEvent = GetGitHubContext("event");
|
||||
|
||||
// adds the GitHub event path/file if the event exists
|
||||
if (gitHubEvent != null)
|
||||
{
|
||||
var workflowFile = Path.Combine(workflowDirectory, "event.json");
|
||||
Trace.Info($"Write event payload to {workflowFile}");
|
||||
File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
|
||||
SetGitHubContext("event_path", workflowFile);
|
||||
}
|
||||
}
|
||||
|
||||
private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order)
|
||||
{
|
||||
_mainTimelineId = timelineId;
|
||||
|
||||
@@ -7,7 +7,6 @@ using GitHub.Runner.Sdk;
|
||||
using System.Reflection;
|
||||
using System.Threading;
|
||||
using System.Collections.Generic;
|
||||
using GitHub.Runner.Common.Util;
|
||||
|
||||
namespace GitHub.Runner.Worker.Expressions
|
||||
{
|
||||
@@ -63,7 +62,7 @@ namespace GitHub.Runner.Worker.Expressions
|
||||
string binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
|
||||
string runnerRoot = new DirectoryInfo(binDir).Parent.FullName;
|
||||
|
||||
string node = Path.Combine(runnerRoot, "externals", NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
|
||||
string node = Path.Combine(runnerRoot, "externals", "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
string hashFilesScript = Path.Combine(binDir, "hashFiles");
|
||||
var hashResult = string.Empty;
|
||||
var p = new ProcessInvoker(new HashFilesTrace(context.Trace));
|
||||
|
||||
@@ -181,10 +181,6 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
throw new Exception($"Invalid environment variable value. Matching delimiter not found '{delimiter}'");
|
||||
}
|
||||
if (newline == null)
|
||||
{
|
||||
throw new Exception($"Invalid environment variable value. EOF marker missing new line.");
|
||||
}
|
||||
endIndex = index - newline.Length;
|
||||
tempLine = ReadLine(text, ref index, out newline);
|
||||
}
|
||||
@@ -263,72 +259,4 @@ namespace GitHub.Runner.Worker
|
||||
return text.Substring(originalIndex, lfIndex - originalIndex);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class CreateStepSummaryCommand : RunnerService, IFileCommandExtension
|
||||
{
|
||||
private const int _attachmentSizeLimit = 128 * 1024;
|
||||
|
||||
public string ContextName => "step_summary";
|
||||
public string FilePrefix => "step_summary_";
|
||||
|
||||
public Type ExtensionType => typeof(IFileCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container)
|
||||
{
|
||||
if (!context.Global.Variables.GetBoolean("DistributedTask.UploadStepSummary") ?? true)
|
||||
{
|
||||
Trace.Info("Step Summary is disabled; skipping attachment upload");
|
||||
return;
|
||||
}
|
||||
|
||||
if (String.IsNullOrEmpty(filePath) || !File.Exists(filePath))
|
||||
{
|
||||
Trace.Info($"Step Summary file ({filePath}) does not exist; skipping attachment upload");
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var fileSize = new FileInfo(filePath).Length;
|
||||
if (fileSize == 0)
|
||||
{
|
||||
Trace.Info($"Step Summary file ({filePath}) is empty; skipping attachment upload");
|
||||
return;
|
||||
}
|
||||
|
||||
if (fileSize > _attachmentSizeLimit)
|
||||
{
|
||||
context.Error(String.Format(Constants.Runner.UnsupportedSummarySize, _attachmentSizeLimit / 1024, fileSize / 1024));
|
||||
Trace.Info($"Step Summary file ({filePath}) is too large ({fileSize} bytes); skipping attachment upload");
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
Trace.Verbose($"Step Summary file exists: {filePath} and has a file size of {fileSize} bytes");
|
||||
var scrubbedFilePath = filePath + "-scrubbed";
|
||||
|
||||
using (var streamReader = new StreamReader(filePath))
|
||||
using (var streamWriter = new StreamWriter(scrubbedFilePath))
|
||||
{
|
||||
string line;
|
||||
while ((line = streamReader.ReadLine()) != null)
|
||||
{
|
||||
var maskedLine = HostContext.SecretMasker.MaskSecrets(line);
|
||||
streamWriter.WriteLine(maskedLine);
|
||||
}
|
||||
}
|
||||
|
||||
var attachmentName = context.Id.ToString();
|
||||
|
||||
Trace.Info($"Queueing file ({filePath}) for attachment upload ({attachmentName})");
|
||||
// Attachments must be added to the parent context (job), not the current context (step)
|
||||
context.Root.QueueAttachFile(ChecksAttachmentType.StepSummary, attachmentName, scrubbedFilePath);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Trace.Error($"Error while processing file ({filePath}): {e}");
|
||||
context.Error($"Failed to create step summary using 'GITHUB_STEP_SUMMARY': {e.Message}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,10 +8,10 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
private readonly HashSet<string> _contextEnvAllowlist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
"action",
|
||||
"action_path",
|
||||
"action_ref",
|
||||
"action_repository",
|
||||
"action",
|
||||
"actor",
|
||||
"api_url",
|
||||
"base_ref",
|
||||
@@ -22,20 +22,18 @@ namespace GitHub.Runner.Worker
|
||||
"head_ref",
|
||||
"job",
|
||||
"path",
|
||||
"ref",
|
||||
"ref_name",
|
||||
"ref_protected",
|
||||
"ref_type",
|
||||
"ref",
|
||||
"repository_owner",
|
||||
"repository",
|
||||
"repository_owner",
|
||||
"retention_days",
|
||||
"run_attempt",
|
||||
"run_id",
|
||||
"run_number",
|
||||
"server_url",
|
||||
"sha",
|
||||
"step_summary",
|
||||
"triggering_actor",
|
||||
"workflow",
|
||||
"workspace",
|
||||
};
|
||||
|
||||
@@ -14,8 +14,6 @@ namespace GitHub.Runner.Worker
|
||||
public PlanFeatures Features { get; set; }
|
||||
public IList<String> FileTable { get; set; }
|
||||
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; set; }
|
||||
public List<ActionsStepTelemetry> StepsTelemetry { get; set; }
|
||||
public List<JobTelemetry> JobTelemetry { get; set; }
|
||||
public TaskOrchestrationPlanReference Plan { get; set; }
|
||||
public List<string> PrependPath { get; set; }
|
||||
public List<ContainerInfo> ServiceContainers { get; set; }
|
||||
|
||||
@@ -42,7 +42,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
ArgUtil.NotNull(Data.PreSteps, nameof(Data.PreSteps));
|
||||
steps = Data.PreSteps;
|
||||
}
|
||||
}
|
||||
else if (stage == ActionRunStage.Post)
|
||||
{
|
||||
ArgUtil.NotNull(Data.PostSteps, nameof(Data.PostSteps));
|
||||
@@ -60,14 +60,14 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
Trace.Info($"Skipping executing post step id: {step.Id}, name: ${step.DisplayName}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
ArgUtil.NotNull(Data.Steps, nameof(Data.Steps));
|
||||
steps = Data.Steps;
|
||||
}
|
||||
|
||||
// Set extra telemetry base on the current context.
|
||||
// Add Telemetry to JobContext to send with JobCompleteMessage
|
||||
if (stage == ActionRunStage.Main)
|
||||
{
|
||||
var hasRunsStep = false;
|
||||
@@ -83,16 +83,20 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
hasUsesStep = true;
|
||||
}
|
||||
}
|
||||
|
||||
ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre;
|
||||
ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost;
|
||||
|
||||
ExecutionContext.StepTelemetry.HasRunsStep = hasRunsStep;
|
||||
ExecutionContext.StepTelemetry.HasUsesStep = hasUsesStep;
|
||||
ExecutionContext.StepTelemetry.StepCount = steps.Count;
|
||||
var pathReference = Action as Pipelines.RepositoryPathReference;
|
||||
var telemetry = new ActionsStepTelemetry {
|
||||
Ref = GetActionRef(),
|
||||
HasPreStep = Data.HasPre,
|
||||
HasPostStep = Data.HasPost,
|
||||
IsEmbedded = ExecutionContext.IsEmbedded,
|
||||
Type = "composite",
|
||||
HasRunsStep = hasRunsStep,
|
||||
HasUsesStep = hasUsesStep,
|
||||
StepCount = steps.Count
|
||||
};
|
||||
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
|
||||
}
|
||||
ExecutionContext.StepTelemetry.Type = "composite";
|
||||
|
||||
|
||||
try
|
||||
{
|
||||
// Inputs of the composite step
|
||||
@@ -113,7 +117,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
// Create embedded steps
|
||||
var embeddedSteps = new List<IStep>();
|
||||
|
||||
// If we need to setup containers beforehand, do it
|
||||
// If we need to setup containers beforehand, do it
|
||||
// only relevant for local composite actions that need to JIT download/setup containers
|
||||
if (LocalActionContainerSetupSteps != null && LocalActionContainerSetupSteps.Count > 0)
|
||||
{
|
||||
@@ -148,7 +152,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
}
|
||||
else
|
||||
{
|
||||
step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(childScopeName);
|
||||
step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(childScopeName);
|
||||
}
|
||||
|
||||
// Shallow copy github context
|
||||
@@ -299,13 +303,13 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
// Register job cancellation call back only if job cancellation token not been fire before each step run
|
||||
if (!ExecutionContext.Root.CancellationToken.IsCancellationRequested)
|
||||
{
|
||||
// Test the condition again. The job was cancelled after the condition was originally evaluated.
|
||||
// Test the condition again. The job was canceled after the condition was originally evaluated.
|
||||
jobCancelRegister = ExecutionContext.Root.CancellationToken.Register(() =>
|
||||
{
|
||||
// Mark job as cancelled
|
||||
ExecutionContext.Root.Result = TaskResult.Canceled;
|
||||
ExecutionContext.Root.JobContext.Status = ExecutionContext.Root.Result?.ToActionResult();
|
||||
|
||||
|
||||
step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
|
||||
var conditionReTestTraceWriter = new ConditionTraceWriter(Trace, null); // host tracing only
|
||||
var conditionReTestResult = false;
|
||||
@@ -374,14 +378,14 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
// Condition is false
|
||||
Trace.Info("Skipping step due to condition evaluation.");
|
||||
SetStepConclusion(step, TaskResult.Skipped);
|
||||
step.ExecutionContext.Result = TaskResult.Skipped;
|
||||
continue;
|
||||
}
|
||||
else if (conditionEvaluateError != null)
|
||||
{
|
||||
// Condition error
|
||||
step.ExecutionContext.Error(conditionEvaluateError);
|
||||
SetStepConclusion(step, TaskResult.Failed);
|
||||
step.ExecutionContext.Result = TaskResult.Failed;
|
||||
ExecutionContext.Result = TaskResult.Failed;
|
||||
break;
|
||||
}
|
||||
@@ -389,7 +393,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
await RunStepAsync(step);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
finally
|
||||
{
|
||||
@@ -399,15 +403,13 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
jobCancelRegister = null;
|
||||
}
|
||||
}
|
||||
// Check failed or cancelled
|
||||
|
||||
// Check failed or canceled
|
||||
if (step.ExecutionContext.Result == TaskResult.Failed || step.ExecutionContext.Result == TaskResult.Canceled)
|
||||
{
|
||||
Trace.Info($"Update job result with current composite step result '{step.ExecutionContext.Result}'.");
|
||||
ExecutionContext.Result = TaskResultUtil.MergeTaskResults(ExecutionContext.Result, step.ExecutionContext.Result.Value);
|
||||
}
|
||||
|
||||
// Update context
|
||||
SetStepsContext(step);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -429,13 +431,13 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
Trace.Error($"Caught timeout exception from step: {ex.Message}");
|
||||
step.ExecutionContext.Error("The action has timed out.");
|
||||
SetStepConclusion(step, TaskResult.Failed);
|
||||
step.ExecutionContext.Result = TaskResult.Failed;
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Error($"Caught cancellation exception from step: {ex}");
|
||||
step.ExecutionContext.Error(ex);
|
||||
SetStepConclusion(step, TaskResult.Canceled);
|
||||
step.ExecutionContext.Result = TaskResult.Canceled;
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -443,33 +445,17 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
// Log the error and fail the step
|
||||
Trace.Error($"Caught exception from step: {ex}");
|
||||
step.ExecutionContext.Error(ex);
|
||||
SetStepConclusion(step, TaskResult.Failed);
|
||||
step.ExecutionContext.Result = TaskResult.Failed;
|
||||
}
|
||||
|
||||
// Merge execution context result with command result
|
||||
if (step.ExecutionContext.CommandResult != null)
|
||||
{
|
||||
SetStepConclusion(step, Common.Util.TaskResultUtil.MergeTaskResults(step.ExecutionContext.Result, step.ExecutionContext.CommandResult.Value));
|
||||
step.ExecutionContext.Result = Common.Util.TaskResultUtil.MergeTaskResults(step.ExecutionContext.Result, step.ExecutionContext.CommandResult.Value);
|
||||
}
|
||||
|
||||
Trace.Info($"Step result: {step.ExecutionContext.Result}");
|
||||
step.ExecutionContext.Debug($"Finished: {step.DisplayName}");
|
||||
step.ExecutionContext.PublishStepTelemetry();
|
||||
}
|
||||
|
||||
private void SetStepConclusion(IStep step, TaskResult result)
|
||||
{
|
||||
step.ExecutionContext.Result = result;
|
||||
SetStepsContext(step);
|
||||
}
|
||||
private void SetStepsContext(IStep step)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(step.ExecutionContext.ContextName) && !step.ExecutionContext.ContextName.StartsWith("__", StringComparison.Ordinal))
|
||||
{
|
||||
// TODO: when we support continue on error, we may need to do logic here to change conclusion based on the continue on error result
|
||||
step.ExecutionContext.Global.StepsContext.SetOutcome(step.ExecutionContext.ScopeName, step.ExecutionContext.ContextName, (step.ExecutionContext.Result ?? TaskResult.Succeeded).ToActionResult());
|
||||
step.ExecutionContext.Global.StepsContext.SetConclusion(step.ExecutionContext.ScopeName, step.ExecutionContext.ContextName, (step.ExecutionContext.Result ?? TaskResult.Succeeded).ToActionResult());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System;
|
||||
using GitHub.Runner.Worker.Container;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using System.Linq;
|
||||
|
||||
namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
@@ -70,13 +70,18 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
}
|
||||
|
||||
string type = Action.Type == Pipelines.ActionSourceType.Repository ? "Dockerfile" : "DockerHub";
|
||||
// Set extra telemetry base on the current context.
|
||||
// Add Telemetry to JobContext to send with JobCompleteMessage
|
||||
if (stage == ActionRunStage.Main)
|
||||
{
|
||||
ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre;
|
||||
ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost;
|
||||
var telemetry = new ActionsStepTelemetry {
|
||||
Ref = GetActionRef(),
|
||||
HasPreStep = Data.HasPre,
|
||||
HasPostStep = Data.HasPost,
|
||||
IsEmbedded = ExecutionContext.IsEmbedded,
|
||||
Type = type
|
||||
};
|
||||
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
|
||||
}
|
||||
ExecutionContext.StepTelemetry.Type = type;
|
||||
|
||||
// run container
|
||||
var container = new ContainerInfo(HostContext)
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Linq;
|
||||
using System.IO;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
@@ -22,7 +22,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
string ActionDirectory { get; set; }
|
||||
List<JobExtensionRunner> LocalActionContainerSetupSteps { get; set; }
|
||||
Task RunAsync(ActionRunStage stage);
|
||||
void PrepareExecution(ActionRunStage stage);
|
||||
void PrintActionDetails(ActionRunStage stage);
|
||||
}
|
||||
|
||||
public abstract class Handler : RunnerService
|
||||
@@ -36,7 +36,6 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
protected IActionCommandManager ActionCommandManager { get; private set; }
|
||||
|
||||
public Pipelines.ActionStepDefinitionReference Action { get; set; }
|
||||
public bool IsActionStep => Action != null;
|
||||
public Dictionary<string, string> Environment { get; set; }
|
||||
public Variables RuntimeVariables { get; set; }
|
||||
public IExecutionContext ExecutionContext { get; set; }
|
||||
@@ -45,50 +44,29 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
public string ActionDirectory { get; set; }
|
||||
public List<JobExtensionRunner> LocalActionContainerSetupSteps { get; set; }
|
||||
|
||||
public void PrepareExecution(ActionRunStage stage)
|
||||
public virtual string GetActionRef()
|
||||
{
|
||||
// Print out action details
|
||||
PrintActionDetails(stage);
|
||||
|
||||
// Get telemetry for the action
|
||||
PopulateActionTelemetry(stage);
|
||||
}
|
||||
|
||||
protected void PopulateActionTelemetry(ActionRunStage stage)
|
||||
{
|
||||
if (!IsActionStep)
|
||||
if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
|
||||
{
|
||||
ExecutionContext.StepTelemetry.Type = "runner";
|
||||
ExecutionContext.StepTelemetry.Action = $"{stage} Job Hook";
|
||||
}
|
||||
else if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
|
||||
{
|
||||
ExecutionContext.StepTelemetry.Type = "docker";
|
||||
var registryAction = Action as Pipelines.ContainerRegistryReference;
|
||||
ExecutionContext.StepTelemetry.Action = registryAction.Image;
|
||||
}
|
||||
else if (Action.Type == Pipelines.ActionSourceType.Script)
|
||||
{
|
||||
ExecutionContext.StepTelemetry.Type = "run";
|
||||
return registryAction.Image;
|
||||
}
|
||||
else if (Action.Type == Pipelines.ActionSourceType.Repository)
|
||||
{
|
||||
ExecutionContext.StepTelemetry.Type = "repository";
|
||||
var repoAction = Action as Pipelines.RepositoryPathReference;
|
||||
if (string.Equals(repoAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
ExecutionContext.StepTelemetry.Action = repoAction.Path;
|
||||
return repoAction.Path;
|
||||
}
|
||||
else
|
||||
{
|
||||
ExecutionContext.StepTelemetry.Ref = repoAction.Ref;
|
||||
if (string.IsNullOrEmpty(repoAction.Path))
|
||||
{
|
||||
ExecutionContext.StepTelemetry.Action = repoAction.Name;
|
||||
return $"{repoAction.Name}@{repoAction.Ref}";
|
||||
}
|
||||
else
|
||||
{
|
||||
ExecutionContext.StepTelemetry.Action = $"{repoAction.Name}/{repoAction.Path}";
|
||||
return $"{repoAction.Name}/{repoAction.Path}@{repoAction.Ref}";
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -97,11 +75,11 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
// this should never happen
|
||||
Trace.Error($"Can't generate ref for {Action.Type.ToString()}");
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
protected virtual void PrintActionDetails(ActionRunStage stage)
|
||||
public virtual void PrintActionDetails(ActionRunStage stage)
|
||||
{
|
||||
|
||||
|
||||
if (stage == ActionRunStage.Post)
|
||||
{
|
||||
ExecutionContext.Output($"Post job cleanup.");
|
||||
|
||||
@@ -55,23 +55,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
else if (data.ExecutionType == ActionExecutionType.NodeJS)
|
||||
{
|
||||
handler = HostContext.CreateService<INodeScriptActionHandler>();
|
||||
var nodeData = data as NodeJSActionExecutionData;
|
||||
|
||||
// With node12 EoL in 04/2022, we want to be able to uniformly upgrade all JS actions to node16 from the server
|
||||
if (string.Equals(nodeData.NodeVersion, "node12", StringComparison.InvariantCultureIgnoreCase) &&
|
||||
(executionContext.Global.Variables.GetBoolean("DistributedTask.ForceGithubJavascriptActionsToNode16") ?? false))
|
||||
{
|
||||
// The user can opt out of this behaviour by setting this variable to true, either setting 'env' in their workflow or as an environment variable on their machine
|
||||
executionContext.Global.EnvironmentVariables.TryGetValue(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, out var workflowOptOut);
|
||||
var isWorkflowOptOutSet = !string.IsNullOrEmpty(workflowOptOut);
|
||||
var isLocalOptOut = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion));
|
||||
bool isOptOut = isWorkflowOptOutSet ? StringUtil.ConvertToBoolean(workflowOptOut) : isLocalOptOut;
|
||||
if (!isOptOut)
|
||||
{
|
||||
nodeData.NodeVersion = "node16";
|
||||
}
|
||||
}
|
||||
(handler as INodeScriptActionHandler).Data = nodeData;
|
||||
(handler as INodeScriptActionHandler).Data = data as NodeJSActionExecutionData;
|
||||
}
|
||||
else if (data.ExecutionType == ActionExecutionType.Script)
|
||||
{
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using System;
|
||||
using System.Linq;
|
||||
|
||||
namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
@@ -73,13 +74,19 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
target = Data.Post;
|
||||
}
|
||||
|
||||
// Set extra telemetry base on the current context.
|
||||
// Add Telemetry to JobContext to send with JobCompleteMessage
|
||||
if (stage == ActionRunStage.Main)
|
||||
{
|
||||
ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre;
|
||||
ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost;
|
||||
var telemetry = new ActionsStepTelemetry
|
||||
{
|
||||
Ref = GetActionRef(),
|
||||
HasPreStep = Data.HasPre,
|
||||
HasPostStep = Data.HasPost,
|
||||
IsEmbedded = ExecutionContext.IsEmbedded,
|
||||
Type = Data.NodeVersion
|
||||
};
|
||||
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
|
||||
}
|
||||
ExecutionContext.StepTelemetry.Type = Data.NodeVersion;
|
||||
|
||||
ArgUtil.NotNullOrEmpty(target, nameof(target));
|
||||
target = Path.Combine(ActionDirectory, target);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
using System;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using System.Threading.Tasks;
|
||||
using System;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Common;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Runner.Worker.Handlers
|
||||
@@ -35,8 +35,6 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
}
|
||||
|
||||
ArgUtil.NotNullOrEmpty(plugin, nameof(plugin));
|
||||
// Set extra telemetry base on the current context.
|
||||
ExecutionContext.StepTelemetry.Type = plugin;
|
||||
|
||||
// Update the env dictionary.
|
||||
AddPrependPathToEnvironment();
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using System.Linq;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Runner.Worker.Handlers
|
||||
@@ -22,24 +21,18 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
public ScriptActionExecutionData Data { get; set; }
|
||||
|
||||
protected override void PrintActionDetails(ActionRunStage stage)
|
||||
public override void PrintActionDetails(ActionRunStage stage)
|
||||
{
|
||||
// if we're executing a Job Extension, we won't have an 'Action'
|
||||
if (!IsActionStep)
|
||||
|
||||
if (stage == ActionRunStage.Post)
|
||||
{
|
||||
if (Inputs.TryGetValue("path", out var path))
|
||||
{
|
||||
ExecutionContext.Output($"##[group]Run '{path}'");
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Inputs 'path' must be set for job extensions");
|
||||
}
|
||||
throw new NotSupportedException("Script action should not have 'Post' job action.");
|
||||
}
|
||||
else if (Action.Type == Pipelines.ActionSourceType.Script)
|
||||
|
||||
Inputs.TryGetValue("script", out string contents);
|
||||
contents = contents ?? string.Empty;
|
||||
if (Action.Type == Pipelines.ActionSourceType.Script)
|
||||
{
|
||||
Inputs.TryGetValue("script", out string contents);
|
||||
contents = contents ?? string.Empty;
|
||||
var firstLine = contents.TrimStart(' ', '\t', '\r', '\n');
|
||||
var firstNewLine = firstLine.IndexOfAny(new[] { '\r', '\n' });
|
||||
if (firstNewLine >= 0)
|
||||
@@ -48,16 +41,17 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
}
|
||||
|
||||
ExecutionContext.Output($"##[group]Run {firstLine}");
|
||||
var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
|
||||
foreach (var line in multiLines)
|
||||
{
|
||||
// Bright Cyan color
|
||||
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException($"Invalid action type {Action?.Type} for {nameof(ScriptHandler)}");
|
||||
throw new InvalidOperationException($"Invalid action type {Action.Type} for {nameof(ScriptHandler)}");
|
||||
}
|
||||
|
||||
var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
|
||||
foreach (var line in multiLines)
|
||||
{
|
||||
// Bright Cyan color
|
||||
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
|
||||
}
|
||||
|
||||
string argFormat;
|
||||
@@ -137,6 +131,11 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
|
||||
public async Task RunAsync(ActionRunStage stage)
|
||||
{
|
||||
if (stage == ActionRunStage.Post)
|
||||
{
|
||||
throw new NotSupportedException("Script action should not have 'Post' job action.");
|
||||
}
|
||||
|
||||
// Validate args
|
||||
Trace.Entering();
|
||||
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
|
||||
@@ -145,6 +144,17 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
var githubContext = ExecutionContext.ExpressionValues["github"] as GitHubContext;
|
||||
ArgUtil.NotNull(githubContext, nameof(githubContext));
|
||||
|
||||
// Add Telemetry to JobContext to send with JobCompleteMessage
|
||||
if (stage == ActionRunStage.Main)
|
||||
{
|
||||
var telemetry = new ActionsStepTelemetry
|
||||
{
|
||||
IsEmbedded = ExecutionContext.IsEmbedded,
|
||||
Type = "run",
|
||||
};
|
||||
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
|
||||
}
|
||||
|
||||
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
|
||||
|
||||
Inputs.TryGetValue("script", out var contents);
|
||||
@@ -212,35 +222,15 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
}
|
||||
}
|
||||
|
||||
// Don't override runner telemetry here
|
||||
if (!string.IsNullOrEmpty(shellCommand) && IsActionStep)
|
||||
{
|
||||
ExecutionContext.StepTelemetry.Action = shellCommand;
|
||||
}
|
||||
|
||||
// No arg format was given, shell must be a built-in
|
||||
if (string.IsNullOrEmpty(argFormat) || !argFormat.Contains("{0}"))
|
||||
{
|
||||
throw new ArgumentException("Invalid shell option. Shell must be a valid built-in (bash, sh, cmd, powershell, pwsh) or a format string containing '{0}'");
|
||||
}
|
||||
string scriptFilePath, resolvedScriptPath;
|
||||
if (IsActionStep)
|
||||
{
|
||||
// We do not not the full path until we know what shell is being used, so that we can determine the file extension
|
||||
scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}");
|
||||
resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}";
|
||||
}
|
||||
else
|
||||
{
|
||||
// JobExtensionRunners run a script file, we load that from the inputs here
|
||||
if (!Inputs.ContainsKey("path"))
|
||||
{
|
||||
throw new ArgumentException("Expected 'path' input to be set");
|
||||
}
|
||||
scriptFilePath = Inputs["path"];
|
||||
ArgUtil.NotNullOrEmpty(scriptFilePath, "path");
|
||||
resolvedScriptPath = Inputs["path"].Replace("\"", "\\\"");
|
||||
}
|
||||
|
||||
// We do not not the full path until we know what shell is being used, so that we can determine the file extension
|
||||
var scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}");
|
||||
var resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}";
|
||||
|
||||
// Format arg string with script path
|
||||
var arguments = string.Format(argFormat, resolvedScriptPath);
|
||||
@@ -256,12 +246,9 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
#else
|
||||
// Don't add a BOM. It causes the script to fail on some operating systems (e.g. on Ubuntu 14).
|
||||
var encoding = new UTF8Encoding(false);
|
||||
#endif
|
||||
if (IsActionStep)
|
||||
{
|
||||
// Script is written to local path (ie host) but executed relative to the StepHost, which may be a container
|
||||
File.WriteAllText(scriptFilePath, contents, encoding);
|
||||
}
|
||||
#endif
|
||||
// Script is written to local path (ie host) but executed relative to the StepHost, which may be a container
|
||||
File.WriteAllText(scriptFilePath, contents, encoding);
|
||||
|
||||
// Prepend PATH
|
||||
AddPrependPathToEnvironment();
|
||||
@@ -284,10 +271,10 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
if (Environment.ContainsKey("DYLD_INSERT_LIBRARIES")) // We don't check `isContainerStepHost` because we don't support container on macOS
|
||||
{
|
||||
// launch `node macOSRunInvoker.js shell args` instead of `shell args` to avoid macOS SIP remove `DYLD_INSERT_LIBRARIES` when launch process
|
||||
string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
|
||||
string node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
string macOSRunInvoker = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "macos-run-invoker.js");
|
||||
arguments = $"\"{macOSRunInvoker.Replace("\"", "\\\"")}\" \"{fileName.Replace("\"", "\\\"")}\" {arguments}";
|
||||
fileName = node;
|
||||
fileName = node12;
|
||||
}
|
||||
#endif
|
||||
var systemConnection = ExecutionContext.Global.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
@@ -81,22 +79,5 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
throw new ArgumentException($"Failed to parse COMMAND [..ARGS] from {shellOption}");
|
||||
}
|
||||
}
|
||||
|
||||
internal static string GetDefaultShellForScript(string path, Common.Tracing trace, string prependPath)
|
||||
{
|
||||
var format = "{0} {1}";
|
||||
switch (Path.GetExtension(path))
|
||||
{
|
||||
case ".sh":
|
||||
// use 'sh' args but prefer bash
|
||||
var pathToShell = WhichUtil.Which("bash", false, trace, prependPath) ?? WhichUtil.Which("sh", true, trace, prependPath);
|
||||
return string.Format(format, pathToShell, _defaultArguments["sh"]);
|
||||
case ".ps1":
|
||||
var pathToPowershell = WhichUtil.Which("pwsh", false, trace, prependPath) ?? WhichUtil.Which("powershell", true, trace, prependPath);
|
||||
return string.Format(format, pathToPowershell, _defaultArguments["powershell"]);
|
||||
default:
|
||||
throw new ArgumentException($"{path} is not a valid path to a script. Make sure it ends in '.sh' or '.ps1'.");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,7 +15,6 @@ using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
@@ -57,8 +56,6 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
// Create a new timeline record for 'Set up job'
|
||||
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Set up job", $"{nameof(JobExtension)}_Init", null, null, ActionRunStage.Pre);
|
||||
context.StepTelemetry.Type = "runner";
|
||||
context.StepTelemetry.Action = "setup_job";
|
||||
|
||||
List<IStep> preJobSteps = new List<IStep>();
|
||||
List<IStep> jobSteps = new List<IStep>();
|
||||
@@ -125,20 +122,20 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
try
|
||||
try
|
||||
{
|
||||
var tokenPermissions = jobContext.Global.Variables.Get("system.github.token.permissions") ?? "";
|
||||
if (!string.IsNullOrEmpty(tokenPermissions))
|
||||
{
|
||||
context.Output($"##[group]GITHUB_TOKEN Permissions");
|
||||
var permissions = StringUtil.ConvertFromJson<Dictionary<string, string>>(tokenPermissions);
|
||||
foreach (KeyValuePair<string, string> entry in permissions)
|
||||
foreach(KeyValuePair<string, string> entry in permissions)
|
||||
{
|
||||
context.Output($"{entry.Key}: {entry.Value}");
|
||||
}
|
||||
context.Output("##[endgroup]");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
context.Output($"Fail to parse and display GITHUB_TOKEN permissions list: {ex.Message}");
|
||||
@@ -249,19 +246,6 @@ namespace GitHub.Runner.Worker
|
||||
Trace.Info("Downloading actions");
|
||||
var actionManager = HostContext.GetService<IActionManager>();
|
||||
var prepareResult = await actionManager.PrepareActionsAsync(context, message.Steps);
|
||||
|
||||
// add hook to preJobSteps
|
||||
var startedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED");
|
||||
if (!string.IsNullOrEmpty(startedHookPath))
|
||||
{
|
||||
var hookProvider = HostContext.GetService<IJobHookProvider>();
|
||||
var jobHookData = new JobHookData(ActionRunStage.Pre, startedHookPath);
|
||||
preJobSteps.Add(new JobExtensionRunner(runAsync: hookProvider.RunHook,
|
||||
condition: $"{PipelineTemplateConstants.Always}()",
|
||||
displayName: Constants.Hooks.JobStartedStepName,
|
||||
data: (object)jobHookData));
|
||||
}
|
||||
|
||||
preJobSteps.AddRange(prepareResult.ContainerSetupSteps);
|
||||
|
||||
// Add start-container steps, record and stop-container steps
|
||||
@@ -328,9 +312,7 @@ namespace GitHub.Runner.Worker
|
||||
JobExtensionRunner extensionStep = step as JobExtensionRunner;
|
||||
ArgUtil.NotNull(extensionStep, extensionStep.DisplayName);
|
||||
Guid stepId = Guid.NewGuid();
|
||||
extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, stepId.ToString("N"), null, stepId.ToString("N"), ActionRunStage.Pre);
|
||||
extensionStep.ExecutionContext.StepTelemetry.Type = "runner";
|
||||
extensionStep.ExecutionContext.StepTelemetry.Action = extensionStep.DisplayName.ToLowerInvariant().Replace(' ', '_');
|
||||
extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, null, null, stepId.ToString("N"), ActionRunStage.Pre);
|
||||
}
|
||||
else if (step is IActionRunner actionStep)
|
||||
{
|
||||
@@ -351,18 +333,6 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
// Register Job Completed hook if the variable is set
|
||||
var completedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED");
|
||||
if (!string.IsNullOrEmpty(completedHookPath))
|
||||
{
|
||||
var hookProvider = HostContext.GetService<IJobHookProvider>();
|
||||
var jobHookData = new JobHookData(ActionRunStage.Post, completedHookPath);
|
||||
jobContext.RegisterPostJobStep(new JobExtensionRunner(runAsync: hookProvider.RunHook,
|
||||
condition: $"{PipelineTemplateConstants.Always}()",
|
||||
displayName: Constants.Hooks.JobCompletedStepName,
|
||||
data: (object)jobHookData));
|
||||
}
|
||||
|
||||
List<IStep> steps = new List<IStep>();
|
||||
steps.AddRange(preJobSteps);
|
||||
steps.AddRange(jobSteps);
|
||||
@@ -431,8 +401,6 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
// create a new timeline record node for 'Finalize job'
|
||||
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Complete job", $"{nameof(JobExtension)}_Final", null, null, ActionRunStage.Post);
|
||||
context.StepTelemetry.Type = "runner";
|
||||
context.StepTelemetry.Action = "complete_job";
|
||||
using (var register = jobContext.CancellationToken.Register(() => { context.CancelToken(); }))
|
||||
{
|
||||
try
|
||||
|
||||
@@ -1,95 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using System.Linq;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker.Handlers;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
{
|
||||
[ServiceLocator(Default = typeof(JobHookProvider))]
|
||||
public interface IJobHookProvider : IRunnerService
|
||||
{
|
||||
Task RunHook(IExecutionContext executionContext, object data);
|
||||
}
|
||||
|
||||
public class JobHookData
|
||||
{
|
||||
public string Path {get; private set;}
|
||||
public ActionRunStage Stage {get; private set;}
|
||||
|
||||
public JobHookData(ActionRunStage stage, string path)
|
||||
{
|
||||
Path = path;
|
||||
Stage = stage;
|
||||
}
|
||||
}
|
||||
|
||||
public class JobHookProvider : RunnerService, IJobHookProvider
|
||||
{
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
}
|
||||
|
||||
public async Task RunHook(IExecutionContext executionContext, object data)
|
||||
{
|
||||
// Get Inputs
|
||||
var hookData = data as JobHookData;
|
||||
ArgUtil.NotNull(hookData, nameof(JobHookData));
|
||||
|
||||
var displayName = hookData.Stage == ActionRunStage.Pre ? Constants.Hooks.JobStartedStepName : Constants.Hooks.JobCompletedStepName;
|
||||
// Log to users so that they know how this step was injected
|
||||
executionContext.Output($"A '{displayName}' has been configured by the self-hosted runner administrator");
|
||||
|
||||
// Validate script file.
|
||||
if (!File.Exists(hookData.Path))
|
||||
{
|
||||
throw new FileNotFoundException("File doesn't exist");
|
||||
}
|
||||
|
||||
executionContext.WriteWebhookPayload();
|
||||
|
||||
// Create the handler data.
|
||||
var scriptDirectory = Path.GetDirectoryName(hookData.Path);
|
||||
var stepHost = HostContext.CreateService<IDefaultStepHost>();
|
||||
var prependPath = string.Join(Path.PathSeparator.ToString(), executionContext.Global.PrependPath.Reverse<string>());
|
||||
Dictionary<string, string> inputs = new()
|
||||
{
|
||||
["path"] = hookData.Path,
|
||||
["shell"] = ScriptHandlerHelpers.GetDefaultShellForScript(hookData.Path, Trace, prependPath)
|
||||
};
|
||||
|
||||
// Create the handler
|
||||
var handlerFactory = HostContext.GetService<IHandlerFactory>();
|
||||
var handler = handlerFactory.Create(
|
||||
executionContext,
|
||||
action: null,
|
||||
stepHost,
|
||||
new ScriptActionExecutionData(),
|
||||
inputs,
|
||||
environment: new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
|
||||
executionContext.Global.Variables,
|
||||
actionDirectory: scriptDirectory,
|
||||
localActionContainerSetupSteps: null);
|
||||
handler.PrepareExecution(hookData.Stage);
|
||||
|
||||
// Setup file commands
|
||||
var fileCommandManager = HostContext.CreateService<IFileCommandManager>();
|
||||
fileCommandManager.InitializeFiles(executionContext, null);
|
||||
|
||||
// Run the step and process the file commands
|
||||
try
|
||||
{
|
||||
await handler.RunAsync(hookData.Stage);
|
||||
}
|
||||
finally
|
||||
{
|
||||
fileCommandManager.ProcessFiles(executionContext, executionContext.Global.Container);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,18 +1,18 @@
|
||||
using System;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using System.Net.Http;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
{
|
||||
@@ -25,7 +25,6 @@ namespace GitHub.Runner.Worker
|
||||
public sealed class JobRunner : RunnerService, IJobRunner
|
||||
{
|
||||
private IJobServerQueue _jobServerQueue;
|
||||
private RunnerSettings _runnerSettings;
|
||||
private ITempDirectoryManager _tempDirectoryManager;
|
||||
|
||||
public async Task<TaskResult> RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken)
|
||||
@@ -109,8 +108,8 @@ namespace GitHub.Runner.Worker
|
||||
jobContext.SetRunnerContext("os", VarUtil.OS);
|
||||
jobContext.SetRunnerContext("arch", VarUtil.OSArchitecture);
|
||||
|
||||
_runnerSettings = HostContext.GetService<IConfigurationStore>().GetSettings();
|
||||
jobContext.SetRunnerContext("name", _runnerSettings.AgentName);
|
||||
var runnerSettings = HostContext.GetService<IConfigurationStore>().GetSettings();
|
||||
jobContext.SetRunnerContext("name", runnerSettings.AgentName);
|
||||
|
||||
string toolsDirectory = HostContext.GetDirectory(WellKnownDirectory.Tools);
|
||||
Directory.CreateDirectory(toolsDirectory);
|
||||
@@ -131,9 +130,9 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
catch (OperationCanceledException ex) when (jobContext.CancellationToken.IsCancellationRequested)
|
||||
{
|
||||
// set the job to cancelled
|
||||
// set the job to canceled
|
||||
// don't log error issue to job ExecutionContext, since server owns the job level issue
|
||||
Trace.Error($"Job is cancelled during initialize.");
|
||||
Trace.Error($"Job is canceled during initialize.");
|
||||
Trace.Error($"Caught exception: {ex}");
|
||||
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Canceled);
|
||||
}
|
||||
@@ -210,53 +209,6 @@ namespace GitHub.Runner.Worker
|
||||
jobContext.Debug($"Finishing: {message.JobDisplayName}");
|
||||
TaskResult result = jobContext.Complete(taskResult);
|
||||
|
||||
if (_runnerSettings.DisableUpdate == true)
|
||||
{
|
||||
try
|
||||
{
|
||||
var currentVersion = new PackageVersion(BuildConstants.RunnerPackage.Version);
|
||||
ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
||||
VssCredentials serverCredential = VssUtil.GetVssCredential(systemConnection);
|
||||
|
||||
var runnerServer = HostContext.GetService<IRunnerServer>();
|
||||
await runnerServer.ConnectAsync(systemConnection.Url, serverCredential);
|
||||
var serverPackages = await runnerServer.GetPackagesAsync("agent", BuildConstants.RunnerPackage.PackageName, 5, includeToken: false, cancellationToken: CancellationToken.None);
|
||||
if (serverPackages.Count > 0)
|
||||
{
|
||||
serverPackages = serverPackages.OrderByDescending(x => x.Version).ToList();
|
||||
Trace.Info($"Newer packages {StringUtil.ConvertToJson(serverPackages.Select(x => x.Version.ToString()))}");
|
||||
|
||||
var warnOnFailedJob = false; // any minor/patch version behind.
|
||||
var warnOnOldRunnerVersion = false; // >= 2 minor version behind
|
||||
if (serverPackages.Any(x => x.Version.CompareTo(currentVersion) > 0))
|
||||
{
|
||||
Trace.Info($"Current runner version {currentVersion} is behind the latest runner version {serverPackages[0].Version}.");
|
||||
warnOnFailedJob = true;
|
||||
}
|
||||
|
||||
if (serverPackages.Where(x => x.Version.Major == currentVersion.Major && x.Version.Minor > currentVersion.Minor).Count() > 1)
|
||||
{
|
||||
Trace.Info($"Current runner version {currentVersion} is way behind the latest runner version {serverPackages[0].Version}.");
|
||||
warnOnOldRunnerVersion = true;
|
||||
}
|
||||
|
||||
if (result == TaskResult.Failed && warnOnFailedJob)
|
||||
{
|
||||
jobContext.Warning($"This job failure may be caused by using an out of date self-hosted runner. You are currently using runner version {currentVersion}. Please update to the latest version {serverPackages[0].Version}");
|
||||
}
|
||||
else if (warnOnOldRunnerVersion)
|
||||
{
|
||||
jobContext.Warning($"This self-hosted runner is currently using runner version {currentVersion}. This version is out of date. Please update to the latest version {serverPackages[0].Version}");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// Ignore any error since suggest runner update is best effort.
|
||||
Trace.Error($"Caught exception during runner version check: {ex}");
|
||||
}
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
await ShutdownQueue(throwOnFailure: true);
|
||||
@@ -279,13 +231,14 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
// Load any upgrade telemetry
|
||||
LoadFromTelemetryFile(jobContext.Global.JobTelemetry);
|
||||
LoadFromTelemetryFile(jobContext.JobTelemetry);
|
||||
|
||||
// Make sure we don't submit secrets as telemetry
|
||||
MaskTelemetrySecrets(jobContext.Global.JobTelemetry);
|
||||
MaskTelemetrySecrets(jobContext.JobTelemetry);
|
||||
|
||||
Trace.Info("Raising job completed event.");
|
||||
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result, jobContext.JobOutputs, jobContext.ActionsEnvironment, jobContext.ActionsStepsTelemetry, jobContext.JobTelemetry);
|
||||
|
||||
Trace.Info($"Raising job completed event");
|
||||
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result, jobContext.JobOutputs, jobContext.ActionsEnvironment, jobContext.Global.StepsTelemetry, jobContext.Global.JobTelemetry);
|
||||
|
||||
var completeJobRetryLimit = 5;
|
||||
var exceptions = new List<Exception>();
|
||||
|
||||
@@ -130,7 +130,7 @@ namespace GitHub.Runner.Worker
|
||||
// Register job cancellation call back only if job cancellation token not been fire before each step run
|
||||
if (!jobContext.CancellationToken.IsCancellationRequested)
|
||||
{
|
||||
// Test the condition again. The job was cancelled after the condition was originally evaluated.
|
||||
// Test the condition again. The job was canceled after the condition was originally evaluated.
|
||||
jobCancelRegister = jobContext.CancellationToken.Register(() =>
|
||||
{
|
||||
// Mark job as cancelled
|
||||
|
||||
@@ -9,7 +9,6 @@ using System.Threading.Tasks;
|
||||
using GitHub.Services.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Text;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
{
|
||||
@@ -44,7 +43,6 @@ namespace GitHub.Runner.Worker
|
||||
ArgUtil.NotNullOrEmpty(pipeOut, nameof(pipeOut));
|
||||
VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy);
|
||||
var jobRunner = HostContext.CreateService<IJobRunner>();
|
||||
var terminal = HostContext.GetService<ITerminal>();
|
||||
|
||||
using (var channel = HostContext.CreateService<IProcessChannel>())
|
||||
using (var jobRequestCancellationToken = CancellationTokenSource.CreateLinkedTokenSource(HostContext.RunnerShutdownToken))
|
||||
@@ -66,22 +64,7 @@ namespace GitHub.Runner.Worker
|
||||
Trace.Info("Message received.");
|
||||
ArgUtil.Equal(MessageType.NewJobRequest, channelMessage.MessageType, nameof(channelMessage.MessageType));
|
||||
ArgUtil.NotNullOrEmpty(channelMessage.Body, nameof(channelMessage.Body));
|
||||
Pipelines.AgentJobRequestMessage jobMessage = null;
|
||||
try
|
||||
{
|
||||
jobMessage = StringUtil.ConvertFromJson<Pipelines.AgentJobRequestMessage>(channelMessage.Body);
|
||||
}
|
||||
catch (JsonReaderException ex)
|
||||
{
|
||||
if (channelMessage.Body.Length > ex.LinePosition + 10)
|
||||
{
|
||||
var errorChunk = channelMessage.Body.Substring(ex.LinePosition - 10, 20);
|
||||
terminal.WriteError($"Worker received invalid Json at position '{ex.LinePosition}': {errorChunk} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(errorChunk))})");
|
||||
}
|
||||
|
||||
throw;
|
||||
}
|
||||
|
||||
var jobMessage = StringUtil.ConvertFromJson<Pipelines.AgentJobRequestMessage>(channelMessage.Body);
|
||||
ArgUtil.NotNull(jobMessage, nameof(jobMessage));
|
||||
HostContext.WritePerfCounter($"WorkerJobMessageReceived_{jobMessage.RequestId.ToString()}");
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
/*
|
||||
/*
|
||||
* ---------------------------------------------------------
|
||||
* Copyright(C) Microsoft Corporation. All rights reserved.
|
||||
* ---------------------------------------------------------
|
||||
@@ -324,7 +324,6 @@ namespace GitHub.DistributedTask.WebApi
|
||||
/// <param name="scopeIdentifier">The project GUID to scope the request</param>
|
||||
/// <param name="hubName">The name of the server hub: "build" for the Build server or "rm" for the Release Management server</param>
|
||||
/// <param name="planId"></param>
|
||||
/// <param name="jobId"></param>
|
||||
/// <param name="actionReferenceList"></param>
|
||||
/// <param name="userState"></param>
|
||||
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
|
||||
@@ -332,7 +331,6 @@ namespace GitHub.DistributedTask.WebApi
|
||||
Guid scopeIdentifier,
|
||||
string hubName,
|
||||
Guid planId,
|
||||
Guid jobId,
|
||||
ActionReferenceList actionReferenceList,
|
||||
object userState = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
@@ -341,15 +339,12 @@ namespace GitHub.DistributedTask.WebApi
|
||||
Guid locationId = new Guid("27d7f831-88c1-4719-8ca1-6a061dad90eb");
|
||||
object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId };
|
||||
HttpContent content = new ObjectContent<ActionReferenceList>(actionReferenceList, new VssJsonMediaTypeFormatter(true));
|
||||
|
||||
List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
|
||||
queryParams.Add("jobId", jobId);
|
||||
|
||||
return SendAsync<ActionDownloadInfoCollection>(
|
||||
httpMethod,
|
||||
locationId,
|
||||
routeValues: routeValues,
|
||||
version: new ApiResourceVersion(6.0, 1),
|
||||
queryParameters: queryParams,
|
||||
userState: userState,
|
||||
cancellationToken: cancellationToken,
|
||||
content: content);
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.Serialization;
|
||||
using System.Runtime.Serialization;
|
||||
|
||||
namespace GitHub.DistributedTask.WebApi
|
||||
{
|
||||
@@ -10,13 +8,6 @@ namespace GitHub.DistributedTask.WebApi
|
||||
[DataContract]
|
||||
public class ActionsStepTelemetry
|
||||
{
|
||||
public ActionsStepTelemetry()
|
||||
{
|
||||
this.ErrorMessages = new List<string>();
|
||||
}
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string Action { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string Ref { get; set; }
|
||||
@@ -24,15 +15,9 @@ namespace GitHub.DistributedTask.WebApi
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string Type { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string Stage { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public Guid StepId { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public bool? HasRunsStep { get; set; }
|
||||
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public bool? HasUsesStep { get; set; }
|
||||
|
||||
@@ -47,14 +32,5 @@ namespace GitHub.DistributedTask.WebApi
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public int? StepCount { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public TaskResult? Result { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public List<string> ErrorMessages { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public int? ExecutionTimeInSeconds { get; set; }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.ComponentModel;
|
||||
using System.Runtime.Serialization;
|
||||
|
||||
@@ -111,43 +110,5 @@ namespace GitHub.DistributedTask.WebApi
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A set of trimmed down packages:
|
||||
/// - the package without 'externals'
|
||||
/// - the package without 'dotnet runtime'
|
||||
/// - the package without 'dotnet runtime' and 'externals'
|
||||
/// </summary>
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public List<TrimmedPackageMetadata> TrimmedPackages
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
}
|
||||
|
||||
[DataContract]
|
||||
public class TrimmedPackageMetadata
|
||||
{
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string HashValue { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string DownloadUrl { get; set; }
|
||||
|
||||
public Dictionary<string, string> TrimmedContents
|
||||
{
|
||||
get
|
||||
{
|
||||
if (m_trimmedContents == null)
|
||||
{
|
||||
m_trimmedContents = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
}
|
||||
return m_trimmedContents;
|
||||
}
|
||||
}
|
||||
|
||||
[DataMember(Name = "TrimmedContents", EmitDefaultValue = false)]
|
||||
private Dictionary<string, string> m_trimmedContents;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,7 +25,6 @@ namespace GitHub.DistributedTask.WebApi
|
||||
this.ProvisioningState = referenceToBeCloned.ProvisioningState;
|
||||
this.AccessPoint = referenceToBeCloned.AccessPoint;
|
||||
this.Ephemeral = referenceToBeCloned.Ephemeral;
|
||||
this.DisableUpdate = referenceToBeCloned.DisableUpdate;
|
||||
|
||||
if (referenceToBeCloned.m_links != null)
|
||||
{
|
||||
@@ -93,16 +92,6 @@ namespace GitHub.DistributedTask.WebApi
|
||||
set;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Whether or not this agent should auto-update to latest version.
|
||||
/// </summary>
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public bool? DisableUpdate
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Whether or not the agent is online.
|
||||
/// </summary>
|
||||
|
||||
@@ -102,10 +102,4 @@ namespace GitHub.DistributedTask.WebApi
|
||||
public static readonly String FileAttachment = "DistributedTask.Core.FileAttachment";
|
||||
public static readonly String DiagnosticLog = "DistributedTask.Core.DiagnosticLog";
|
||||
}
|
||||
|
||||
[GenerateAllConstants]
|
||||
public class ChecksAttachmentType
|
||||
{
|
||||
public static readonly String StepSummary = "Checks.Step.Summary";
|
||||
}
|
||||
}
|
||||
|
||||
@@ -66,7 +66,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
|
||||
_serviceControlManager = new Mock<ILinuxServiceControlManager>();
|
||||
#endif
|
||||
|
||||
var expectedAgent = new TaskAgent(_expectedAgentName) { Id = 1, Ephemeral = true, DisableUpdate = true };
|
||||
var expectedAgent = new TaskAgent(_expectedAgentName) { Id = 1 };
|
||||
expectedAgent.Authorization = new TaskAgentAuthorization
|
||||
{
|
||||
ClientId = Guid.NewGuid(),
|
||||
@@ -154,7 +154,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
|
||||
tc,
|
||||
new[]
|
||||
{
|
||||
"configure",
|
||||
"configure",
|
||||
"--url", _expectedServerUrl,
|
||||
"--name", _expectedAgentName,
|
||||
"--runnergroup", _secondRunnerGroupName,
|
||||
@@ -163,8 +163,6 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
|
||||
"--token", _expectedToken,
|
||||
"--labels", userLabels,
|
||||
"--ephemeral",
|
||||
"--disableupdate",
|
||||
"--unattended",
|
||||
});
|
||||
trace.Info("Constructed.");
|
||||
_store.Setup(x => x.IsConfigured()).Returns(false);
|
||||
@@ -187,7 +185,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
|
||||
// validate GetAgentPoolsAsync gets called twice with automation pool type
|
||||
_runnerServer.Verify(x => x.GetAgentPoolsAsync(It.IsAny<string>(), It.Is<TaskAgentPoolType>(p => p == TaskAgentPoolType.Automation)), Times.Exactly(2));
|
||||
|
||||
var expectedLabels = new List<string>() { "self-hosted", VarUtil.OS, VarUtil.OSArchitecture };
|
||||
var expectedLabels = new List<string>() { "self-hosted", VarUtil.OS, VarUtil.OSArchitecture};
|
||||
expectedLabels.AddRange(userLabels.Split(",").ToList());
|
||||
|
||||
_runnerServer.Verify(x => x.AddAgentAsync(It.IsAny<int>(), It.Is<TaskAgent>(a => a.Labels.Select(x => x.Name).ToHashSet().SetEquals(expectedLabels))), Times.Once);
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Reflection;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Listener;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
using GitHub.Runner.Listener;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using Moq;
|
||||
using System;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading.Tasks;
|
||||
using Xunit;
|
||||
using System.Threading;
|
||||
using System.Reflection;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Security.Cryptography;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Listener
|
||||
{
|
||||
@@ -256,67 +256,5 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
tokenSource.Token), Times.Once());
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Runner")]
|
||||
public async void SkipDeleteSession_WhenGetNextMessageGetTaskAgentAccessTokenExpiredException()
|
||||
{
|
||||
using (TestHostContext tc = CreateTestContext())
|
||||
using (var tokenSource = new CancellationTokenSource())
|
||||
{
|
||||
Tracing trace = tc.GetTrace();
|
||||
|
||||
// Arrange.
|
||||
var expectedSession = new TaskAgentSession();
|
||||
PropertyInfo sessionIdProperty = expectedSession.GetType().GetProperty("SessionId", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
|
||||
Assert.NotNull(sessionIdProperty);
|
||||
sessionIdProperty.SetValue(expectedSession, Guid.NewGuid());
|
||||
|
||||
_runnerServer
|
||||
.Setup(x => x.CreateAgentSessionAsync(
|
||||
_settings.PoolId,
|
||||
It.Is<TaskAgentSession>(y => y != null),
|
||||
tokenSource.Token))
|
||||
.Returns(Task.FromResult(expectedSession));
|
||||
|
||||
_credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials());
|
||||
_store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken });
|
||||
_store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData));
|
||||
|
||||
// Act.
|
||||
MessageListener listener = new MessageListener();
|
||||
listener.Initialize(tc);
|
||||
|
||||
bool result = await listener.CreateSessionAsync(tokenSource.Token);
|
||||
Assert.True(result);
|
||||
|
||||
_runnerServer
|
||||
.Setup(x => x.GetAgentMessageAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), tokenSource.Token))
|
||||
.Throws(new TaskAgentAccessTokenExpiredException("test"));
|
||||
try
|
||||
{
|
||||
await listener.GetNextMessageAsync(tokenSource.Token);
|
||||
}
|
||||
catch (TaskAgentAccessTokenExpiredException)
|
||||
{
|
||||
//expected
|
||||
}
|
||||
finally
|
||||
{
|
||||
await listener.DeleteSessionAsync();
|
||||
}
|
||||
|
||||
//Assert
|
||||
_runnerServer
|
||||
.Verify(x => x.GetAgentMessageAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), tokenSource.Token), Times.Once);
|
||||
|
||||
_runnerServer
|
||||
.Verify(x => x.DeleteAgentSessionAsync(
|
||||
_settings.PoolId, expectedSession.SessionId, It.IsAny<CancellationToken>()), Times.Never);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,17 +1,12 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Reflection;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Listener;
|
||||
using GitHub.Runner.Sdk;
|
||||
using Moq;
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Xunit;
|
||||
using System.IO;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Listener
|
||||
{
|
||||
@@ -22,12 +17,11 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
private Mock<IConfigurationStore> _configStore;
|
||||
private Mock<IJobDispatcher> _jobDispatcher;
|
||||
private AgentRefreshMessage _refreshMessage = new AgentRefreshMessage(1, "2.299.0");
|
||||
private List<TrimmedPackageMetadata> _trimmedPackages = new List<TrimmedPackageMetadata>();
|
||||
|
||||
#if !OS_WINDOWS
|
||||
private string _packageUrl = null;
|
||||
private string _packageUrl = $"https://github.com/actions/runner/releases/download/v2.285.1/actions-runner-{BuildConstants.RunnerPackage.PackageName}-2.285.1.tar.gz";
|
||||
#else
|
||||
private string _packageUrl = null;
|
||||
private string _packageUrl = $"https://github.com/actions/runner/releases/download/v2.285.1/actions-runner-{BuildConstants.RunnerPackage.PackageName}-2.285.1.zip";
|
||||
#endif
|
||||
public SelfUpdaterL0()
|
||||
{
|
||||
@@ -37,44 +31,8 @@ namespace GitHub.Runner.Common.Tests.Listener
|
||||
_jobDispatcher = new Mock<IJobDispatcher>();
|
||||
_configStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1, AgentId = 1 });
|
||||
|
||||
Environment.SetEnvironmentVariable("_GITHUB_ACTION_EXECUTE_UPDATE_SCRIPT", "1");
|
||||
}
|
||||
|
||||
private async Task FetchLatestRunner()
|
||||
{
|
||||
var latestVersion = "";
|
||||
var httpClientHandler = new HttpClientHandler();
|
||||
httpClientHandler.AllowAutoRedirect = false;
|
||||
using (var client = new HttpClient(httpClientHandler))
|
||||
{
|
||||
var response = await client.SendAsync(new HttpRequestMessage(HttpMethod.Get, "https://github.com/actions/runner/releases/latest"));
|
||||
if (response.StatusCode == System.Net.HttpStatusCode.Redirect)
|
||||
{
|
||||
var redirect = await response.Content.ReadAsStringAsync();
|
||||
Regex regex = new Regex(@"/runner/releases/tag/v(?<version>\d+\.\d+\.\d+)");
|
||||
var match = regex.Match(redirect);
|
||||
if (match.Success)
|
||||
{
|
||||
latestVersion = match.Groups["version"].Value;
|
||||
|
||||
#if !OS_WINDOWS
|
||||
_packageUrl = $"https://github.com/actions/runner/releases/download/v{latestVersion}/actions-runner-{BuildConstants.RunnerPackage.PackageName}-{latestVersion}.tar.gz";
|
||||
#else
|
||||
_packageUrl = $"https://github.com/actions/runner/releases/download/v{latestVersion}/actions-runner-{BuildConstants.RunnerPackage.PackageName}-{latestVersion}.zip";
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
using (var client = new HttpClient())
|
||||
{
|
||||
var json = await client.GetStringAsync($"https://github.com/actions/runner/releases/download/v{latestVersion}/actions-runner-{BuildConstants.RunnerPackage.PackageName}-{latestVersion}-trimmedpackages.json");
|
||||
_trimmedPackages = StringUtil.ConvertFromJson<List<TrimmedPackageMetadata>>(json);
|
||||
}
|
||||
|
||||
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.299.0", true, It.IsAny<CancellationToken>()))
|
||||
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.299.0"), DownloadUrl = _packageUrl }));
|
||||
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -82,59 +40,39 @@ namespace GitHub.Runner.Common.Tests.Listener
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync()
{
try
using (var hc = new TestHostContext(this))
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p = new ProcessInvokerWrapper();
p.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p);
updater.Initialize(hc);

_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

try
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));

//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);

_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0"), CancellationToken.None);
}
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0"), CancellationToken.None);
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
@@ -143,51 +81,27 @@ namespace GitHub.Runner.Common.Tests.Listener
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_NoUpdateOnOldVersion()
{
try
using (var hc = new TestHostContext(this))
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
updater.Initialize(hc);

//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.200.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.200.0"), DownloadUrl = _packageUrl }));

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.200.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.200.0"), DownloadUrl = _packageUrl }));

_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

var result = await updater.SelfUpdate(new AgentRefreshMessage(1, "2.200.0"), _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.False(result);
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
var result = await updater.SelfUpdate(new AgentRefreshMessage(1, "2.200.0"), _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.False(result);
}
}
@@ -196,53 +110,33 @@ namespace GitHub.Runner.Common.Tests.Listener
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_DownloadRetry()
{
try
using (var hc = new TestHostContext(this))
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.299.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.299.0"), DownloadUrl = $"https://github.com/actions/runner/notexists" }));

_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.299.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.299.0"), DownloadUrl = $"https://github.com/actions/runner/notexists" }));
var p = new ProcessInvokerWrapper();
p.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p);
updater.Initialize(hc);

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);

_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

var ex = await Assert.ThrowsAsync<TaskCanceledException>(() => updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken));
Assert.Contains($"failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts", ex.Message);
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
var ex = await Assert.ThrowsAsync<TaskCanceledException>(() => updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken));
Assert.Contains($"failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts", ex.Message);
}
}
@@ -251,542 +145,33 @@ namespace GitHub.Runner.Common.Tests.Listener
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_ValidateHash()
{
try
using (var hc = new TestHostContext(this))
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));
//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.299.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.299.0"), DownloadUrl = _packageUrl, HashValue = "bad_hash" }));

_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.299.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.299.0"), DownloadUrl = _packageUrl, HashValue = "bad_hash" }));
var p = new ProcessInvokerWrapper();
p.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p);
updater.Initialize(hc);

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);

_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));
_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

var ex = await Assert.ThrowsAsync<Exception>(() => updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken));
Assert.Contains("did not match expected Runner Hash", ex.Message);
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_CloneHash_RuntimeAndExternals()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));

//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);

_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.299.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.299.0"), DownloadUrl = _packageUrl, TrimmedPackages = new List<TrimmedPackageMetadata>() { new TrimmedPackageMetadata() } }));

_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0")));

FieldInfo contentHashesProperty = updater.GetType().GetField("_contentHashes", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
Assert.NotNull(contentHashesProperty);
Dictionary<string, string> contentHashes = (Dictionary<string, string>)contentHashesProperty.GetValue(updater);
hc.GetTrace().Info(StringUtil.ConvertToJson(contentHashes));

var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");

Assert.Equal(File.ReadAllText(dotnetRuntimeHashFile).Trim(), contentHashes["dotnetRuntime"]);
Assert.Equal(File.ReadAllText(externalsHashFile).Trim(), contentHashes["externals"]);
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0"), CancellationToken.None);
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_Cancel_CloneHashTask_WhenNotNeeded()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));

//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new Mock<IHttpClientHandlerFactory>().Object);

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper();
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper();
p3.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
updater.Initialize(hc);

_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);

FieldInfo contentHashesProperty = updater.GetType().GetField("_contentHashes", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
Assert.NotNull(contentHashesProperty);
Dictionary<string, string> contentHashes = (Dictionary<string, string>)contentHashesProperty.GetValue(updater);
hc.GetTrace().Info(StringUtil.ConvertToJson(contentHashes));

Assert.NotEqual(2, contentHashes.Count);
}
catch (Exception ex)
{
hc.GetTrace().Error(ex);
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_UseExternalsTrimmedPackage()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));

//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper(); // hashfiles
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper(); // hashfiles
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper(); // un-tar
p3.Initialize(hc);
var p4 = new ProcessInvokerWrapper(); // node -v
p4.Initialize(hc);
var p5 = new ProcessInvokerWrapper(); // node -v
p5.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
hc.EnqueueInstance<IProcessInvoker>(p4);
hc.EnqueueInstance<IProcessInvoker>(p5);
updater.Initialize(hc);

var trim = _trimmedPackages.Where(x => !x.TrimmedContents.ContainsKey("dotnetRuntime")).ToList();
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.299.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.299.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));

_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0"), CancellationToken.None);
}

var traceFile = Path.GetTempFileName();
File.Copy(hc.TraceFileName, traceFile, true);

var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
var externalsHash = await File.ReadAllTextAsync(externalsHashFile);

if (externalsHash == trim[0].TrimmedContents["externals"])
{
Assert.Contains("Use trimmed (externals) package", File.ReadAllText(traceFile));
}
else
{
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile));
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_UseExternalsRuntimeTrimmedPackage()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));

//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper(); // hashfiles
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper(); // hashfiles
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper(); // un-tar
p3.Initialize(hc);
var p4 = new ProcessInvokerWrapper(); // node -v
p4.Initialize(hc);
var p5 = new ProcessInvokerWrapper(); // node -v
p5.Initialize(hc);
var p6 = new ProcessInvokerWrapper(); // runner -v
p6.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
hc.EnqueueInstance<IProcessInvoker>(p4);
hc.EnqueueInstance<IProcessInvoker>(p5);
hc.EnqueueInstance<IProcessInvoker>(p6);
updater.Initialize(hc);

var trim = _trimmedPackages.Where(x => x.TrimmedContents.ContainsKey("dotnetRuntime") && x.TrimmedContents.ContainsKey("externals")).ToList();
_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.299.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.299.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));

_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0"), CancellationToken.None);
}

var traceFile = Path.GetTempFileName();
File.Copy(hc.TraceFileName, traceFile, true);

var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
var externalsHash = await File.ReadAllTextAsync(externalsHashFile);

var runtimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
var runtimeHash = await File.ReadAllTextAsync(runtimeHashFile);

if (externalsHash == trim[0].TrimmedContents["externals"] &&
runtimeHash == trim[0].TrimmedContents["dotnetRuntime"])
{
Assert.Contains("Use trimmed (runtime+externals) package", File.ReadAllText(traceFile));
}
else
{
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile));
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_NotUseExternalsRuntimeTrimmedPackageOnHashMismatch()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));

//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper(); // hashfiles
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper(); // hashfiles
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper(); // un-tar
p3.Initialize(hc);
var p4 = new ProcessInvokerWrapper(); // node -v
p4.Initialize(hc);
var p5 = new ProcessInvokerWrapper(); // node -v
p5.Initialize(hc);
var p6 = new ProcessInvokerWrapper(); // runner -v
p6.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
hc.EnqueueInstance<IProcessInvoker>(p4);
hc.EnqueueInstance<IProcessInvoker>(p5);
hc.EnqueueInstance<IProcessInvoker>(p6);
updater.Initialize(hc);

var trim = _trimmedPackages.ToList();
foreach (var package in trim)
{
foreach (var hash in package.TrimmedContents.Keys)
{
package.TrimmedContents[hash] = "mismatch";
}
}

_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.299.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.299.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));

_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0"), CancellationToken.None);
}

var traceFile = Path.GetTempFileName();
File.Copy(hc.TraceFileName, traceFile, true);
Assert.Contains("the current runner does not carry those trimmed content (Hash mismatch)", File.ReadAllText(traceFile));
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestSelfUpdateAsync_FallbackToFullPackage()
{
try
{
await FetchLatestRunner();
Assert.NotNull(_packageUrl);
Assert.NotNull(_trimmedPackages);
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", Path.GetFullPath(Path.Combine(TestUtil.GetSrcPath(), "..", "_layout", "bin")));
using (var hc = new TestHostContext(this))
{
hc.GetTrace().Info(_packageUrl);
hc.GetTrace().Info(StringUtil.ConvertToJson(_trimmedPackages));

//Arrange
var updater = new Runner.Listener.SelfUpdater();
hc.SetSingleton<ITerminal>(_term.Object);
hc.SetSingleton<IRunnerServer>(_runnerServer.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());

var p1 = new ProcessInvokerWrapper(); // hashfiles
p1.Initialize(hc);
var p2 = new ProcessInvokerWrapper(); // hashfiles
p2.Initialize(hc);
var p3 = new ProcessInvokerWrapper(); // un-tar trim
p3.Initialize(hc);
var p4 = new ProcessInvokerWrapper(); // un-tar full
p4.Initialize(hc);
hc.EnqueueInstance<IProcessInvoker>(p1);
hc.EnqueueInstance<IProcessInvoker>(p2);
hc.EnqueueInstance<IProcessInvoker>(p3);
hc.EnqueueInstance<IProcessInvoker>(p4);
updater.Initialize(hc);

var trim = _trimmedPackages.ToList();
foreach (var package in trim)
{
package.HashValue = "mismatch";
}

_runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.299.0", true, It.IsAny<CancellationToken>()))
.Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.299.0"), DownloadUrl = _packageUrl, TrimmedPackages = trim }));

_runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny<string>(), It.IsAny<string>()))
.Callback((int p, int a, string s, string t) =>
{
hc.GetTrace().Info(t);
})
.Returns(Task.FromResult(new TaskAgent()));

try
{
var result = await updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken);
Assert.True(result);
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0")));
Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0")));
}
finally
{
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "bin.2.299.0"), CancellationToken.None);
IOUtil.DeleteDirectory(Path.Combine(hc.GetDirectory(WellKnownDirectory.Root), "externals.2.299.0"), CancellationToken.None);
}

var traceFile = Path.GetTempFileName();
File.Copy(hc.TraceFileName, traceFile, true);
if (File.ReadAllText(traceFile).Contains("Use trimmed (runtime+externals) package"))
{
Assert.Contains("Something wrong with the trimmed runner package, failback to use the full package for runner updates", File.ReadAllText(traceFile));
}
else
{
hc.GetTrace().Warning("Skipping the 'TestSelfUpdateAsync_FallbackToFullPackage' test, as the `externals` or `runtime` hashes have been updated");
}
}
}
finally
{
Environment.SetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR", null);
var ex = await Assert.ThrowsAsync<Exception>(() => updater.SelfUpdate(_refreshMessage, _jobDispatcher.Object, true, hc.RunnerShutdownToken));
Assert.Contains("did not match expected Runner Hash", ex.Message);
}
}
}
@@ -2,13 +2,13 @@
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using Xunit;
using GitHub.Runner.Common.Util;
using System.Threading.Channels;
using GitHub.Runner.Sdk;
using System.Linq;

namespace GitHub.Runner.Common.Tests
{
@@ -150,128 +150,5 @@ namespace GitHub.Runner.Common.Tests
}
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_CheckDotnetRuntimeHash()
{
using (TestHostContext hc = new TestHostContext(this))
{
Tracing trace = hc.GetTrace();
var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
trace.Info($"Current hash: {File.ReadAllText(dotnetRuntimeHashFile)}");
string layoutTrimsRuntimeAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/runtime");

string binDir = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");

#if OS_WINDOWS
string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node12\bin\node");
#else
string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node12/bin/node");
#endif
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);

p1.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
{
hashResult = data.Data.Substring(10, data.Data.Length - 20);
trace.Info($"Hash result: '{hashResult}'");
}
else
{
trace.Info(data.Data);
}
};

p1.OutputDataReceived += (_, data) =>
{
trace.Info(data.Data);
};

var env = new Dictionary<string, string>
{
["patterns"] = "**"
};

int exitCode = await p1.ExecuteAsync(workingDirectory: layoutTrimsRuntimeAssets,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: CancellationToken.None);

Assert.True(string.Equals(hashResult, File.ReadAllText(dotnetRuntimeHashFile).Trim()), $"Hash mismatch for dotnet runtime. You might need to update `Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}` or check if `hashFiles.ts` ever changed recently.");
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_CheckExternalsHash()
{
using (TestHostContext hc = new TestHostContext(this))
{
Tracing trace = hc.GetTrace();
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
trace.Info($"Current hash: {File.ReadAllText(externalsHashFile)}");

string layoutTrimsExternalsAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/externals");

string binDir = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");

#if OS_WINDOWS
string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node12\bin\node");
#else
string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node12/bin/node");
#endif
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;

var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);

p1.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
{
hashResult = data.Data.Substring(10, data.Data.Length - 20);
trace.Info($"Hash result: '{hashResult}'");
}
else
{
trace.Info(data.Data);
}
};

p1.OutputDataReceived += (_, data) =>
{
trace.Info(data.Data);
};

var env = new Dictionary<string, string>
{
["patterns"] = "**"
};

int exitCode = await p1.ExecuteAsync(workingDirectory: layoutTrimsExternalsAssets,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: CancellationToken.None);

Assert.True(string.Equals(hashResult, File.ReadAllText(externalsHashFile).Trim()), $"Hash mismatch for externals. You might need to update `Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}` or check if `hashFiles.ts` ever changed recently.");
}
}
}
}
@@ -1,6 +1,5 @@
using System;
using System.Runtime.CompilerServices;
using System.Text.RegularExpressions;
using GitHub.Runner.Common;
using GitHub.Runner.Listener.Configuration;
using Xunit;
@@ -16,7 +15,7 @@ namespace GitHub.Runner.Common.Tests
{
RunnerSettings settings = new RunnerSettings();

settings.AgentName = "thisiskindofalongrunnerabcde";
settings.AgentName = "thisiskindofalongrunnername1";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/myorganizationexample/myrepoexample";

@@ -44,7 +43,7 @@ namespace GitHub.Runner.Common.Tests
Assert.Equal("actions", serviceNameParts[0]);
Assert.Equal("runner", serviceNameParts[1]);
Assert.Equal("myorganizationexample-myrepoexample", serviceNameParts[2]); // '/' has been replaced with '-'
Assert.Equal("thisiskindofalongrunnerabcde", serviceNameParts[3]);
Assert.Equal("thisiskindofalongrunnername1", serviceNameParts[3]);
}
}

@@ -55,7 +54,7 @@ namespace GitHub.Runner.Common.Tests
{
RunnerSettings settings = new RunnerSettings();

settings.AgentName = "thisiskindofalongrunnernabcde";
settings.AgentName = "thisiskindofalongrunnername12";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/myorganizationexample/myrepoexample";
@@ -83,129 +82,88 @@ namespace GitHub.Runner.Common.Tests
Assert.Equal("actions", serviceNameParts[0]);
Assert.Equal("runner", serviceNameParts[1]);
Assert.Equal("myorganizationexample-myrepoexample", serviceNameParts[2]); // '/' has been replaced with '-'
Assert.Equal("thisiskindofalongrunnernabcde", serviceNameParts[3]); // should not have random numbers unless we exceed 80
Assert.Equal("thisiskindofalongrunnername12", serviceNameParts[3]);
}
}

#if OS_WINDOWS
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Service")]
public void CalculateServiceNameLimitsServiceNameTo80Chars()
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Service")]
public void CalculateServiceNameLimitsServiceNameTo80Chars()
{
RunnerSettings settings = new RunnerSettings();

settings.AgentName = "thisisareallyreallylongbutstillvalidagentname";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/myreallylongorganizationexample/myreallylongrepoexample";

string serviceNamePattern = "actions.runner.{0}.{1}";
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";

using (TestHostContext hc = CreateTestContext())
{
RunnerSettings settings = new RunnerSettings();
ServiceControlManager scm = new ServiceControlManager();

settings.AgentName = "thisisareallyreallylongbutstillvalidagentname";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/myreallylongorganizationexample/myreallylongrepoexample";
scm.Initialize(hc);
scm.CalculateServiceName(
settings,
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);

string serviceNamePattern = "actions.runner.{0}.{1}";
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";
// Verify name has been shortened to 80 characters
Assert.Equal(80, serviceName.Length);

using (TestHostContext hc = CreateTestContext())
{
ServiceControlManager scm = new ServiceControlManager();
var serviceNameParts = serviceName.Split('.');

scm.Initialize(hc);
scm.CalculateServiceName(
settings,
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);

// Verify name has been shortened to 80 characters
Assert.Equal(80, serviceName.Length);

var serviceNameParts = serviceName.Split('.');

// Verify that each component has been shortened to a sensible length
Assert.Equal("actions", serviceNameParts[0]); // Never shortened
Assert.Equal("runner", serviceNameParts[1]); // Never shortened
Assert.Equal("myreallylongorganizationexample-myreallylongr", serviceNameParts[2]); // First 45 chars, '/' has been replaced with '-'
Assert.Matches(@"^(thisisareallyr-[0-9]{4})$", serviceNameParts[3]); // Remainder of unused chars, 4 random numbers added at the end
}
// Verify that each component has been shortened to a sensible length
Assert.Equal("actions", serviceNameParts[0]); // Never shortened
Assert.Equal("runner", serviceNameParts[1]); // Never shortened
Assert.Equal("myreallylongorganizationexample-myreallylongr", serviceNameParts[2]); // First 45 chars, '/' has been replaced with '-'
Assert.Equal("thisisareallyreally", serviceNameParts[3]); // Remainder of unused chars
}
}
// Special 'defensive' test that verifies we can gracefully handle creating service names
// in case GitHub.com changes its org/repo naming convention in the future,
// and some of these characters may be invalid for service names
// Not meant to test character set exhaustively -- it's just here to exercise the sanitizing logic
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Service")]
public void CalculateServiceNameSanitizeOutOfRangeChars()
// Special 'defensive' test that verifies we can gracefully handle creating service names
// in case GitHub.com changes its org/repo naming convention in the future,
// and some of these characters may be invalid for service names
// Not meant to test character set exhaustively -- it's just here to exercise the sanitizing logic
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Service")]
public void CalculateServiceNameSanitizeOutOfRangeChars()
{
RunnerSettings settings = new RunnerSettings();

settings.AgentName = "name";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/org!@$*+[]()/repo!@$*+[]()";

string serviceNamePattern = "actions.runner.{0}.{1}";
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";

using (TestHostContext hc = CreateTestContext())
{
RunnerSettings settings = new RunnerSettings();
ServiceControlManager scm = new ServiceControlManager();

settings.AgentName = "name";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/org!@$*+[]()/repo!@$*+[]()";
scm.Initialize(hc);
scm.CalculateServiceName(
settings,
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);

string serviceNamePattern = "actions.runner.{0}.{1}";
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";
var serviceNameParts = serviceName.Split('.');

using (TestHostContext hc = CreateTestContext())
{
ServiceControlManager scm = new ServiceControlManager();

scm.Initialize(hc);
scm.CalculateServiceName(
settings,
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);

var serviceNameParts = serviceName.Split('.');

// Verify service name parts are sanitized correctly
Assert.Equal("actions", serviceNameParts[0]);
Assert.Equal("runner", serviceNameParts[1]);
Assert.Equal("org----------repo---------", serviceNameParts[2]); // Chars replaced with '-'
Assert.Equal("name", serviceNameParts[3]);
}
// Verify service name parts are sanitized correctly
Assert.Equal("actions", serviceNameParts[0]);
Assert.Equal("runner", serviceNameParts[1]);
Assert.Equal("org----------repo---------", serviceNameParts[2]); // Chars replaced with '-'
Assert.Equal("name", serviceNameParts[3]);
}
#else
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Service")]
public void CalculateServiceNameLimitsServiceNameTo150Chars()
{
RunnerSettings settings = new RunnerSettings();

settings.AgentName = "thisisareallyreallylongbutstillvalidagentnameiamusingforthisexampletotestverylongnamelimits";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/myreallylongorganizationexampleonlinux/myreallylongrepoexampleonlinux1234";

string serviceNamePattern = "actions.runner.{0}.{1}";
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";

using (TestHostContext hc = CreateTestContext())
{
ServiceControlManager scm = new ServiceControlManager();

scm.Initialize(hc);
scm.CalculateServiceName(
settings,
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);

// Verify name has been shortened to 150
Assert.Equal(150, serviceName.Length);

var serviceNameParts = serviceName.Split('.');

// Verify that each component has been shortened to a sensible length
Assert.Equal("actions", serviceNameParts[0]); // Never shortened
Assert.Equal("runner", serviceNameParts[1]); // Never shortened
Assert.Equal("myreallylongorganizationexampleonlinux-myreallylongrepoexampleonlinux1", serviceNameParts[2]); // First 70 chars, '/' has been replaced with '-'
Assert.Matches(@"^(thisisareallyreallylongbutstillvalidagentnameiamusingforthi-[0-9]{4})$", serviceNameParts[3]);
}
}
#endif
}

private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
{
@@ -165,15 +165,7 @@ namespace GitHub.Runner.Common.Tests
switch (directory)
{
case WellKnownDirectory.Bin:
var overwriteBinDir = Environment.GetEnvironmentVariable("RUNNER_L0_OVERRIDEBINDIR");
if (Directory.Exists(overwriteBinDir))
{
path = overwriteBinDir;
}
else
{
path = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
}
path = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
break;

case WellKnownDirectory.Diag:
@@ -121,7 +121,6 @@ namespace GitHub.Runner.Common.Tests.Worker
using (TestHostContext hc = CreateTestContext())
{
_ec.Object.Global.EnvironmentVariables = new Dictionary<string, string>();
_ec.Object.Global.JobTelemetry = new List<JobTelemetry>();
var expressionValues = new DictionaryContextData
{
["env"] =
@@ -132,6 +131,7 @@ namespace GitHub.Runner.Common.Tests.Worker
#endif
};
_ec.Setup(x => x.ExpressionValues).Returns(expressionValues);
_ec.Setup(x => x.JobTelemetry).Returns(new List<JobTelemetry>());

Assert.True(_commandManager.TryProcessCommand(_ec.Object, $"::stop-commands::{invalidToken}", null));
}
@@ -148,8 +148,8 @@ namespace GitHub.Runner.Common.Tests.Worker
using (TestHostContext hc = CreateTestContext())
{
_ec.Object.Global.EnvironmentVariables = new Dictionary<string, string>();
_ec.Object.Global.JobTelemetry = new List<JobTelemetry>();
_ec.Setup(x => x.ExpressionValues).Returns(GetExpressionValues());
_ec.Setup(x => x.JobTelemetry).Returns(new List<JobTelemetry>());
Assert.Throws<Exception>(() => _commandManager.TryProcessCommand(_ec.Object, $"::stop-commands::{invalidToken}", null));
}
}
@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
@@ -2135,7 +2135,6 @@ runs:
_ec = new Mock<IExecutionContext>();
_ec.Setup(x => x.Global).Returns(new GlobalContext());
_ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token);
_ec.Setup(x => x.Root).Returns(new GitHub.Runner.Worker.ExecutionContext());
var variables = new Dictionary<string, VariableValue>();
if (enableComposite)
{
@@ -2157,8 +2156,8 @@ runs:
_dockerManager.Setup(x => x.DockerBuild(_ec.Object, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(0));

_jobServer = new Mock<IJobServer>();
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
.Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
.Returns((Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken) =>
{
var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
foreach (var action in actions.Actions)

@@ -118,7 +118,7 @@ namespace GitHub.Runner.Common.Tests.Worker
await _actionRunner.RunAsync();

//Assert
_ec.Verify(x => x.WriteWebhookPayload(), Times.Once);
_ec.Verify(x => x.SetGitHubContext("event_path", Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "_github_workflow", "event.json")), Times.Once);
}

[Fact]
@@ -1,289 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Runtime.CompilerServices;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using Moq;
using Xunit;
using DTWebApi = GitHub.DistributedTask.WebApi;
using GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;

namespace GitHub.Runner.Common.Tests.Worker
{
public sealed class CreateStepSummaryCommandL0
{
private Mock<IExecutionContext> _executionContext;
private Mock<IJobServerQueue> _jobServerQueue;
private ExecutionContext _jobExecutionContext;
private List<Tuple<DTWebApi.Issue, string>> _issues;
private Variables _variables;
private string _rootDirectory;
private CreateStepSummaryCommand _createStepCommand;
private ITraceWriter _trace;

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_FeatureDisabled()
{
using (var hostContext = Setup(featureFlagState: "false"))
{
var stepSummaryFile = Path.Combine(_rootDirectory, "feature-off");

_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();

_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Never());
Assert.Equal(0, _issues.Count);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_FileNull()
{
using (var hostContext = Setup())
{
_createStepCommand.ProcessCommand(_executionContext.Object, null, null);
_jobExecutionContext.Complete();

_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Never());
Assert.Equal(0, _issues.Count);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_DirectoryNotFound()
{
using (var hostContext = Setup())
{
var stepSummaryFile = Path.Combine(_rootDirectory, "directory-not-found", "env");

_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();

_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Never());
Assert.Equal(0, _issues.Count);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_FileNotFound()
{
using (var hostContext = Setup())
{
var stepSummaryFile = Path.Combine(_rootDirectory, "file-not-found");

_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();

_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Never());
Assert.Equal(0, _issues.Count);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_EmptyFile()
{
using (var hostContext = Setup())
{
var stepSummaryFile = Path.Combine(_rootDirectory, "empty-file");
File.Create(stepSummaryFile).Dispose();

_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();

_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Never());
Assert.Equal(0, _issues.Count);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_LargeFile()
{
using (var hostContext = Setup())
{
var stepSummaryFile = Path.Combine(_rootDirectory, "empty-file");
File.WriteAllBytes(stepSummaryFile, new byte[128 * 1024 + 1]);

_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();

_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Never());
Assert.Equal(1, _issues.Count);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_Simple()
{
using (var hostContext = Setup())
{
var stepSummaryFile = Path.Combine(_rootDirectory, "simple");
var content = new List<string>
{
"# This is some markdown content",
"",
"## This is more markdown content",
};
WriteContent(stepSummaryFile, content);

_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();

_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), ChecksAttachmentType.StepSummary, _executionContext.Object.Id.ToString(), stepSummaryFile + "-scrubbed", It.IsAny<bool>()), Times.Once());
Assert.Equal(0, _issues.Count);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_ScrubSecrets()
{
using (var hostContext = Setup())
{
// configure secretmasker to actually mask secrets
hostContext.SecretMasker.AddRegex("Password=.*");
hostContext.SecretMasker.AddRegex("ghs_.*");

var stepSummaryFile = Path.Combine(_rootDirectory, "simple");
var scrubbedFile = stepSummaryFile + "-scrubbed";
var content = new List<string>
{
"# Password=ThisIsMySecretPassword!",
"",
"# GITHUB_TOKEN ghs_verysecuretoken",
};
WriteContent(stepSummaryFile, content);

_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);

var scrubbedFileContents = File.ReadAllText(scrubbedFile);
Assert.DoesNotContain("ThisIsMySecretPassword!", scrubbedFileContents);
Assert.DoesNotContain("ghs_verysecuretoken", scrubbedFileContents);
_jobExecutionContext.Complete();

_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), ChecksAttachmentType.StepSummary, _executionContext.Object.Id.ToString(), scrubbedFile, It.IsAny<bool>()), Times.Once());
Assert.Equal(0, _issues.Count);
}
}

private void WriteContent(
string path,
List<string> content,
string newline = null)
{
if (string.IsNullOrEmpty(newline))
{
newline = Environment.NewLine;
}

var encoding = new UTF8Encoding(true); // Emit BOM
var contentStr = string.Join(newline, content);
File.WriteAllText(path, contentStr, encoding);
}

private TestHostContext Setup([CallerMemberName] string name = "", string featureFlagState = "true")
{
var hostContext = new TestHostContext(this, name);

_issues = new List<Tuple<DTWebApi.Issue, string>>();

// Setup a job request
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "Summary Job";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
jobRequest.Variables["ACTIONS_STEP_DEBUG"] = "true";

// Server queue for job
_jobServerQueue = new Mock<IJobServerQueue>();
_jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
hostContext.SetSingleton(_jobServerQueue.Object);

// Configuration store (required singleton)
var configurationStore = new Mock<IConfigurationStore>();
configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings());
hostContext.SetSingleton(configurationStore.Object);

// Paging Logger (required singleton)
var pagingLogger = new Mock<IPagingLogger>();
hostContext.EnqueueInstance(pagingLogger.Object);

// Trace
_trace = hostContext.GetTrace();

// Variables to test for secret scrubbing & FF options
_variables = new Variables(hostContext, new Dictionary<string, VariableValue>
{
{ "MySecretName", new VariableValue("My secret value", true) },
{ "DistributedTask.UploadStepSummary", featureFlagState },
});

// Directory for test data
var workDirectory = hostContext.GetDirectory(WellKnownDirectory.Work);
ArgUtil.NotNullOrEmpty(workDirectory, nameof(workDirectory));
Directory.CreateDirectory(workDirectory);
_rootDirectory = Path.Combine(workDirectory, nameof(CreateStepSummaryCommandL0));
Directory.CreateDirectory(_rootDirectory);

// Job execution context
_jobExecutionContext = new ExecutionContext();
_jobExecutionContext.Initialize(hostContext);
_jobExecutionContext.InitializeJob(jobRequest, System.Threading.CancellationToken.None);

// Step execution context
_executionContext = new Mock<IExecutionContext>();
_executionContext.Setup(x => x.Global)
.Returns(new GlobalContext
{
EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
WriteDebug = true,
Variables = _variables,
});
_executionContext.Setup(x => x.AddIssue(It.IsAny<DTWebApi.Issue>(), It.IsAny<string>()))
.Callback((DTWebApi.Issue issue, string logMessage) =>
{
_issues.Add(new Tuple<DTWebApi.Issue, string>(issue, logMessage));
var message = !string.IsNullOrEmpty(logMessage) ? logMessage : issue.Message;
_trace.Info($"Issue '{issue.Type}': {message}");
});
_executionContext.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>()))
.Callback((string tag, string message) =>
{
_trace.Info($"{tag}{message}");
});
_executionContext.SetupGet(x => x.Root).Returns(_jobExecutionContext);

//CreateStepSummaryCommand
_createStepCommand = new CreateStepSummaryCommand();
_createStepCommand.Initialize(hostContext);

return hostContext;
}
}
}
@@ -1,12 +1,12 @@
using System;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker;
using Moq;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker;
using Moq;
using Xunit;
using Pipelines = GitHub.DistributedTask.Pipelines;

@@ -90,109 +90,6 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void AddIssue_TrimMessageSize()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange: Create a job request message.
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();

// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));

hc.EnqueueInstance(pagingLogger.Object);
hc.SetSingleton(jobServerQueue.Object);

var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);

// Act.
ec.InitializeJob(jobRequest, CancellationToken.None);

var bigMessage = "";
for (var i = 0; i < 5000; i++)
{
bigMessage += "a";
}

ec.AddIssue(new Issue() { Type = IssueType.Error, Message = bigMessage });
ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = bigMessage });
ec.AddIssue(new Issue() { Type = IssueType.Notice, Message = bigMessage });

ec.Complete();

// Assert.
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Type == IssueType.Error).Single().Message.Length <= 4096)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Type == IssueType.Warning).Single().Message.Length <= 4096)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Type == IssueType.Notice).Single().Message.Length <= 4096)), Times.AtLeastOnce);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void AddIssue_AddStepAndLineNumberInformation()
{
using (TestHostContext hc = CreateTestContext())
{

TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();

// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var pagingLogger2 = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));

hc.EnqueueInstance(pagingLogger.Object);
hc.EnqueueInstance(pagingLogger2.Object);
hc.SetSingleton(jobServerQueue.Object);

var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);
ec.InitializeJob(jobRequest, CancellationToken.None);
ec.Start();

var embeddedStep = ec.CreateChild(Guid.NewGuid(), "action_1_pre", "action_1_pre", null, null, ActionRunStage.Main, isEmbedded: true);
embeddedStep.Start();

embeddedStep.AddIssue(new Issue() { Type = IssueType.Error, Message = "error annotation that should have step and line number information" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning annotation that should have step and line number information" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice annotation that should have step and line number information" });

jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Error).Count() == 1)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Warning).Count() == 1)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Notice).Count() == 1)), Times.AtLeastOnce);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
@@ -219,7 +116,7 @@ namespace GitHub.Runner.Common.Tests.Worker
var pagingLogger = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
jobServerQueue.Setup(x => x.QueueWebConsoleLine(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<long>())).Callback((Guid id, string msg, long? lineNumber) => { hc.GetTrace().Info(msg); });
jobServerQueue.Setup(x => x.QueueWebConsoleLine(It.IsAny<Guid>(), It.IsAny<string>(),It.IsAny<long>())).Callback((Guid id, string msg, long? lineNumber) => { hc.GetTrace().Info(msg); });

hc.EnqueueInstance(pagingLogger.Object);
hc.SetSingleton(jobServerQueue.Object);
@@ -481,177 +378,6 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void PublishStepTelemetry_RegularStep_NoOpt()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange: Create a job request message.
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();

// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));

hc.EnqueueInstance(pagingLogger.Object);
hc.SetSingleton(jobServerQueue.Object);

var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);

// Act.
ec.InitializeJob(jobRequest, CancellationToken.None);
ec.Start();

ec.Complete();

// Assert.
Assert.Equal(0, ec.Global.StepsTelemetry.Count);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void PublishStepTelemetry_RegularStep()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange: Create a job request message.
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();

// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));

hc.EnqueueInstance(pagingLogger.Object);
hc.SetSingleton(jobServerQueue.Object);

var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);

// Act.
ec.InitializeJob(jobRequest, CancellationToken.None);
ec.Start();

ec.StepTelemetry.Type = "node16";
ec.StepTelemetry.Action = "actions/checkout";
ec.StepTelemetry.Ref = "v2";
ec.StepTelemetry.IsEmbedded = false;
ec.StepTelemetry.StepId = Guid.NewGuid();
ec.StepTelemetry.Stage = "main";

ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
ec.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice" });
ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
ec.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice" });

ec.Complete();

// Assert.
Assert.Equal(1, ec.Global.StepsTelemetry.Count);
Assert.Equal("node16", ec.Global.StepsTelemetry.Single().Type);
Assert.Equal("actions/checkout", ec.Global.StepsTelemetry.Single().Action);
Assert.Equal("v2", ec.Global.StepsTelemetry.Single().Ref);
Assert.Equal(TaskResult.Succeeded, ec.Global.StepsTelemetry.Single().Result);
Assert.NotNull(ec.Global.StepsTelemetry.Single().ExecutionTimeInSeconds);
Assert.Equal(3, ec.Global.StepsTelemetry.Single().ErrorMessages.Count);
Assert.DoesNotContain(ec.Global.StepsTelemetry.Single().ErrorMessages, x => x.Contains("notice"));
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void PublishStepTelemetry_EmbeddedStep()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange: Create a job request message.
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();

// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var pagingLogger2 = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));

hc.EnqueueInstance(pagingLogger.Object);
hc.EnqueueInstance(pagingLogger2.Object);
hc.SetSingleton(jobServerQueue.Object);

var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);

// Act.
ec.InitializeJob(jobRequest, CancellationToken.None);
ec.Start();

var embeddedStep = ec.CreateChild(Guid.NewGuid(), "action_1_pre", "action_1_pre", null, null, ActionRunStage.Main, isEmbedded: true);
embeddedStep.Start();

embeddedStep.StepTelemetry.Type = "node16";
embeddedStep.StepTelemetry.Action = "actions/checkout";
embeddedStep.StepTelemetry.Ref = "v2";

embeddedStep.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice" });

embeddedStep.PublishStepTelemetry();

// Assert.
Assert.Equal(1, ec.Global.StepsTelemetry.Count);
Assert.Equal("node16", ec.Global.StepsTelemetry.Single().Type);
Assert.Equal("actions/checkout", ec.Global.StepsTelemetry.Single().Action);
Assert.Equal("v2", ec.Global.StepsTelemetry.Single().Ref);
Assert.Equal(ActionRunStage.Main.ToString(), ec.Global.StepsTelemetry.Single().Stage);
Assert.True(ec.Global.StepsTelemetry.Single().IsEmbedded);
Assert.Null(ec.Global.StepsTelemetry.Single().Result);
Assert.Null(ec.Global.StepsTelemetry.Single().ExecutionTimeInSeconds);
Assert.Equal(0, ec.Global.StepsTelemetry.Single().ErrorMessages.Count);
}
}

private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
{
var hc = new TestHostContext(this, testName);

@@ -1,100 +0,0 @@
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using Moq;
using Xunit;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Handlers;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;

namespace GitHub.Runner.Common.Tests.Worker
{
public sealed class HandlerFactoryL0
{
private Mock<IExecutionContext> _ec;
private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
{
var hostContext = new TestHostContext(this, testName);
_ec = new Mock<IExecutionContext>();
_ec.SetupAllProperties();
_ec.Object.Initialize(hostContext);
var handler = new Mock<INodeScriptActionHandler>();
handler.SetupAllProperties();
hostContext.EnqueueInstance(handler.Object);
//hostContext.EnqueueInstance(new ActionCommandManager() as IActionCommandManager);

return hostContext;
}

[Theory]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
[InlineData("node12", "", "", "", "node12")]
[InlineData("node12", "true", "", "", "node16")]
[InlineData("node12", "true", "", "true", "node12")]
[InlineData("node12", "true", "true", "", "node12")]
[InlineData("node12", "true", "true", "true", "node12")]
[InlineData("node12", "true", "false", "true", "node16")] // workflow overrides env
[InlineData("node16", "", "", "", "node16")]
[InlineData("node16", "true", "", "", "node16")]
[InlineData("node16", "true", "", "true", "node16")]
[InlineData("node16", "true", "true", "", "node16")]
[InlineData("node16", "true", "true", "true", "node16")]
[InlineData("node16", "true", "false", "true", "node16")]
public void IsNodeVersionUpgraded(string inputVersion, string serverFeatureFlag, string workflowOptOut, string machineOptOut, string expectedVersion)
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange.
var hf = new HandlerFactory();
hf.Initialize(hc);

// Server Feature Flag
var variables = new Dictionary<string, VariableValue>();
if (!string.IsNullOrEmpty(serverFeatureFlag))
{
variables["DistributedTask.ForceGithubJavascriptActionsToNode16"] = serverFeatureFlag;
}
Variables serverVariables = new Variables(hc, variables);

// Workflow opt-out
var workflowVariables = new Dictionary<string, string>();
if (!string.IsNullOrEmpty(workflowOptOut))
{
workflowVariables[Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion] = workflowOptOut;
}

// Machine opt-out
if (!string.IsNullOrEmpty(machineOptOut))
{
Environment.SetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, machineOptOut);
}

_ec.Setup(x => x.Global).Returns(new GlobalContext()
{
Variables = serverVariables,
EnvironmentVariables = workflowVariables
});


// Act.
var data = new NodeJSActionExecutionData();
data.NodeVersion = inputVersion;
var handler = hf.Create(
_ec.Object,
new ScriptReference(),
new Mock<IStepHost>().Object,
data,
new Dictionary<string, string>(),
new Dictionary<string, string>(),
new Variables(hc, new Dictionary<string, VariableValue>()), "", new List<JobExtensionRunner>()
) as INodeScriptActionHandler;

// Assert.
Assert.Equal(expectedVersion, handler.Data.NodeVersion);
Environment.SetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, null);
}
}
}
}
@@ -1,88 +0,0 @@
using System;
using System.Runtime.CompilerServices;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Handlers;
using Moq;
using Xunit;

namespace GitHub.Runner.Common.Tests.Worker
{
public sealed class HandlerL0
{
private Mock<IExecutionContext> _ec;
private ActionsStepTelemetry _stepTelemetry;
private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
{
var hc = new TestHostContext(this, testName);
_stepTelemetry = new ActionsStepTelemetry();
_ec = new Mock<IExecutionContext>();
_ec.SetupAllProperties();
_ec.Setup(x => x.StepTelemetry).Returns(_stepTelemetry);

var trace = hc.GetTrace();
_ec.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>())).Callback((string tag, string message) => { trace.Info($"[{tag}]{message}"); });

hc.EnqueueInstance<IActionCommandManager>(new Mock<IActionCommandManager>().Object);
return hc;
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void PrepareExecution_PopulateTelemetry_RepoActions()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange.
var nodeHandler = new NodeScriptActionHandler();
nodeHandler.Initialize(hc);

nodeHandler.ExecutionContext = _ec.Object;
nodeHandler.Action = new RepositoryPathReference()
{
Name = "actions/checkout",
Ref = "v2"
};

// Act.
nodeHandler.PrepareExecution(ActionRunStage.Main);
hc.GetTrace().Info($"Telemetry: {StringUtil.ConvertToJson(_stepTelemetry)}");

// Assert.
Assert.Equal("repository", _stepTelemetry.Type);
Assert.Equal("actions/checkout", _stepTelemetry.Action);
Assert.Equal("v2", _stepTelemetry.Ref);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void PrepareExecution_PopulateTelemetry_DockerActions()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange.
var nodeHandler = new NodeScriptActionHandler();
nodeHandler.Initialize(hc);

nodeHandler.ExecutionContext = _ec.Object;
nodeHandler.Action = new ContainerRegistryReference()
{
Image = "ubuntu:20.04"
};

// Act.
nodeHandler.PrepareExecution(ActionRunStage.Main);
hc.GetTrace().Info($"Telemetry: {StringUtil.ConvertToJson(_stepTelemetry)}");

// Assert.
Assert.Equal("docker", _stepTelemetry.Type);
Assert.Equal("ubuntu:20.04", _stepTelemetry.Action);
}
}
}
}
@@ -25,7 +25,6 @@ namespace GitHub.Runner.Common.Tests.Worker
private Mock<IPagingLogger> _logger;
private Mock<IContainerOperationProvider> _containerProvider;
private Mock<IDiagnosticLogManager> _diagnosticLogManager;
private Mock<IJobHookProvider> _jobHookProvider;

private CancellationTokenSource _tokenSource;
private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
@@ -41,7 +40,6 @@ namespace GitHub.Runner.Common.Tests.Worker
_directoryManager = new Mock<IPipelineDirectoryManager>();
_directoryManager.Setup(x => x.PrepareDirectory(It.IsAny<IExecutionContext>(), It.IsAny<Pipelines.WorkspaceOptions>()))
.Returns(new TrackingConfig() { PipelineDirectory = "runner", WorkspaceDirectory = "runner/runner" });
_jobHookProvider = new Mock<IJobHookProvider>();

IActionRunner step1 = new ActionRunner();
IActionRunner step2 = new ActionRunner();
@@ -113,9 +111,7 @@ namespace GitHub.Runner.Common.Tests.Worker
hc.SetSingleton(_containerProvider.Object);
hc.SetSingleton(_directoryManager.Object);
hc.SetSingleton(_diagnosticLogManager.Object);
hc.SetSingleton(_jobHookProvider.Object);
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // JobExecutionContext
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // job start hook
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // Initial Job
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step1
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step2
@@ -124,7 +120,6 @@ namespace GitHub.Runner.Common.Tests.Worker
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step5
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // prepare1
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // prepare2
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // job complete hook

hc.EnqueueInstance<IActionRunner>(step1);
hc.EnqueueInstance<IActionRunner>(step2);
@@ -353,62 +348,5 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(TaskResult.Succeeded, _jobEc.Result);
}
}


[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task EnsurePreAndPostHookStepsIfEnvExists()
{
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED", "/foo/bar");
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED", "/bar/foo");
using (TestHostContext hc = CreateTestContext())
{
var jobExtension = new JobExtension();
jobExtension.Initialize(hc);

_actionManager.Setup(x => x.PrepareActionsAsync(It.IsAny<IExecutionContext>(), It.IsAny<IEnumerable<Pipelines.JobStep>>(), It.IsAny<Guid>()))
.Returns(Task.FromResult(new PrepareResult(new List<JobExtensionRunner>(), new Dictionary<Guid, IActionRunner>())));

List<IStep> result = await jobExtension.InitializeJob(_jobEc, _message);

var trace = hc.GetTrace();

var hookStart = result.First() as JobExtensionRunner;

jobExtension.FinalizeJob(_jobEc, _message, DateTime.UtcNow);

Assert.Equal(Constants.Hooks.JobStartedStepName, hookStart.DisplayName);
Assert.Equal(Constants.Hooks.JobCompletedStepName, (_jobEc.PostJobSteps.Last() as JobExtensionRunner).DisplayName);
}

Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED", null);
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED", null);
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void EnsureNoPreAndPostHookSteps()
{
using (TestHostContext hc = CreateTestContext())
{
var jobExtension = new JobExtension();
jobExtension.Initialize(hc);

_message.ActionsEnvironment = null;

_jobEc = new Runner.Worker.ExecutionContext {Result = TaskResult.Succeeded};
_jobEc.Initialize(hc);
_jobEc.InitializeJob(_message, _tokenSource.Token);

var x = _jobEc.JobSteps;

jobExtension.FinalizeJob(_jobEc, _message, DateTime.UtcNow);

Assert.Equal(TaskResult.Succeeded, _jobEc.Result);
Assert.Equal(0, _jobEc.PostJobSteps.Count);
}
}
}
}

@@ -302,50 +302,6 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Heredoc_MissingNewLine()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "heredoc");
var content = new List<string>
{
"MY_ENV<<EOF",
"line one",
"line two",
"line three",
"EOF",
};
WriteContent(envFile, content, " ");
var ex = Assert.Throws<Exception>(() => _setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null));
Assert.Contains("Matching delimiter not found", ex.Message);
}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Heredoc_MissingNewLineMultipleLinesEnv()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "heredoc");
var content = new List<string>
{
"MY_ENV<<EOF",
@"line one
line two
line three",
"EOF",
};
WriteContent(envFile, content, " ");
var ex = Assert.Throws<Exception>(() => _setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null));
Assert.Contains("EOF marker missing new line", ex.Message);
}
}

#if OS_WINDOWS
[Fact]
[Trait("Level", "L0")]

@@ -1 +0,0 @@
{ "2.283.2": {"targetVersion":"2.284.1"}, "2.284.1": {"targetVersion":"2.285.0"}}
@@ -128,7 +128,6 @@ function layout ()
chmod +x "${LAYOUT_DIR}/bin/Runner.Worker"
chmod +x "${LAYOUT_DIR}/bin/Runner.PluginHost"
chmod +x "${LAYOUT_DIR}/bin/installdependencies.sh"
chmod +x "${LAYOUT_DIR}/safe_sleep.sh"
fi

heading "Setup externals folder for $RUNTIME_ID runner's layout"

@@ -1 +1 @@
2.289.0
2.286.1