Compare commits


27 Commits

Author SHA1 Message Date
Thomas Boop
b3b97b7328 Update releaseVersion 2022-03-18 14:36:38 -04:00
Thomas Boop
5f72720698 2.289.1 release (#1772)
* Update dependencies to latest versions (#1756)

* cleanup message displayed on job started/completed hooks (#1769)

* Revert "Added repository name and workflow file name to console output (#1761)" (#1770)

98aa9c1152

* 2.289.1 release notes (#1771)
2022-03-18 14:36:17 -04:00
Ferenc Hammerl
bc0c1263f0 Release version 289.0 2022-03-18 16:33:39 +01:00
Thomas Boop
82a4ca9a6b 2.289.0 release notes 2022-03-17 21:55:53 -04:00
Yashwanth Anantharaju
d081289ed5 postlines: refactor per feedback (#1755)
* refactor per feedback

* feedback

* nit

* commentify

* feedback

* feedback
2022-03-17 21:35:20 -04:00
Ferenc Hammerl
7d5e9cd70f Runner Job Started/Completed Hooks (#1737)
* Prototype for pre job hook

* Remove debug log

* Enable hooks again

* Initialize with hostContext

* Add event_path, fix no-path bug

* Allow script post steps

* Call script handler with correct pre post stage

* Add job completed hook

* Make filecommand work and hardcode shell

* Conditionally print step details and no telemetry for hooks

* Figure out which script to use

* Only check path for managed scripts

* Resture win dependency

* Nits

* Remove unused, add named params

* Telemetry + refactoring

* add message to job

* rename hooks remove stale comment

* cleanup

* Use .CreateService to create step

* Add L0s

* pr feedback

* update tests

* add disclaimer, clean up code

* spacing fix

* little more cleanup

* pr fix

* pr feedback

* Refactor to use JobExtension

* fix tests

* fix typo

* cleanup code

* more cleanup

* little more cleanup

* last bit of cleanup

* fix tests

* nit fix

* Update src/Runner.Worker/JobHookProvider.cs

Co-authored-by: Edward Thomson <ethomson@github.com>

* don't override runner telemetry

* pr feedback

* pr feedback

* pr feedback

Co-authored-by: Thomas Boop <thboop@github.com>
Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>
Co-authored-by: Edward Thomson <ethomson@github.com>
2022-03-17 21:35:04 -04:00
ruvceskistefan
98aa9c1152 Added repository name and workflow file name to console output (#1761)
* Adding repo name and workflow file to console output

* Add guard for empty workflow file name
2022-03-17 13:25:28 -04:00
Yashwanth Anantharaju
ddc700e9eb Send postlines via websocket if we can (#1730)
* feed via websocket

* feed via websocket

* feedback

* ensure right schema is used

* fix resiliency

* some fixes

* fix sending message

* chunk data

* let's abort, which will also dispose

* close gracefully
2022-03-15 14:01:18 -04:00
Konrad Pabjan
a0458aebfe Save record order for annotation links when creating issues (#1744)
* Save record order for annotation links when creating issues

* PR feedback

* Add tests for step and line numbers
2022-03-14 11:20:11 -04:00
Tingluo Huang
b2c6d093b2 Validate packages hash before uploading to github release in CD workflow. (#1745) 2022-03-14 09:21:13 -04:00
Thomas Boop
292a2e0ab3 Fix spelling (#1747) 2022-03-11 09:41:54 -05:00
Nikola Jokic
29cee52276 Prefer user who initiated install before (#1714) 2022-03-10 14:08:19 +01:00
Antoine Grondin
ad0d0c4d0a worker: expose github.triggering_actor as an env-var (#1726)
* worker: expose `github.triggering_actor` as an env-var

* worker: sort the allow list
2022-03-02 16:49:26 -05:00
Thomas Boop
2c6064a655 Update to v2.288.1 (#1723)
We hotfixed the releases/m288 branch to update to v2.288.1. This PR brings us to parity on the main branch
2022-03-01 18:19:56 +00:00
ruvceskistefan
af6c8e6edd Issue 1698: Use safe_sleep executable in bash scripts (#1707)
* use safe_sleep executable in bash scripts

* new line at the end of safe_sleep bash script

* Replacing relative paths with absolute paths and changing location of safe_sleep

Co-authored-by: Ferenc Hammerl <31069338+fhammerl@users.noreply.github.com>
2022-03-01 13:08:52 +00:00
Nikola Jokic
c15d3f10b2 Enhancement: RunnerService.js added logic to fail on N attempts if env variable exported (#1693)
* RunnerService.js added logic to fail on N attempts

* removed code gracefulShutdown, removed unused import
2022-03-01 13:55:25 +01:00
TingluoHuang
bdf1e90503 Prepare 2.288.0 runner releases. 2022-02-25 15:08:26 -05:00
Ferenc Hammerl
100c99f263 Force JS Actions Node version to 16 if FF is on unless user opted out (#1716)
* Set GH actions Node version to 16 if FF is on unless user opted out

* Add L0s (WIP)

* Wrap tests into theory

* Only check for node12 actions

* Refactor node version picking
2022-02-25 14:59:16 -05:00
Ferenc Hammerl
e8ccafea63 Add internal to node version function and use better env var name (#1715) 2022-02-25 14:59:02 -05:00
Thomas Boop
02d2cb8fcd Lets allow up to 150 characters for services on linux/mac (#1710)
* Lets allow up to 150 characters on linux/mac, just to avoid some issues with runner naming

* Add 4 randomized digits on mac/linux

* fix pragma issue

* fix test

* Address pr feedback

* reduce complexity

* lets make it cleaner!

* fix test

* fix logic
2022-02-24 21:05:51 -05:00
Rob Herley
0cbf3351f4 Update summary max file size annotation error text (#1712)
* add doc link to summary file size err annotation

* Update src/Runner.Common/Constants.cs

Co-authored-by: Konrad Pabjan <konradpabjan@github.com>

* remove i18n from url

Co-authored-by: Konrad Pabjan <konradpabjan@github.com>
2022-02-24 21:01:49 -05:00
Ferenc Hammerl
6abef8199f Use better exit codes and comparison (#1708) 2022-02-24 21:10:52 +01:00
ruvceskistefan
ec9830836b Issue 1596: Runner throws null ref exception when new line after EOF is missing (#1687)
* Issue 1596: runner throws nullref exception when writing env var

* Adding tests for missing new line after EOF marker

* Changing newline to new line
2022-02-23 09:55:59 -05:00
Nikola Jokic
460c32a337 Repaired hashFiles call so if error was thrown, it was returned to process invoker (#1678)
* hashFiles.ts added exit status on promise action

* generated layoutbin/hashfiles/index.js
2022-02-23 09:51:09 -05:00
ruvceskistefan
934027da60 Issue 1261: inconsistency of outputs (both canceled and cancelled are used) (#1624)
* Issue 1261: inconsistency of outputs

* Changing cancelled to canceled in one error message
2022-02-23 09:48:24 -05:00
Tingluo Huang
28f0027938 Add SHA to useragent. (#1694) 2022-02-17 20:01:48 +00:00
Thomas Boop
17153c9b29 Revert "revert node12 version due to fs.copyFileSync hang https://git… (#1651)
* Revert "revert node12 version due to fs.copyFileSync hang https://github.com/actions/runner/issues/1536 (#1537)"

bef164a12f

* check hashs before tests because tests rely on right values + update hashes

* fix tests

* use hc trace
2022-02-17 09:54:13 -05:00
41 changed files with 2993 additions and 4116 deletions

View File

@@ -260,6 +260,17 @@ jobs:
 console.log(releaseNote)
 core.setOutput('version', runnerVersion);
 core.setOutput('note', releaseNote);
+- name: Validate Packages HASH
+  working-directory: _package
+  run: |
+    ls -l
+    echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
+    echo "${{needs.build.outputs.osx-x64-sha}} actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
+    echo "${{needs.build.outputs.linux-x64-sha}} actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
+    echo "${{needs.build.outputs.linux-arm-sha}} actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
+    echo "${{needs.build.outputs.linux-arm64-sha}} actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
 # Create GitHub release
 - uses: actions/create-release@master
   id: createRelease
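
The step added above checks each published package against the SHA-256 that the build job recorded, using `echo "<sha> <file>" | shasum -a 256 -c`, before the GitHub release is created. For readers who want the same guard elsewhere, here is a minimal C# sketch of that check; the class and parameter names are illustrative and not part of this workflow:

using System;
using System.IO;
using System.Security.Cryptography;

static class PackageHashCheck
{
    // Compare a file's SHA-256 against the digest recorded at build time,
    // which is what `shasum -a 256 -c` verifies in the workflow step above.
    public static bool Matches(string packagePath, string expectedSha256Hex)
    {
        using var sha256 = SHA256.Create();
        using var stream = File.OpenRead(packagePath);
        var actualHex = Convert.ToHexString(sha256.ComputeHash(stream));
        return string.Equals(actualHex, expectedSha256Hex.Trim(), StringComparison.OrdinalIgnoreCase);
    }
}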

View File

@@ -1,19 +1,11 @@
 ## Features
-- Add Runner Configuration option to disable auto update `--disableupdate` (#1558)
-- Introduce `GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY` env variable to skip SSL Cert Verification on the Runner (#1616)
-- Adds support for downloading trimmed versions of the runner when the entire package does not need to be upgraded (#1568)
 ## Bugs
-- Set Outcome/Conclusion for composite action steps (#1600)
+- Fixed a crash on runner startup (#1770)
 ## Misc
-- Update `run.sh` to more gracefully handle updates (#1494)
-- Use 8Mb default chunking for File Container Uploads (#1626)
-- Performance improvements in handling large amounts of live logs (#1592)
-- Allow `./svc.sh stop` to exit as soon as runner process exits (#1580)
-- Add additional tracing to help troubleshoot job message corruption (#1587)
+- Clarified the type of step running when running job started or completed hooks (#1769)
 ## Windows x64

View File

@@ -1 +1 @@
-<Update to ./src/runnerversion when creating release>
+2.289.1

View File

@@ -1,6 +1,6 @@
 {
-"plugins": ["jest", "@typescript-eslint"],
+"plugins": ["@typescript-eslint"],
-"extends": ["plugin:github/es6"],
+"extends": ["plugin:github/recommended"],
 "parser": "@typescript-eslint/parser",
 "parserOptions": {
 "ecmaVersion": 9,
@@ -17,13 +17,16 @@
 "@typescript-eslint/no-require-imports": "error",
 "@typescript-eslint/array-type": "error",
 "@typescript-eslint/await-thenable": "error",
-"@typescript-eslint/ban-ts-ignore": "error",
+"@typescript-eslint/naming-convention": [
+"error",
+{
+"selector": "default",
+"format": ["camelCase"]
+}
+],
 "camelcase": "off",
-"@typescript-eslint/camelcase": "error",
-"@typescript-eslint/class-name-casing": "error",
 "@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
 "@typescript-eslint/func-call-spacing": ["error", "never"],
-"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
 "@typescript-eslint/no-array-constructor": "error",
 "@typescript-eslint/no-empty-interface": "error",
 "@typescript-eslint/no-explicit-any": "error",
@@ -33,7 +36,6 @@
 "@typescript-eslint/no-misused-new": "error",
 "@typescript-eslint/no-namespace": "error",
 "@typescript-eslint/no-non-null-assertion": "warn",
-"@typescript-eslint/no-object-literal-type-assertion": "error",
 "@typescript-eslint/no-unnecessary-qualifier": "error",
 "@typescript-eslint/no-unnecessary-type-assertion": "error",
 "@typescript-eslint/no-useless-constructor": "error",
@@ -41,19 +43,19 @@
 "@typescript-eslint/prefer-for-of": "warn",
 "@typescript-eslint/prefer-function-type": "warn",
 "@typescript-eslint/prefer-includes": "error",
-"@typescript-eslint/prefer-interface": "error",
 "@typescript-eslint/prefer-string-starts-ends-with": "error",
 "@typescript-eslint/promise-function-async": "error",
 "@typescript-eslint/require-array-sort-compare": "error",
 "@typescript-eslint/restrict-plus-operands": "error",
-"semi": "off",
 "@typescript-eslint/semi": ["error", "never"],
 "@typescript-eslint/type-annotation-spacing": "error",
-"@typescript-eslint/unbound-method": "error"
+"@typescript-eslint/unbound-method": "error",
+"filenames/match-regex" : "off",
+"github/no-then" : 1, // warning
+"semi": "off"
 },
 "env": {
 "node": true,
-"es6": true,
-"jest/globals": true
+"es6": true
 }
 }

File diff suppressed because it is too large

View File

@@ -25,10 +25,10 @@
 },
 "devDependencies": {
 "@types/node": "^12.7.12",
-"@typescript-eslint/parser": "^2.8.0",
+"@typescript-eslint/parser": "^5.15.0",
 "@zeit/ncc": "^0.20.5",
-"eslint": "^6.8.0",
+"eslint": "^8.11.0",
-"eslint-plugin-github": "^2.0.0",
+"eslint-plugin-github": "^4.3.5",
 "prettier": "^1.19.1",
 "typescript": "^3.6.4"
 }

View File

@@ -1,9 +1,9 @@
-import * as glob from '@actions/glob'
 import * as crypto from 'crypto'
 import * as fs from 'fs'
+import * as glob from '@actions/glob'
+import * as path from 'path'
 import * as stream from 'stream'
 import * as util from 'util'
-import * as path from 'path'
 async function run(): Promise<void> {
 // arg0 -> node

View File

@@ -4,7 +4,6 @@
 var childProcess = require("child_process");
 var path = require("path");
-const { exit } = require("process");
 var supported = ["linux", "darwin"];
@@ -27,7 +26,7 @@ if (exitServiceAfterNFailures <= 0) {
 var consecutiveFailureCount = 0;
-var gracefulShutdown = function (code) {
+var gracefulShutdown = function () {
 console.log("Shutting down runner listener");
 stopping = true;
 if (listener) {
@@ -109,7 +108,7 @@ var runService = function () {
 console.error(
 `${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
 );
-gracefulShutdown(5);
+gracefulShutdown();
 return;
 } else {
 console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
@@ -130,9 +129,9 @@ runService();
 console.log("Started running service");
 process.on("SIGINT", () => {
-gracefulShutdown(0);
+gracefulShutdown();
 });
 process.on("SIGTERM", () => {
-gracefulShutdown(0);
+gracefulShutdown();
 });

View File

@@ -63,7 +63,7 @@ function install()
 mkdir -p "${log_path}" || failed "failed to create ${log_path}"
 echo Creating ${PLIST_PATH}
-sed "s/{{User}}/${SUDO_USER:-$USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
+sed "s/{{User}}/${USER:-$SUDO_USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
 mv "${TEMP_PATH}" "${PLIST_PATH}" || failed "failed to copy plist"
 # Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.

View File

@@ -1557,12 +1557,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-const glob = __importStar(__webpack_require__(281));
 const crypto = __importStar(__webpack_require__(417));
 const fs = __importStar(__webpack_require__(747));
+const glob = __importStar(__webpack_require__(281));
+const path = __importStar(__webpack_require__(622));
 const stream = __importStar(__webpack_require__(413));
 const util = __importStar(__webpack_require__(669));
-const path = __importStar(__webpack_require__(622));
 function run() {
 var e_1, _a;
 return __awaiter(this, void 0, void 0, function* () {

View File

@@ -30,13 +30,13 @@ date "+[%F %T-%4N] Waiting for $runnerprocessname ($runnerpid) to complete" >> "
 while [ -e /proc/$runnerpid ]
 do
 date "+[%F %T-%4N] Process $runnerpid still running" >> "$logfile" 2>&1
-sleep 2
+"$rootfolder"/safe_sleep.sh 2
 done
 date "+[%F %T-%4N] Process $runnerpid finished running" >> "$logfile" 2>&1
 # start re-organize folders
 date "+[%F %T-%4N] Sleep 1 more second to make sure process exited" >> "$logfile" 2>&1
-sleep 1
+"$rootfolder"/safe_sleep.sh 1
 # the folder structure under runner root will be
 # ./bin -> bin.2.100.0 (junction folder)

View File

@@ -10,22 +10,6 @@ fi
 # Run
 shopt -s nocasematch
-safe_sleep() {
-if [ ! -x "$(command -v sleep)" ]; then
-if [ ! -x "$(command -v ping)" ]; then
-COUNT="0"
-while [[ $COUNT != 5000 ]]; do
-echo "SLEEP" > /dev/null
-COUNT=$[$COUNT+1]
-done
-else
-ping -c 5 127.0.0.1 > /dev/null
-fi
-else
-sleep 5
-fi
-}
 SOURCE="${BASH_SOURCE[0]}"
 while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
 DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
@@ -44,18 +28,18 @@ elif [[ $returnCode == 1 ]]; then
 exit 0
 elif [[ $returnCode == 2 ]]; then
 echo "Runner listener exit with retryable error, re-launch runner in 5 seconds."
-safe_sleep
+"$DIR"/safe_sleep.sh 5
-exit 1
+exit 2
 elif [[ $returnCode == 3 ]]; then
 # Sleep 5 seconds to wait for the runner update process finish
 echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
-safe_sleep
+"$DIR"/safe_sleep.sh 5
-exit 1
+exit 2
 elif [[ $returnCode == 4 ]]; then
 # Sleep 5 seconds to wait for the ephemeral runner update process finish
 echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
-safe_sleep
+"$DIR"/safe_sleep.sh 5
-exit 1
+exit 2
 else
 echo "Exiting with unknown error code: ${returnCode}"
 exit 0

View File

@@ -15,7 +15,7 @@ while :;
 do
 "$DIR"/run-helper.sh $*
 returnCode=$?
-if [[ $returnCode == 1 ]]; then
+if [[ $returnCode -eq 2 ]]; then
 echo "Restarting runner..."
 else
 echo "Exiting runner..."

View File

@@ -0,0 +1,6 @@
+#!/bin/bash
+SECONDS=0
+while [[ $SECONDS != $1 ]]; do
+:
+done

View File

@@ -156,7 +156,8 @@ namespace GitHub.Runner.Common
 public static readonly string LowDiskSpace = "LOW_DISK_SPACE";
 public static readonly string UnsupportedCommand = "UNSUPPORTED_COMMAND";
 public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/";
-public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
+public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
+public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
 }
 public static class RunnerEvent
@@ -188,6 +189,12 @@ namespace GitHub.Runner.Common
 public static readonly string Success = "success";
 }
+public static class Hooks
+{
+public static readonly string JobStartedStepName = "Set up runner";
+public static readonly string JobCompletedStepName = "Complete runner";
+}
 public static class Path
 {
 public static readonly string ActionsDirectory = "_actions";
@@ -219,14 +226,15 @@ namespace GitHub.Runner.Common
 public static readonly string AllowUnsupportedStopCommandTokens = "ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS";
 public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
 public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
+public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
 }
 public static class Agent
 {
 public static readonly string ToolsDirectory = "agent.ToolsDirectory";
-// Set this env var to force a node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
+// Set this env var to "node12" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
-public static readonly string ForcedNodeVersion = "GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION";
+public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
 }
 public static class System

View File

@@ -3,23 +3,27 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Net.Http;
+using System.Net.WebSockets;
+using System.Text;
 using System.Threading;
 using System.Threading.Tasks;
 using GitHub.Runner.Sdk;
 using GitHub.Services.Common;
 using GitHub.Services.WebApi;
+using Newtonsoft.Json;
 namespace GitHub.Runner.Common
 {
 [ServiceLocator(Default = typeof(JobServer))]
-public interface IJobServer : IRunnerService
+public interface IJobServer : IRunnerService, IAsyncDisposable
 {
 Task ConnectAsync(VssConnection jobConnection);
+void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
 // logging and console
 Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
-Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken);
+Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
-Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken);
 Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
 Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
 Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
@@ -34,6 +38,20 @@ namespace GitHub.Runner.Common
 private bool _hasConnection;
 private VssConnection _connection;
 private TaskHttpClient _taskClient;
+private ClientWebSocket _websocketClient;
+private ServiceEndpoint _serviceEndpoint;
+private int totalBatchedLinesAttemptedByWebsocket = 0;
+private int failedAttemptsToPostBatchedLinesByWebsocket = 0;
+private static readonly TimeSpan _minDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(100);
+private static readonly TimeSpan _maxDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(500);
+private static readonly int _minWebsocketFailurePercentageAllowed = 50;
+private static readonly int _minWebsocketBatchedLinesCountToConsider = 5;
+private Task _websocketConnectTask;
 public async Task ConnectAsync(VssConnection jobConnection)
 {
@@ -117,6 +135,19 @@ namespace GitHub.Runner.Common
 }
 }
+public void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint)
+{
+this._serviceEndpoint = serviceEndpoint;
+InitializeWebsocketClient(TimeSpan.Zero);
+}
+public ValueTask DisposeAsync()
+{
+_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, "Shutdown", CancellationToken.None);
+GC.SuppressFinalize(this);
+return ValueTask.CompletedTask;
+}
 private void CheckConnection()
 {
 if (!_hasConnection)
@@ -125,6 +156,48 @@ namespace GitHub.Runner.Common
 }
 }
+private void InitializeWebsocketClient(TimeSpan delay)
+{
+if (_serviceEndpoint.Authorization != null &&
+_serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out var accessToken) &&
+!string.IsNullOrEmpty(accessToken))
+{
+if (_serviceEndpoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl) && !string.IsNullOrEmpty(feedStreamUrl))
+{
+// let's ensure we use the right scheme
+feedStreamUrl = feedStreamUrl.Replace("https://", "wss://").Replace("http://", "ws://");
+Trace.Info($"Creating websocket client ..." + feedStreamUrl);
+this._websocketClient = new ClientWebSocket();
+this._websocketClient.Options.SetRequestHeader("Authorization", $"Bearer {accessToken}");
+this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay);
+}
+else
+{
+Trace.Info($"No FeedStreamUrl found, so we will use Rest API calls for sending feed data");
+}
+}
+else
+{
+Trace.Info($"No access token from the service endpoint");
+}
+}
+private async Task ConnectWebSocketClient(string feedStreamUrl, TimeSpan delay)
+{
+try
+{
+Trace.Info($"Attempting to start websocket client with delay {delay}.");
+await Task.Delay(delay);
+await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), default(CancellationToken));
+Trace.Info($"Successfully started websocket client.");
+}
+catch(Exception ex)
+{
+Trace.Info("Exception caught during websocket client connect, fallback of HTTP would be used now instead of websocket.");
+Trace.Error(ex);
+}
+}
 //-----------------------------------------------------------------
 // Feedback: WebConsole, TimelineRecords and Logs
 //-----------------------------------------------------------------
@@ -135,16 +208,72 @@ namespace GitHub.Runner.Common
 return _taskClient.AppendLogContentAsync(scopeIdentifier, hubName, planId, logId, uploadStream, cancellationToken: cancellationToken);
 }
-public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken)
+public async Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken)
 {
 CheckConnection();
-return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
-}
-public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken)
-{
-CheckConnection();
-return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine, cancellationToken: cancellationToken);
+var pushedLinesViaWebsocket = false;
+if (_websocketConnectTask != null)
+{
+await _websocketConnectTask;
+}
+// "_websocketClient != null" implies either: We have a successful connection OR we have to attempt sending again and then reconnect
+// ...in other words, if websocket client is null, we will skip sending to websocket and just use rest api calls to send data
+if (_websocketClient != null)
+{
+var linesWrapper = startLine.HasValue? new TimelineRecordFeedLinesWrapper(stepId, lines, startLine.Value): new TimelineRecordFeedLinesWrapper(stepId, lines);
+var jsonData = StringUtil.ConvertToJson(linesWrapper);
+try
+{
+totalBatchedLinesAttemptedByWebsocket++;
+var jsonDataBytes = Encoding.UTF8.GetBytes(jsonData);
+// break the message into chunks of 1024 bytes
+for (var i = 0; i < jsonDataBytes.Length; i += 1 * 1024)
+{
+var lastChunk = i + (1 * 1024) >= jsonDataBytes.Length;
+var chunk = new ArraySegment<byte>(jsonDataBytes, i, Math.Min(1 * 1024, jsonDataBytes.Length - i));
+await _websocketClient.SendAsync(chunk, WebSocketMessageType.Text, endOfMessage:lastChunk, cancellationToken);
+}
+pushedLinesViaWebsocket = true;
+}
+catch (Exception ex)
+{
+failedAttemptsToPostBatchedLinesByWebsocket++;
+Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
+Trace.Error(ex);
+if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
+{
+// let's consider failure percentage
+if (failedAttemptsToPostBatchedLinesByWebsocket * 100 / totalBatchedLinesAttemptedByWebsocket > _minWebsocketFailurePercentageAllowed)
+{
+Trace.Info($"Exhausted websocket allowed retries, we will not attempt websocket connection for this job to post lines again.");
+_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.InternalServerError, "Shutdown due to failures", cancellationToken);
+// By setting it to null, we will ensure that we never try websocket path again for this job
+_websocketClient = null;
+}
+}
+if (_websocketClient != null)
+{
+var delay = BackoffTimerHelper.GetRandomBackoff(_minDelayForWebsocketReconnect, _maxDelayForWebsocketReconnect);
+Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}).");
+InitializeWebsocketClient(delay);
+}
+}
+}
+if (!pushedLinesViaWebsocket)
+{
+if (startLine.HasValue)
+{
+await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine.Value, cancellationToken: cancellationToken);
+}
+else
+{
+await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
+}
+}
 }
 public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken)
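
The reworked AppendTimelineRecordFeedAsync above prefers the websocket feed, splitting the JSON payload into 1 KB frames and only marking endOfMessage on the final frame, and falls back to the REST call when the socket keeps failing. A condensed, hedged C# sketch of just the chunked send, assuming an already-connected ClientWebSocket; error handling and the REST fallback are omitted here:

using System;
using System.Net.WebSockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

static class FeedSocket
{
    // Send a JSON payload as a single websocket text message, 1024 bytes per frame,
    // flagging endOfMessage only on the last chunk, as in the diff above.
    public static async Task SendChunkedAsync(ClientWebSocket socket, string json, CancellationToken token)
    {
        var bytes = Encoding.UTF8.GetBytes(json);
        const int chunkSize = 1024;
        for (var offset = 0; offset < bytes.Length; offset += chunkSize)
        {
            var count = Math.Min(chunkSize, bytes.Length - offset);
            var lastChunk = offset + chunkSize >= bytes.Length;
            await socket.SendAsync(new ArraySegment<byte>(bytes, offset, count),
                WebSocketMessageType.Text, endOfMessage: lastChunk, token);
        }
    }
}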

View File

@@ -71,7 +71,7 @@ namespace GitHub.Runner.Common
 // Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
 // Then the dequeue will happen every 500ms.
 // In this way, customer still can get instance live console output on job start,
 // at the same time we can cut the load to server after the build run for more than 60s
 private int _webConsoleLineAggressiveDequeueCount = 0;
 private const int _webConsoleLineAggressiveDequeueLimit = 4 * 60;
@@ -89,6 +89,10 @@ namespace GitHub.Runner.Common
 {
 Trace.Entering();
+var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
+_jobServer.InitializeWebsocketClient(serviceEndPoint);
 if (_queueInProcess)
 {
 Trace.Info("No-opt, all queue process tasks are running.");
@@ -156,6 +160,9 @@ namespace GitHub.Runner.Common
 await ProcessTimelinesUpdateQueueAsync(runOnce: true);
 Trace.Info("Timeline update queue drained.");
+Trace.Info($"Disposing job server ...");
+await _jobServer.DisposeAsync();
 Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
 }
@@ -292,15 +299,7 @@ namespace GitHub.Runner.Common
 {
 try
 {
-// we will not requeue failed batch, since the web console lines are time sensitive.
-if (batch[0].LineNumber.HasValue)
-{
-await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber.Value, default(CancellationToken));
-}
-else
-{
-await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), default(CancellationToken));
-}
+await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, default(CancellationToken));
 if (_firstConsoleOutputs)
 {
@@ -489,8 +488,8 @@ namespace GitHub.Runner.Common
 if (runOnce)
 {
 // continue process timeline records update,
 // we might have more records need update,
 // since we just create a new sub-timeline
 if (pendingSubtimelineUpdate)
 {
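
The comment retained as context above documents the dequeue cadence: the web-console queue drains roughly every 250 ms for the first 4 * 60 iterations (about a minute of a job), then relaxes to 500 ms to reduce load on the service. A one-line C# sketch of that schedule, with an illustrative helper name:

using System;

static class DequeueCadence
{
    // ~250 ms drains for the first 4 * 60 iterations (about a minute), then 500 ms.
    public static TimeSpan DelayFor(int iteration) =>
        iteration < 4 * 60 ? TimeSpan.FromMilliseconds(250) : TimeSpan.FromMilliseconds(500);
}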

View File

@@ -7,9 +7,9 @@ namespace GitHub.Runner.Common.Util
 {
 private const string _defaultNodeVersion = "node16";
 public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] {"node12", "node16"});
-public static string GetNodeVersion()
+public static string GetInternalNodeVersion()
 {
-var forcedNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedNodeVersion);
+var forcedNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion);
 return !string.IsNullOrEmpty(forcedNodeVersion) && BuiltInNodeVersions.Contains(forcedNodeVersion) ? forcedNodeVersion : _defaultNodeVersion;
 }
 }
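
A small usage sketch of the renamed helper, assuming the constant resolves to the ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION variable added in Constants.cs above; per the comment in that file, this only affects the node used for internal work such as hashFiles, not the node that actions run on:

// Force the runner's internal node (hashFiles, check scripts) down to node12.
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION", "node12");
Console.WriteLine(NodeUtil.GetInternalNodeVersion()); // "node12"
// Unset or unrecognized values fall back to the default, "node16".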

View File

@@ -315,7 +315,7 @@ namespace GitHub.Runner.Listener.Check
 });
 var downloadCertScript = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "downloadCert");
-var node = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
+var node = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
 result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node} \"{downloadCertScript}\"' ");
 result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
 await processInvoker.ExecuteAsync(

View File

@@ -145,7 +145,7 @@ namespace GitHub.Runner.Listener.Check
 });
 var makeWebRequestScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "makeWebRequest.js");
-var node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
+var node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
 result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node} \"{makeWebRequestScript}\"' ");
 result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
 await processInvoker.ExecuteAsync(

View File

@@ -48,13 +48,12 @@ namespace GitHub.Runner.Listener.Configuration
 string repoOrOrgName = regex.Replace(settings.RepoOrOrgName, "-");
 serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgName, settings.AgentName);
-if (serviceName.Length > 80)
+if (serviceName.Length > MaxServiceNameLength)
 {
-Trace.Verbose($"Calculated service name is too long (> 80 chars). Trying again by calculating a shorter name.");
+Trace.Verbose($"Calculated service name is too long (> {MaxServiceNameLength} chars). Trying again by calculating a shorter name.");
+// Add 5 to add -xxxx random number on the end
-int exceededCharLength = serviceName.Length - 80;
+int exceededCharLength = serviceName.Length - MaxServiceNameLength + 5;
-string repoOrOrgNameSubstring = StringUtil.SubstringPrefix(repoOrOrgName, 45);
+string repoOrOrgNameSubstring = StringUtil.SubstringPrefix(repoOrOrgName, MaxRepoOrgCharacters);
 exceededCharLength -= repoOrOrgName.Length - repoOrOrgNameSubstring.Length;
@@ -66,6 +65,10 @@ namespace GitHub.Runner.Listener.Configuration
 runnerNameSubstring = StringUtil.SubstringPrefix(settings.AgentName, settings.AgentName.Length - exceededCharLength);
 }
+// Lets add a suffix with a random number to reduce the chance of collisions between runner names once we truncate
+var random = new Random();
+var num = random.Next(1000, 9999).ToString();
+runnerNameSubstring +=$"-{num}";
 serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgNameSubstring, runnerNameSubstring);
 }
@@ -73,5 +76,12 @@ namespace GitHub.Runner.Listener.Configuration
 Trace.Info($"Service name '{serviceName}' display name '{serviceDisplayName}' will be used for service configuration.");
 }
+#if (OS_LINUX || OS_OSX)
+const int MaxServiceNameLength = 150;
+const int MaxRepoOrgCharacters = 70;
+#elif OS_WINDOWS
+const int MaxServiceNameLength = 80;
+const int MaxRepoOrgCharacters = 45;
+#endif
 }
 }
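
The shortening logic above now works from per-OS limits (150 characters with a 70-character repo/org prefix on Linux/macOS, 80 and 45 on Windows) and appends a random four-digit suffix so that two truncated names are unlikely to collide. A hedged, standalone C# sketch of the same computation; the "actions.runner.{repo}.{name}" pattern and the method name are assumptions used only for illustration:

using System;

static class ServiceNameShortener
{
    public static string Shorten(string repoOrOrg, string runnerName, int maxLength, int maxRepoOrgChars)
    {
        static string Build(string repo, string name) => $"actions.runner.{repo}.{name}";

        var serviceName = Build(repoOrOrg, runnerName);
        if (serviceName.Length <= maxLength)
        {
            return serviceName;
        }

        // Reserve 5 characters for the "-NNNN" suffix added below.
        var exceeded = serviceName.Length - maxLength + 5;
        var repoPart = repoOrOrg.Length > maxRepoOrgChars ? repoOrOrg[..maxRepoOrgChars] : repoOrOrg;
        exceeded -= repoOrOrg.Length - repoPart.Length;

        var namePart = exceeded > 0 && exceeded < runnerName.Length
            ? runnerName[..(runnerName.Length - exceeded)]
            : runnerName;

        // Random suffix to reduce the chance of collisions after truncation.
        var suffix = new Random().Next(1000, 9999);
        return Build(repoPart, $"{namePart}-{suffix}");
    }
}

For example, Shorten(repo, name, 150, 70) mirrors the Linux/macOS constants above, and Shorten(repo, name, 80, 45) the Windows ones.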

View File

@@ -1070,7 +1070,7 @@ namespace GitHub.Runner.Listener
 var stopWatch = Stopwatch.StartNew();
 string binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
-string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
+string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
 string hashFilesScript = Path.Combine(binDir, "hashFiles");
 var hashResult = string.Empty;

View File

@@ -1,7 +1,5 @@
 using System;
 using System.Collections.Generic;
-using System.IO;
-using System.Text;
 using System.Threading.Tasks;
 using GitHub.DistributedTask.ObjectTemplating;
 using GitHub.DistributedTask.ObjectTemplating.Tokens;
@@ -11,6 +9,7 @@ using GitHub.DistributedTask.Pipelines.ObjectTemplating;
 using GitHub.Runner.Common;
 using GitHub.Runner.Common.Util;
 using GitHub.Runner.Sdk;
+using GitHub.Runner.Worker;
 using GitHub.Runner.Worker.Handlers;
 using Pipelines = GitHub.DistributedTask.Pipelines;
@@ -141,21 +140,7 @@ namespace GitHub.Runner.Worker
 IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
-// Makes directory for event_path data
-var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
-var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
-Directory.CreateDirectory(workflowDirectory);
-var gitHubEvent = ExecutionContext.GetGitHubContext("event");
-// adds the GitHub event path/file if the event exists
-if (gitHubEvent != null)
-{
-var workflowFile = Path.Combine(workflowDirectory, "event.json");
-Trace.Info($"Write event payload to {workflowFile}");
-File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
-ExecutionContext.SetGitHubContext("event_path", workflowFile);
-}
+ExecutionContext.WriteWebhookPayload();
 // Set GITHUB_ACTION_REPOSITORY if this Action is from a repository
 if (Action.Reference is Pipelines.RepositoryPathReference repoPathReferenceAction &&

View File

@@ -109,6 +109,7 @@ namespace GitHub.Runner.Worker
 void ForceTaskComplete();
 void RegisterPostJobStep(IStep step);
 void PublishStepTelemetry();
+void WriteWebhookPayload();
 }
 public sealed class ExecutionContext : RunnerService, IExecutionContext
@@ -550,10 +551,15 @@ namespace GitHub.Runner.Worker
 issue.Message = issue.Message[.._maxIssueMessageLength];
 }
+// Tracking the line number (logFileLineNumber) and step number (stepNumber) for each issue that gets created
+// Actions UI from the run summary page use both values to easily link to an exact locations in logs where annotations originate from
+if (_record.Order != null)
+{
+issue.Data["stepNumber"] = _record.Order.ToString();
+}
 if (issue.Type == IssueType.Error)
 {
-// tracking line number for each issue in log file
-// log UI use this to navigate from issue to log
 if (!string.IsNullOrEmpty(logMessage))
 {
 long logLineNumber = Write(WellKnownTags.Error, logMessage);
@@ -569,8 +575,6 @@ namespace GitHub.Runner.Worker
 }
 else if (issue.Type == IssueType.Warning)
 {
-// tracking line number for each issue in log file
-// log UI use this to navigate from issue to log
 if (!string.IsNullOrEmpty(logMessage))
 {
 long logLineNumber = Write(WellKnownTags.Warning, logMessage);
@@ -586,9 +590,6 @@ namespace GitHub.Runner.Worker
 }
 else if (issue.Type == IssueType.Notice)
 {
-// tracking line number for each issue in log file
-// log UI use this to navigate from issue to log
 if (!string.IsNullOrEmpty(logMessage))
 {
 long logLineNumber = Write(WellKnownTags.Notice, logMessage);
@@ -680,7 +681,7 @@ namespace GitHub.Runner.Worker
 if (Global.Variables.GetBoolean("DistributedTask.ForceInternalNodeVersionOnRunnerTo12") ?? false)
 {
-Environment.SetEnvironmentVariable(Constants.Variables.Agent.ForcedNodeVersion, "node12");
+Environment.SetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion, "node12");
 }
 // Environment variables shared across all actions
@@ -991,6 +992,24 @@ namespace GitHub.Runner.Worker
 }
 }
+public void WriteWebhookPayload()
+{
+// Makes directory for event_path data
+var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
+var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
+Directory.CreateDirectory(workflowDirectory);
+var gitHubEvent = GetGitHubContext("event");
+// adds the GitHub event path/file if the event exists
+if (gitHubEvent != null)
+{
+var workflowFile = Path.Combine(workflowDirectory, "event.json");
+Trace.Info($"Write event payload to {workflowFile}");
+File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
+SetGitHubContext("event_path", workflowFile);
+}
+}
 private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order)
 {
 _mainTimelineId = timelineId;
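
WriteWebhookPayload, which replaces the inline event-writing block removed a few files earlier in this compare, is what backs the event_path context: the webhook body is written to _github_workflow/event.json and exposed to steps through GITHUB_EVENT_PATH. A minimal, hedged consumer sketch of that file from a step's point of view:

using System;
using System.IO;
using System.Text.Json;

var eventPath = Environment.GetEnvironmentVariable("GITHUB_EVENT_PATH");
if (!string.IsNullOrEmpty(eventPath) && File.Exists(eventPath))
{
    using var payload = JsonDocument.Parse(File.ReadAllText(eventPath));
    // "repository.full_name" is present for most event types; treat it as optional.
    if (payload.RootElement.TryGetProperty("repository", out var repo) &&
        repo.TryGetProperty("full_name", out var fullName))
    {
        Console.WriteLine(fullName.GetString());
    }
}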

View File

@@ -63,7 +63,7 @@ namespace GitHub.Runner.Worker.Expressions
 string binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
 string runnerRoot = new DirectoryInfo(binDir).Parent.FullName;
-string node = Path.Combine(runnerRoot, "externals", NodeUtil.GetNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
+string node = Path.Combine(runnerRoot, "externals", NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
 string hashFilesScript = Path.Combine(binDir, "hashFiles");
 var hashResult = string.Empty;
 var p = new ProcessInvoker(new HashFilesTrace(context.Trace));

View File

@@ -298,7 +298,7 @@ namespace GitHub.Runner.Worker
 if (fileSize > _attachmentSizeLimit)
 {
-context.Error($"$GITHUB_STEP_SUMMARY supports content up a size of {_attachmentSizeLimit / 1024}k got {fileSize / 1024}k");
+context.Error(String.Format(Constants.Runner.UnsupportedSummarySize, _attachmentSizeLimit / 1024, fileSize / 1024));
 Trace.Info($"Step Summary file ({filePath}) is too large ({fileSize} bytes); skipping attachment upload");
 return;
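
The replaced message is now built from the shared format string added to Constants.cs earlier in this diff, with the limit and the actual size injected as the {0} and {1} placeholders (both already divided down to kilobytes). A small sketch of that formatting with arbitrary example numbers; the template text here is abbreviated:

using System;

const string template = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k.";
// e.g. an illustrative 128 KB limit and a 300 KB summary file
Console.WriteLine(string.Format(template, 128, 300));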

View File

@@ -8,10 +8,10 @@ namespace GitHub.Runner.Worker
 {
 private readonly HashSet<string> _contextEnvAllowlist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
 {
-"action",
 "action_path",
 "action_ref",
 "action_repository",
+"action",
 "actor",
 "api_url",
 "base_ref",
@@ -22,12 +22,12 @@ namespace GitHub.Runner.Worker
 "head_ref",
 "job",
 "path",
-"ref",
 "ref_name",
 "ref_protected",
 "ref_type",
-"repository",
+"ref",
 "repository_owner",
+"repository",
 "retention_days",
 "run_attempt",
 "run_id",
@@ -35,6 +35,7 @@ namespace GitHub.Runner.Worker
 "server_url",
 "sha",
 "step_summary",
+"triggering_actor",
 "workflow",
 "workspace",
 };
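
Adding triggering_actor to this allowlist is what makes the value reach steps: allowlisted github context keys are surfaced as GITHUB_* environment variables, so this one shows up as GITHUB_TRIGGERING_ACTOR. A one-line C# sketch of that naming convention; the helper name is illustrative:

static class GitHubContextEnv
{
    // "triggering_actor" -> "GITHUB_TRIGGERING_ACTOR"
    public static string ToEnvironmentVariable(string contextKey) =>
        "GITHUB_" + contextKey.ToUpperInvariant();
}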

View File

@@ -36,6 +36,7 @@ namespace GitHub.Runner.Worker.Handlers
 protected IActionCommandManager ActionCommandManager { get; private set; }
 public Pipelines.ActionStepDefinitionReference Action { get; set; }
+public bool IsActionStep => Action != null;
 public Dictionary<string, string> Environment { get; set; }
 public Variables RuntimeVariables { get; set; }
 public IExecutionContext ExecutionContext { get; set; }
@@ -49,13 +50,18 @@ namespace GitHub.Runner.Worker.Handlers
 // Print out action details
 PrintActionDetails(stage);
-// Get telemetry for the action.
+// Get telemetry for the action
-PopulateActionTelemetry();
+PopulateActionTelemetry(stage);
 }
-protected void PopulateActionTelemetry()
+protected void PopulateActionTelemetry(ActionRunStage stage)
 {
-if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
+if (!IsActionStep)
+{
+ExecutionContext.StepTelemetry.Type = "runner";
+ExecutionContext.StepTelemetry.Action = $"{stage} Job Hook";
+}
+else if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
 {
 ExecutionContext.StepTelemetry.Type = "docker";
 var registryAction = Action as Pipelines.ContainerRegistryReference;

View File

@@ -55,7 +55,23 @@ namespace GitHub.Runner.Worker.Handlers
else if (data.ExecutionType == ActionExecutionType.NodeJS) else if (data.ExecutionType == ActionExecutionType.NodeJS)
{ {
handler = HostContext.CreateService<INodeScriptActionHandler>(); handler = HostContext.CreateService<INodeScriptActionHandler>();
(handler as INodeScriptActionHandler).Data = data as NodeJSActionExecutionData; var nodeData = data as NodeJSActionExecutionData;
// With node12 EoL in 04/2022, we want to be able to uniformly upgrade all JS actions to node16 from the server
if (string.Equals(nodeData.NodeVersion, "node12", StringComparison.InvariantCultureIgnoreCase) &&
(executionContext.Global.Variables.GetBoolean("DistributedTask.ForceGithubJavascriptActionsToNode16") ?? false))
{
// The user can opt out of this behaviour by setting this variable to true, either setting 'env' in their workflow or as an environment variable on their machine
executionContext.Global.EnvironmentVariables.TryGetValue(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, out var workflowOptOut);
var isWorkflowOptOutSet = !string.IsNullOrEmpty(workflowOptOut);
var isLocalOptOut = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion));
bool isOptOut = isWorkflowOptOutSet ? StringUtil.ConvertToBoolean(workflowOptOut) : isLocalOptOut;
if (!isOptOut)
{
nodeData.NodeVersion = "node16";
}
}
(handler as INodeScriptActionHandler).Data = nodeData;
} }
else if (data.ExecutionType == ActionExecutionType.Script) else if (data.ExecutionType == ActionExecutionType.Script)
{ {
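The opt-out above resolves in a fixed order: a value set through the workflow's env wins; only when it is absent does the machine-level environment variable apply. A condensed, hedged sketch of that decision, assuming the constant resolves to the ACTIONS_ALLOW_UNSECURE_NODE_VERSION variable name:

// Sketch of the node12 -> node16 upgrade decision shown above (illustration only).
// Assumption: Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion == "ACTIONS_ALLOW_UNSECURE_NODE_VERSION".
using System;
using System.Collections.Generic;

static class NodeUpgradeSketch
{
    public static string ResolveNodeVersion(
        string requestedVersion,                 // e.g. "node12" from the action's metadata
        bool serverForcesNode16,                 // DistributedTask.ForceGithubJavascriptActionsToNode16
        IDictionary<string, string> workflowEnv) // the job's environment variables
    {
        if (!string.Equals(requestedVersion, "node12", StringComparison.OrdinalIgnoreCase) || !serverForcesNode16)
        {
            return requestedVersion;
        }

        // Workflow-level env takes priority over the machine-level environment variable.
        // (The runner uses StringUtil.ConvertToBoolean, which accepts more spellings than bool.TryParse.)
        workflowEnv.TryGetValue("ACTIONS_ALLOW_UNSECURE_NODE_VERSION", out var workflowOptOut);
        bool optOut = !string.IsNullOrEmpty(workflowOptOut)
            ? bool.TryParse(workflowOptOut, out var w) && w
            : bool.TryParse(Environment.GetEnvironmentVariable("ACTIONS_ALLOW_UNSECURE_NODE_VERSION"), out var m) && m;

        return optOut ? "node12" : "node16";
    }
}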

View File

@@ -6,7 +6,6 @@ using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker.Handlers namespace GitHub.Runner.Worker.Handlers
{ {

View File

@@ -24,16 +24,22 @@ namespace GitHub.Runner.Worker.Handlers
protected override void PrintActionDetails(ActionRunStage stage) protected override void PrintActionDetails(ActionRunStage stage)
{ {
// if we're executing a Job Extension, we won't have an 'Action'
if (stage == ActionRunStage.Post) if (!IsActionStep)
{ {
throw new NotSupportedException("Script action should not have 'Post' job action."); if (Inputs.TryGetValue("path", out var path))
{
ExecutionContext.Output($"##[group]Run '{path}'");
}
else
{
throw new InvalidOperationException("Inputs 'path' must be set for job extensions");
}
} }
else if (Action.Type == Pipelines.ActionSourceType.Script)
Inputs.TryGetValue("script", out string contents);
contents = contents ?? string.Empty;
if (Action.Type == Pipelines.ActionSourceType.Script)
{ {
Inputs.TryGetValue("script", out string contents);
contents = contents ?? string.Empty;
var firstLine = contents.TrimStart(' ', '\t', '\r', '\n'); var firstLine = contents.TrimStart(' ', '\t', '\r', '\n');
var firstNewLine = firstLine.IndexOfAny(new[] { '\r', '\n' }); var firstNewLine = firstLine.IndexOfAny(new[] { '\r', '\n' });
if (firstNewLine >= 0) if (firstNewLine >= 0)
@@ -42,17 +48,16 @@ namespace GitHub.Runner.Worker.Handlers
} }
ExecutionContext.Output($"##[group]Run {firstLine}"); ExecutionContext.Output($"##[group]Run {firstLine}");
var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)
{
// Bright Cyan color
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
}
} }
else else
{ {
throw new InvalidOperationException($"Invalid action type {Action.Type} for {nameof(ScriptHandler)}"); throw new InvalidOperationException($"Invalid action type {Action?.Type} for {nameof(ScriptHandler)}");
}
var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)
{
// Bright Cyan color
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
} }
string argFormat; string argFormat;
@@ -132,11 +137,6 @@ namespace GitHub.Runner.Worker.Handlers
public async Task RunAsync(ActionRunStage stage) public async Task RunAsync(ActionRunStage stage)
{ {
if (stage == ActionRunStage.Post)
{
throw new NotSupportedException("Script action should not have 'Post' job action.");
}
// Validate args // Validate args
Trace.Entering(); Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext)); ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
@@ -212,7 +212,8 @@ namespace GitHub.Runner.Worker.Handlers
} }
} }
if (!string.IsNullOrEmpty(shellCommand)) // Don't override runner telemetry here
if (!string.IsNullOrEmpty(shellCommand) && IsActionStep)
{ {
ExecutionContext.StepTelemetry.Action = shellCommand; ExecutionContext.StepTelemetry.Action = shellCommand;
} }
@@ -222,10 +223,24 @@ namespace GitHub.Runner.Worker.Handlers
{ {
throw new ArgumentException("Invalid shell option. Shell must be a valid built-in (bash, sh, cmd, powershell, pwsh) or a format string containing '{0}'"); throw new ArgumentException("Invalid shell option. Shell must be a valid built-in (bash, sh, cmd, powershell, pwsh) or a format string containing '{0}'");
} }
string scriptFilePath, resolvedScriptPath;
// We do not know the full path until we know what shell is being used, so that we can determine the file extension if (IsActionStep)
var scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}"); {
var resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}"; // We do not not the full path until we know what shell is being used, so that we can determine the file extension
scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}");
resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}";
}
else
{
// JobExtensionRunners run a script file; we load that from the inputs here
if (!Inputs.ContainsKey("path"))
{
throw new ArgumentException("Expected 'path' input to be set");
}
scriptFilePath = Inputs["path"];
ArgUtil.NotNullOrEmpty(scriptFilePath, "path");
resolvedScriptPath = Inputs["path"].Replace("\"", "\\\"");
}
// Format arg string with script path // Format arg string with script path
var arguments = string.Format(argFormat, resolvedScriptPath); var arguments = string.Format(argFormat, resolvedScriptPath);
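To make the formatting above concrete: for a job hook the resolved path comes straight from the 'path' input, and the shell's argument format string receives it as {0}. A hypothetical example, assuming 'sh'-style default arguments of "-e {0}":

// Hypothetical values for illustration; the real default arguments live in ScriptHandlerHelpers.
var argFormat = "-e {0}";                                     // assumed 'sh' default argument format
var resolvedScriptPath = "/opt/runner/hooks/job-started.sh";  // hypothetical hook path from the 'path' input
var arguments = string.Format(argFormat, resolvedScriptPath); // "-e /opt/runner/hooks/job-started.sh"
// The step host then launches roughly: /usr/bin/bash -e /opt/runner/hooks/job-started.sh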
@@ -241,9 +256,12 @@ namespace GitHub.Runner.Worker.Handlers
#else #else
// Don't add a BOM. It causes the script to fail on some operating systems (e.g. on Ubuntu 14). // Don't add a BOM. It causes the script to fail on some operating systems (e.g. on Ubuntu 14).
var encoding = new UTF8Encoding(false); var encoding = new UTF8Encoding(false);
#endif #endif
// Script is written to local path (ie host) but executed relative to the StepHost, which may be a container if (IsActionStep)
File.WriteAllText(scriptFilePath, contents, encoding); {
// Script is written to local path (ie host) but executed relative to the StepHost, which may be a container
File.WriteAllText(scriptFilePath, contents, encoding);
}
// Prepend PATH // Prepend PATH
AddPrependPathToEnvironment(); AddPrependPathToEnvironment();
@@ -266,7 +284,7 @@ namespace GitHub.Runner.Worker.Handlers
if (Environment.ContainsKey("DYLD_INSERT_LIBRARIES")) // We don't check `isContainerStepHost` because we don't support container on macOS if (Environment.ContainsKey("DYLD_INSERT_LIBRARIES")) // We don't check `isContainerStepHost` because we don't support container on macOS
{ {
// launch `node macOSRunInvoker.js shell args` instead of `shell args` to avoid macOS SIP removing `DYLD_INSERT_LIBRARIES` when launching the process // launch `node macOSRunInvoker.js shell args` instead of `shell args` to avoid macOS SIP removing `DYLD_INSERT_LIBRARIES` when launching the process
string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetNodeVersion(), "bin", $"node{IOUtil.ExeExtension}"); string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
string macOSRunInvoker = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "macos-run-invoker.js"); string macOSRunInvoker = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "macos-run-invoker.js");
arguments = $"\"{macOSRunInvoker.Replace("\"", "\\\"")}\" \"{fileName.Replace("\"", "\\\"")}\" {arguments}"; arguments = $"\"{macOSRunInvoker.Replace("\"", "\\\"")}\" \"{fileName.Replace("\"", "\\\"")}\" {arguments}";
fileName = node; fileName = node;

View File

@@ -1,6 +1,8 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Worker.Handlers namespace GitHub.Runner.Worker.Handlers
{ {
@@ -79,5 +81,22 @@ namespace GitHub.Runner.Worker.Handlers
throw new ArgumentException($"Failed to parse COMMAND [..ARGS] from {shellOption}"); throw new ArgumentException($"Failed to parse COMMAND [..ARGS] from {shellOption}");
} }
} }
internal static string GetDefaultShellForScript(string path, Common.Tracing trace, string prependPath)
{
var format = "{0} {1}";
switch (Path.GetExtension(path))
{
case ".sh":
// use 'sh' args but prefer bash
var pathToShell = WhichUtil.Which("bash", false, trace, prependPath) ?? WhichUtil.Which("sh", true, trace, prependPath);
return string.Format(format, pathToShell, _defaultArguments["sh"]);
case ".ps1":
var pathToPowershell = WhichUtil.Which("pwsh", false, trace, prependPath) ?? WhichUtil.Which("powershell", true, trace, prependPath);
return string.Format(format, pathToPowershell, _defaultArguments["powershell"]);
default:
throw new ArgumentException($"{path} is not a valid path to a script. Make sure it ends in '.sh' or '.ps1'.");
}
}
} }
} }
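A hedged usage sketch of the helper above: given a hook script path it prefers bash or pwsh, falls back to sh or powershell, and returns a "<shell> <default args>" string that ScriptHandler later splits into command and arguments; anything that is not .sh or .ps1 throws. The hook paths below are hypothetical.

// Illustration only; Trace and prependPath come from the calling runner service.
var shellForSh  = ScriptHandlerHelpers.GetDefaultShellForScript("/opt/runner/hooks/job-started.sh", Trace, prependPath);
// e.g. "/usr/bin/bash -e {0}" when bash is on PATH, otherwise "/bin/sh -e {0}" (exact args are the helper's 'sh' defaults)

var shellForPs1 = ScriptHandlerHelpers.GetDefaultShellForScript("/opt/runner/hooks/job-completed.ps1", Trace, prependPath);
// prefers pwsh, falling back to Windows PowerShell when pwsh is not installed

// GetDefaultShellForScript throws an ArgumentException for any other extension,
// so job hook scripts must end in '.sh' or '.ps1'.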

View File

@@ -15,6 +15,7 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Common.Util; using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using Pipelines = GitHub.DistributedTask.Pipelines; using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker namespace GitHub.Runner.Worker
@@ -248,6 +249,19 @@ namespace GitHub.Runner.Worker
Trace.Info("Downloading actions"); Trace.Info("Downloading actions");
var actionManager = HostContext.GetService<IActionManager>(); var actionManager = HostContext.GetService<IActionManager>();
var prepareResult = await actionManager.PrepareActionsAsync(context, message.Steps); var prepareResult = await actionManager.PrepareActionsAsync(context, message.Steps);
// add hook to preJobSteps
var startedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED");
if (!string.IsNullOrEmpty(startedHookPath))
{
var hookProvider = HostContext.GetService<IJobHookProvider>();
var jobHookData = new JobHookData(ActionRunStage.Pre, startedHookPath);
preJobSteps.Add(new JobExtensionRunner(runAsync: hookProvider.RunHook,
condition: $"{PipelineTemplateConstants.Always}()",
displayName: Constants.Hooks.JobStartedStepName,
data: (object)jobHookData));
}
preJobSteps.AddRange(prepareResult.ContainerSetupSteps); preJobSteps.AddRange(prepareResult.ContainerSetupSteps);
// Add start-container steps, record and stop-container steps // Add start-container steps, record and stop-container steps
@@ -337,6 +351,18 @@ namespace GitHub.Runner.Worker
} }
} }
// Register Job Completed hook if the variable is set
var completedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED");
if (!string.IsNullOrEmpty(completedHookPath))
{
var hookProvider = HostContext.GetService<IJobHookProvider>();
var jobHookData = new JobHookData(ActionRunStage.Post, completedHookPath);
jobContext.RegisterPostJobStep(new JobExtensionRunner(runAsync: hookProvider.RunHook,
condition: $"{PipelineTemplateConstants.Always}()",
displayName: Constants.Hooks.JobCompletedStepName,
data: (object)jobHookData));
}
List<IStep> steps = new List<IStep>(); List<IStep> steps = new List<IStep>();
steps.AddRange(preJobSteps); steps.AddRange(preJobSteps);
steps.AddRange(jobSteps); steps.AddRange(jobSteps);
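Both hook registrations above are driven purely by environment variables on the runner host, the same way the JobExtensionL0 test later in this compare sets them. A minimal sketch with hypothetical script paths:

// Hypothetical hook script paths; set these in the runner's environment before a job is picked up.
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED", "/opt/runner/hooks/job-started.sh");
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED", "/opt/runner/hooks/job-completed.sh");
// With these set, InitializeJob prepends a started-hook step and registers a completed-hook post step,
// both guarded by the Always() condition so they run regardless of the job's result.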
@@ -406,7 +432,7 @@ namespace GitHub.Runner.Worker
// create a new timeline record node for 'Finalize job' // create a new timeline record node for 'Finalize job'
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Complete job", $"{nameof(JobExtension)}_Final", null, null, ActionRunStage.Post); IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Complete job", $"{nameof(JobExtension)}_Final", null, null, ActionRunStage.Post);
context.StepTelemetry.Type = "runner"; context.StepTelemetry.Type = "runner";
context.StepTelemetry.Action = "complete_joh"; context.StepTelemetry.Action = "complete_job";
using (var register = jobContext.CancellationToken.Register(() => { context.CancelToken(); })) using (var register = jobContext.CancellationToken.Register(() => { context.CancelToken(); }))
{ {
try try

View File

@@ -0,0 +1,95 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using System.Linq;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Handlers;
namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(JobHookProvider))]
public interface IJobHookProvider : IRunnerService
{
Task RunHook(IExecutionContext executionContext, object data);
}
public class JobHookData
{
public string Path { get; private set; }
public ActionRunStage Stage { get; private set; }
public JobHookData(ActionRunStage stage, string path)
{
Path = path;
Stage = stage;
}
}
public class JobHookProvider : RunnerService, IJobHookProvider
{
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
}
public async Task RunHook(IExecutionContext executionContext, object data)
{
// Get Inputs
var hookData = data as JobHookData;
ArgUtil.NotNull(hookData, nameof(JobHookData));
var displayName = hookData.Stage == ActionRunStage.Pre ? "job started hook" : "job completed hook";
// Log to users so that they know how this step was injected
executionContext.Output($"A {displayName} has been configured by the self-hosted runner administrator");
// Validate script file.
if (!File.Exists(hookData.Path))
{
throw new FileNotFoundException("File doesn't exist");
}
executionContext.WriteWebhookPayload();
// Create the handler data.
var scriptDirectory = Path.GetDirectoryName(hookData.Path);
var stepHost = HostContext.CreateService<IDefaultStepHost>();
var prependPath = string.Join(Path.PathSeparator.ToString(), executionContext.Global.PrependPath.Reverse<string>());
Dictionary<string, string> inputs = new()
{
["path"] = hookData.Path,
["shell"] = ScriptHandlerHelpers.GetDefaultShellForScript(hookData.Path, Trace, prependPath)
};
// Create the handler
var handlerFactory = HostContext.GetService<IHandlerFactory>();
var handler = handlerFactory.Create(
executionContext,
action: null,
stepHost,
new ScriptActionExecutionData(),
inputs,
environment: new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
executionContext.Global.Variables,
actionDirectory: scriptDirectory,
localActionContainerSetupSteps: null);
handler.PrepareExecution(hookData.Stage);
// Setup file commands
var fileCommandManager = HostContext.CreateService<IFileCommandManager>();
fileCommandManager.InitializeFiles(executionContext, null);
// Run the step and process the file commands
try
{
await handler.RunAsync(hookData.Stage);
}
finally
{
fileCommandManager.ProcessFiles(executionContext, executionContext.Global.Container);
}
}
}
}
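A short usage sketch of the provider above, mirroring how JobExtension wires it up: build a JobHookData for the stage and script path, resolve IJobHookProvider from the host context, and await RunHook. The path is hypothetical and executionContext is assumed to come from the surrounding worker code.

// Illustration only; HostContext and executionContext are provided by the surrounding worker code.
var hookProvider = HostContext.GetService<IJobHookProvider>();
var hookData = new JobHookData(ActionRunStage.Post, "/opt/runner/hooks/job-completed.sh"); // hypothetical path
await hookProvider.RunHook(executionContext, hookData);
// RunHook verifies the script exists, picks a shell from the file extension,
// runs it through the script handler, and finally processes any file commands the script emitted.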

View File

@@ -1,5 +1,6 @@
using System; using System;
using System.Runtime.CompilerServices; using System.Runtime.CompilerServices;
using System.Text.RegularExpressions;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Listener.Configuration; using GitHub.Runner.Listener.Configuration;
using Xunit; using Xunit;
@@ -15,7 +16,7 @@ namespace GitHub.Runner.Common.Tests
{ {
RunnerSettings settings = new RunnerSettings(); RunnerSettings settings = new RunnerSettings();
settings.AgentName = "thisiskindofalongrunnername1"; settings.AgentName = "thisiskindofalongrunnerabcde";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890"; settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/myorganizationexample/myrepoexample"; settings.GitHubUrl = "https://github.com/myorganizationexample/myrepoexample";
@@ -43,7 +44,7 @@ namespace GitHub.Runner.Common.Tests
Assert.Equal("actions", serviceNameParts[0]); Assert.Equal("actions", serviceNameParts[0]);
Assert.Equal("runner", serviceNameParts[1]); Assert.Equal("runner", serviceNameParts[1]);
Assert.Equal("myorganizationexample-myrepoexample", serviceNameParts[2]); // '/' has been replaced with '-' Assert.Equal("myorganizationexample-myrepoexample", serviceNameParts[2]); // '/' has been replaced with '-'
Assert.Equal("thisiskindofalongrunnername1", serviceNameParts[3]); Assert.Equal("thisiskindofalongrunnerabcde", serviceNameParts[3]);
} }
} }
@@ -54,7 +55,7 @@ namespace GitHub.Runner.Common.Tests
{ {
RunnerSettings settings = new RunnerSettings(); RunnerSettings settings = new RunnerSettings();
settings.AgentName = "thisiskindofalongrunnername12"; settings.AgentName = "thisiskindofalongrunnernabcde";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890"; settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/myorganizationexample/myrepoexample"; settings.GitHubUrl = "https://github.com/myorganizationexample/myrepoexample";
@@ -82,88 +83,129 @@ namespace GitHub.Runner.Common.Tests
Assert.Equal("actions", serviceNameParts[0]); Assert.Equal("actions", serviceNameParts[0]);
Assert.Equal("runner", serviceNameParts[1]); Assert.Equal("runner", serviceNameParts[1]);
Assert.Equal("myorganizationexample-myrepoexample", serviceNameParts[2]); // '/' has been replaced with '-' Assert.Equal("myorganizationexample-myrepoexample", serviceNameParts[2]); // '/' has been replaced with '-'
Assert.Equal("thisiskindofalongrunnername12", serviceNameParts[3]); Assert.Equal("thisiskindofalongrunnernabcde", serviceNameParts[3]); // should not have random numbers unless we exceed 80
} }
} }
[Fact] #if OS_WINDOWS
[Trait("Level", "L0")] [Fact]
[Trait("Category", "Service")] [Trait("Level", "L0")]
public void CalculateServiceNameLimitsServiceNameTo80Chars() [Trait("Category", "Service")]
{ public void CalculateServiceNameLimitsServiceNameTo80Chars()
RunnerSettings settings = new RunnerSettings();
settings.AgentName = "thisisareallyreallylongbutstillvalidagentname";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/myreallylongorganizationexample/myreallylongrepoexample";
string serviceNamePattern = "actions.runner.{0}.{1}";
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";
using (TestHostContext hc = CreateTestContext())
{ {
ServiceControlManager scm = new ServiceControlManager(); RunnerSettings settings = new RunnerSettings();
scm.Initialize(hc); settings.AgentName = "thisisareallyreallylongbutstillvalidagentname";
scm.CalculateServiceName( settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings, settings.GitHubUrl = "https://github.com/myreallylongorganizationexample/myreallylongrepoexample";
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);
// Verify name has been shortened to 80 characters string serviceNamePattern = "actions.runner.{0}.{1}";
Assert.Equal(80, serviceName.Length); string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";
var serviceNameParts = serviceName.Split('.'); using (TestHostContext hc = CreateTestContext())
{
ServiceControlManager scm = new ServiceControlManager();
// Verify that each component has been shortened to a sensible length scm.Initialize(hc);
Assert.Equal("actions", serviceNameParts[0]); // Never shortened scm.CalculateServiceName(
Assert.Equal("runner", serviceNameParts[1]); // Never shortened settings,
Assert.Equal("myreallylongorganizationexample-myreallylongr", serviceNameParts[2]); // First 45 chars, '/' has been replaced with '-' serviceNamePattern,
Assert.Equal("thisisareallyreally", serviceNameParts[3]); // Remainder of unused chars serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);
// Verify name has been shortened to 80 characters
Assert.Equal(80, serviceName.Length);
var serviceNameParts = serviceName.Split('.');
// Verify that each component has been shortened to a sensible length
Assert.Equal("actions", serviceNameParts[0]); // Never shortened
Assert.Equal("runner", serviceNameParts[1]); // Never shortened
Assert.Equal("myreallylongorganizationexample-myreallylongr", serviceNameParts[2]); // First 45 chars, '/' has been replaced with '-'
Assert.Matches(@"^(thisisareallyr-[0-9]{4})$", serviceNameParts[3]); // Remainder of unused chars, 4 random numbers added at the end
}
} }
}
// Special 'defensive' test that verifies we can gracefully handle creating service names // Special 'defensive' test that verifies we can gracefully handle creating service names
// in case GitHub.com changes its org/repo naming convention in the future, // in case GitHub.com changes its org/repo naming convention in the future,
// and some of these characters may be invalid for service names // and some of these characters may be invalid for service names
// Not meant to test character set exhaustively -- it's just here to exercise the sanitizing logic // Not meant to test character set exhaustively -- it's just here to exercise the sanitizing logic
[Fact] [Fact]
[Trait("Level", "L0")] [Trait("Level", "L0")]
[Trait("Category", "Service")] [Trait("Category", "Service")]
public void CalculateServiceNameSanitizeOutOfRangeChars() public void CalculateServiceNameSanitizeOutOfRangeChars()
{
RunnerSettings settings = new RunnerSettings();
settings.AgentName = "name";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/org!@$*+[]()/repo!@$*+[]()";
string serviceNamePattern = "actions.runner.{0}.{1}";
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";
using (TestHostContext hc = CreateTestContext())
{ {
ServiceControlManager scm = new ServiceControlManager(); RunnerSettings settings = new RunnerSettings();
scm.Initialize(hc); settings.AgentName = "name";
scm.CalculateServiceName( settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings, settings.GitHubUrl = "https://github.com/org!@$*+[]()/repo!@$*+[]()";
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);
var serviceNameParts = serviceName.Split('.'); string serviceNamePattern = "actions.runner.{0}.{1}";
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";
// Verify service name parts are sanitized correctly using (TestHostContext hc = CreateTestContext())
Assert.Equal("actions", serviceNameParts[0]); {
Assert.Equal("runner", serviceNameParts[1]); ServiceControlManager scm = new ServiceControlManager();
Assert.Equal("org----------repo---------", serviceNameParts[2]); // Chars replaced with '-'
Assert.Equal("name", serviceNameParts[3]); scm.Initialize(hc);
scm.CalculateServiceName(
settings,
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);
var serviceNameParts = serviceName.Split('.');
// Verify service name parts are sanitized correctly
Assert.Equal("actions", serviceNameParts[0]);
Assert.Equal("runner", serviceNameParts[1]);
Assert.Equal("org----------repo---------", serviceNameParts[2]); // Chars replaced with '-'
Assert.Equal("name", serviceNameParts[3]);
}
} }
} #else
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Service")]
public void CalculateServiceNameLimitsServiceNameTo150Chars()
{
RunnerSettings settings = new RunnerSettings();
settings.AgentName = "thisisareallyreallylongbutstillvalidagentnameiamusingforthisexampletotestverylongnamelimits";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/myreallylongorganizationexampleonlinux/myreallylongrepoexampleonlinux1234";
string serviceNamePattern = "actions.runner.{0}.{1}";
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";
using (TestHostContext hc = CreateTestContext())
{
ServiceControlManager scm = new ServiceControlManager();
scm.Initialize(hc);
scm.CalculateServiceName(
settings,
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);
// Verify name has been shortened to 150
Assert.Equal(150, serviceName.Length);
var serviceNameParts = serviceName.Split('.');
// Verify that each component has been shortened to a sensible length
Assert.Equal("actions", serviceNameParts[0]); // Never shortened
Assert.Equal("runner", serviceNameParts[1]); // Never shortened
Assert.Equal("myreallylongorganizationexampleonlinux-myreallylongrepoexampleonlinux1", serviceNameParts[2]); // First 70 chars, '/' has been replaced with '-'
Assert.Matches(@"^(thisisareallyreallylongbutstillvalidagentnameiamusingforthi-[0-9]{4})$", serviceNameParts[3]);
}
}
#endif
private TestHostContext CreateTestContext([CallerMemberName] string testName = "") private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
{ {
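The assertions above pin down the shape of the truncation: the service name is 'actions.runner.{org-repo}.{agent}', '/' and other unsupported characters become '-', the org-repo part is capped (45 characters in the Windows test, 70 in the Linux one), and when the agent part has to be cut, a dash plus four random digits is appended. A rough sketch reconstructed from those assertions, not the actual ServiceControlManager code:

// Rough reconstruction from the test expectations above; the allowed character set is an assumption.
using System;
using System.Text.RegularExpressions;

static class ServiceNameSketch
{
    public static string Build(string orgRepo, string agentName, int totalLimit, int repoPartLimit)
    {
        // Sanitize: '/' and anything else outside the assumed safe set becomes '-'.
        var repoPart = Regex.Replace(orgRepo, @"[^0-9a-zA-Z_\-]", "-");
        if (repoPart.Length > repoPartLimit)
        {
            repoPart = repoPart.Substring(0, repoPartLimit);
        }

        var prefix = $"actions.runner.{repoPart}.";
        var agentBudget = totalLimit - prefix.Length;
        var agentPart = agentName;
        if (agentPart.Length > agentBudget)
        {
            // Truncate and append '-NNNN', matching the Assert.Matches patterns above.
            var suffix = "-" + new Random().Next(1000, 10000);
            agentPart = agentPart.Substring(0, agentBudget - suffix.Length) + suffix;
        }

        return prefix + agentPart;
    }
}

With the Windows test's inputs (80-char limit, 45-char repo part), this sketch yields an 80-character name whose last segment is a truncated agent name plus four digits, consistent with the assertion above.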

View File

@@ -118,7 +118,7 @@ namespace GitHub.Runner.Common.Tests.Worker
await _actionRunner.RunAsync(); await _actionRunner.RunAsync();
//Assert //Assert
_ec.Verify(x => x.SetGitHubContext("event_path", Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "_github_workflow", "event.json")), Times.Once); _ec.Verify(x => x.WriteWebhookPayload(), Times.Once);
} }
[Fact] [Fact]

View File

@@ -144,6 +144,55 @@ namespace GitHub.Runner.Common.Tests.Worker
} }
} }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void AddIssue_AddStepAndLineNumberInformation()
{
using (TestHostContext hc = CreateTestContext())
{
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var pagingLogger2 = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
hc.EnqueueInstance(pagingLogger.Object);
hc.EnqueueInstance(pagingLogger2.Object);
hc.SetSingleton(jobServerQueue.Object);
var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);
ec.InitializeJob(jobRequest, CancellationToken.None);
ec.Start();
var embeddedStep = ec.CreateChild(Guid.NewGuid(), "action_1_pre", "action_1_pre", null, null, ActionRunStage.Main, isEmbedded: true);
embeddedStep.Start();
embeddedStep.AddIssue(new Issue() { Type = IssueType.Error, Message = "error annotation that should have step and line number information" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning annotation that should have step and line number information" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice annotation that should have step and line number information" });
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Error).Count() == 1)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Warning).Count() == 1)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Notice).Count() == 1)), Times.AtLeastOnce);
}
}
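In short, the test above checks that annotations raised from an embedded step carry enough data to link back to a step and a log line. A hedged illustration of the expected shape; the keys are taken from the assertions, the values are hypothetical:

// Hypothetical illustration; only the key names are taken from the assertions above.
var issue = new Issue { Type = IssueType.Error, Message = "error annotation that should have step and line number information" };
// After ExecutionContext.AddIssue records the issue on the timeline, its Data is expected to contain:
//   issue.Data["stepNumber"]        -> which step the annotation belongs to
//   issue.Data["logFileLineNumber"] -> the log line the annotation points at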
[Fact] [Fact]
[Trait("Level", "L0")] [Trait("Level", "L0")]
[Trait("Category", "Worker")] [Trait("Category", "Worker")]

View File

@@ -0,0 +1,100 @@
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using Moq;
using Xunit;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Handlers;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Common.Tests.Worker
{
public sealed class HandlerFactoryL0
{
private Mock<IExecutionContext> _ec;
private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
{
var hostContext = new TestHostContext(this, testName);
_ec = new Mock<IExecutionContext>();
_ec.SetupAllProperties();
_ec.Object.Initialize(hostContext);
var handler = new Mock<INodeScriptActionHandler>();
handler.SetupAllProperties();
hostContext.EnqueueInstance(handler.Object);
//hostContext.EnqueueInstance(new ActionCommandManager() as IActionCommandManager);
return hostContext;
}
[Theory]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
[InlineData("node12", "", "", "", "node12")]
[InlineData("node12", "true", "", "", "node16")]
[InlineData("node12", "true", "", "true", "node12")]
[InlineData("node12", "true", "true", "", "node12")]
[InlineData("node12", "true", "true", "true", "node12")]
[InlineData("node12", "true", "false", "true", "node16")] // workflow overrides env
[InlineData("node16", "", "", "", "node16")]
[InlineData("node16", "true", "", "", "node16")]
[InlineData("node16", "true", "", "true", "node16")]
[InlineData("node16", "true", "true", "", "node16")]
[InlineData("node16", "true", "true", "true", "node16")]
[InlineData("node16", "true", "false", "true", "node16")]
public void IsNodeVersionUpgraded(string inputVersion, string serverFeatureFlag, string workflowOptOut, string machineOptOut, string expectedVersion)
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange.
var hf = new HandlerFactory();
hf.Initialize(hc);
// Server Feature Flag
var variables = new Dictionary<string, VariableValue>();
if (!string.IsNullOrEmpty(serverFeatureFlag))
{
variables["DistributedTask.ForceGithubJavascriptActionsToNode16"] = serverFeatureFlag;
}
Variables serverVariables = new Variables(hc, variables);
// Workflow opt-out
var workflowVariables = new Dictionary<string, string>();
if (!string.IsNullOrEmpty(workflowOptOut))
{
workflowVariables[Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion] = workflowOptOut;
}
// Machine opt-out
if (!string.IsNullOrEmpty(machineOptOut))
{
Environment.SetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, machineOptOut);
}
_ec.Setup(x => x.Global).Returns(new GlobalContext()
{
Variables = serverVariables,
EnvironmentVariables = workflowVariables
});
// Act.
var data = new NodeJSActionExecutionData();
data.NodeVersion = inputVersion;
var handler = hf.Create(
_ec.Object,
new ScriptReference(),
new Mock<IStepHost>().Object,
data,
new Dictionary<string, string>(),
new Dictionary<string, string>(),
new Variables(hc, new Dictionary<string, VariableValue>()), "", new List<JobExtensionRunner>()
) as INodeScriptActionHandler;
// Assert.
Assert.Equal(expectedVersion, handler.Data.NodeVersion);
Environment.SetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, null);
}
}
}
}

View File

@@ -25,6 +25,7 @@ namespace GitHub.Runner.Common.Tests.Worker
private Mock<IPagingLogger> _logger; private Mock<IPagingLogger> _logger;
private Mock<IContainerOperationProvider> _containerProvider; private Mock<IContainerOperationProvider> _containerProvider;
private Mock<IDiagnosticLogManager> _diagnosticLogManager; private Mock<IDiagnosticLogManager> _diagnosticLogManager;
private Mock<IJobHookProvider> _jobHookProvider;
private CancellationTokenSource _tokenSource; private CancellationTokenSource _tokenSource;
private TestHostContext CreateTestContext([CallerMemberName] String testName = "") private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
@@ -40,6 +41,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_directoryManager = new Mock<IPipelineDirectoryManager>(); _directoryManager = new Mock<IPipelineDirectoryManager>();
_directoryManager.Setup(x => x.PrepareDirectory(It.IsAny<IExecutionContext>(), It.IsAny<Pipelines.WorkspaceOptions>())) _directoryManager.Setup(x => x.PrepareDirectory(It.IsAny<IExecutionContext>(), It.IsAny<Pipelines.WorkspaceOptions>()))
.Returns(new TrackingConfig() { PipelineDirectory = "runner", WorkspaceDirectory = "runner/runner" }); .Returns(new TrackingConfig() { PipelineDirectory = "runner", WorkspaceDirectory = "runner/runner" });
_jobHookProvider = new Mock<IJobHookProvider>();
IActionRunner step1 = new ActionRunner(); IActionRunner step1 = new ActionRunner();
IActionRunner step2 = new ActionRunner(); IActionRunner step2 = new ActionRunner();
@@ -111,7 +113,9 @@ namespace GitHub.Runner.Common.Tests.Worker
hc.SetSingleton(_containerProvider.Object); hc.SetSingleton(_containerProvider.Object);
hc.SetSingleton(_directoryManager.Object); hc.SetSingleton(_directoryManager.Object);
hc.SetSingleton(_diagnosticLogManager.Object); hc.SetSingleton(_diagnosticLogManager.Object);
hc.SetSingleton(_jobHookProvider.Object);
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // JobExecutionContext hc.EnqueueInstance<IPagingLogger>(_logger.Object); // JobExecutionContext
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // job start hook
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // Initial Job hc.EnqueueInstance<IPagingLogger>(_logger.Object); // Initial Job
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step1 hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step1
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step2 hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step2
@@ -120,6 +124,7 @@ namespace GitHub.Runner.Common.Tests.Worker
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step5 hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step5
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // prepare1 hc.EnqueueInstance<IPagingLogger>(_logger.Object); // prepare1
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // prepare2 hc.EnqueueInstance<IPagingLogger>(_logger.Object); // prepare2
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // job complete hook
hc.EnqueueInstance<IActionRunner>(step1); hc.EnqueueInstance<IActionRunner>(step1);
hc.EnqueueInstance<IActionRunner>(step2); hc.EnqueueInstance<IActionRunner>(step2);
@@ -348,5 +353,62 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(TaskResult.Succeeded, _jobEc.Result); Assert.Equal(TaskResult.Succeeded, _jobEc.Result);
} }
} }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task EnsurePreAndPostHookStepsIfEnvExists()
{
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED", "/foo/bar");
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED", "/bar/foo");
using (TestHostContext hc = CreateTestContext())
{
var jobExtension = new JobExtension();
jobExtension.Initialize(hc);
_actionManager.Setup(x => x.PrepareActionsAsync(It.IsAny<IExecutionContext>(), It.IsAny<IEnumerable<Pipelines.JobStep>>(), It.IsAny<Guid>()))
.Returns(Task.FromResult(new PrepareResult(new List<JobExtensionRunner>(), new Dictionary<Guid, IActionRunner>())));
List<IStep> result = await jobExtension.InitializeJob(_jobEc, _message);
var trace = hc.GetTrace();
var hookStart = result.First() as JobExtensionRunner;
jobExtension.FinalizeJob(_jobEc, _message, DateTime.UtcNow);
Assert.Equal(Constants.Hooks.JobStartedStepName, hookStart.DisplayName);
Assert.Equal(Constants.Hooks.JobCompletedStepName, (_jobEc.PostJobSteps.Last() as JobExtensionRunner).DisplayName);
}
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED", null);
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED", null);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void EnsureNoPreAndPostHookSteps()
{
using (TestHostContext hc = CreateTestContext())
{
var jobExtension = new JobExtension();
jobExtension.Initialize(hc);
_message.ActionsEnvironment = null;
_jobEc = new Runner.Worker.ExecutionContext {Result = TaskResult.Succeeded};
_jobEc.Initialize(hc);
_jobEc.InitializeJob(_message, _tokenSource.Token);
var x = _jobEc.JobSteps;
jobExtension.FinalizeJob(_jobEc, _message, DateTime.UtcNow);
Assert.Equal(TaskResult.Succeeded, _jobEc.Result);
Assert.Equal(0, _jobEc.PostJobSteps.Count);
}
}
} }
} }

View File

@@ -128,6 +128,7 @@ function layout ()
chmod +x "${LAYOUT_DIR}/bin/Runner.Worker" chmod +x "${LAYOUT_DIR}/bin/Runner.Worker"
chmod +x "${LAYOUT_DIR}/bin/Runner.PluginHost" chmod +x "${LAYOUT_DIR}/bin/Runner.PluginHost"
chmod +x "${LAYOUT_DIR}/bin/installdependencies.sh" chmod +x "${LAYOUT_DIR}/bin/installdependencies.sh"
chmod +x "${LAYOUT_DIR}/safe_sleep.sh"
fi fi
heading "Setup externals folder for $RUNTIME_ID runner's layout" heading "Setup externals folder for $RUNTIME_ID runner's layout"

View File

@@ -1 +1 @@
2.287.1 2.289.1