Compare commits


6 Commits

Author  SHA1  Message  Date
Rob Herley  e552c0cced  separate logic into attachment summary func  2022-01-25 19:03:14 -05:00
Rob Herley  e112193313  use step id as md file name, queue file attachment  2022-01-25 19:00:12 -05:00
Sven Pfleiderer  386fa86f91  Try to simplify cleaning up file references  2022-01-21 16:56:35 -08:00
Sven Pfleiderer  412dc5c836  Fix file contention issue  2022-01-21 14:32:47 -08:00
Sven Pfleiderer  0361ffcbf9  Merge branch 'main' of github.com:actions/runner into pfleidi/step_summary_file  2022-01-21 11:25:53 -08:00
Sven Pfleiderer  25f6cb1d78  First prototype of step summary environment variable  2022-01-20 17:10:52 -08:00
86 changed files with 657 additions and 6074 deletions

View File

@@ -50,6 +50,13 @@ jobs:
${{ matrix.devScript }} layout Release ${{ matrix.runtime }} ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
working-directory: src working-directory: src
# Run tests
- name: L0
run: |
${{ matrix.devScript }} test
working-directory: src
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'
# Check runtime/externals hash # Check runtime/externals hash
- name: Compute/Compare runtime and externals Hash - name: Compute/Compare runtime and externals Hash
shell: bash shell: bash
@@ -73,13 +80,6 @@ jobs:
DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}} DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}} EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
# Run tests
- name: L0
run: |
${{ matrix.devScript }} test
working-directory: src
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'
# Create runner package tar.gz/zip # Create runner package tar.gz/zip
- name: Package Release - name: Package Release
if: github.event_name != 'pull_request' if: github.event_name != 'pull_request'

View File

@@ -260,17 +260,6 @@ jobs:
console.log(releaseNote) console.log(releaseNote)
core.setOutput('version', runnerVersion); core.setOutput('version', runnerVersion);
core.setOutput('note', releaseNote); core.setOutput('note', releaseNote);
- name: Validate Packages HASH
working-directory: _package
run: |
ls -l
echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
echo "${{needs.build.outputs.osx-x64-sha}} actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-x64-sha}} actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-arm-sha}} actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-arm64-sha}} actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
# Create GitHub release # Create GitHub release
- uses: actions/create-release@master - uses: actions/create-release@master
id: createRelease id: createRelease

.vscode/launch.json vendored
View File

@@ -13,7 +13,6 @@
"cwd": "${workspaceFolder}/src", "cwd": "${workspaceFolder}/src",
"console": "integratedTerminal", "console": "integratedTerminal",
"requireExactSource": false, "requireExactSource": false,
"targetArchitecture": "x86_64"
}, },
{ {
"name": "Run", "name": "Run",
@@ -26,7 +25,6 @@
"cwd": "${workspaceFolder}/src", "cwd": "${workspaceFolder}/src",
"console": "integratedTerminal", "console": "integratedTerminal",
"requireExactSource": false, "requireExactSource": false,
"targetArchitecture": "x86_64"
}, },
{ {
"name": "Configure", "name": "Configure",
@@ -40,7 +38,6 @@
"cwd": "${workspaceFolder}/src", "cwd": "${workspaceFolder}/src",
"console": "integratedTerminal", "console": "integratedTerminal",
"requireExactSource": false, "requireExactSource": false,
"targetArchitecture": "x86_64"
}, },
{ {
"name": "Debug Worker", "name": "Debug Worker",
@@ -48,7 +45,6 @@
"request": "attach", "request": "attach",
"processName": "Runner.Worker", "processName": "Runner.Worker",
"requireExactSource": false, "requireExactSource": false,
"targetArchitecture": "x86_64"
}, },
{ {
"name": "Attach Debugger", "name": "Attach Debugger",
@@ -56,8 +52,6 @@
"request": "attach", "request": "attach",
"processId": "${command:pickProcess}", "processId": "${command:pickProcess}",
"requireExactSource": false, "requireExactSource": false,
"targetArchitecture": "x86_64"
}, },
], ],
} }

View File

@@ -6,35 +6,13 @@
Make sure the runner has access to actions service for GitHub.com or GitHub Enterprise Server Make sure the runner has access to actions service for GitHub.com or GitHub Enterprise Server
- For GitHub.com - For GitHub.com
- The runner needs to access `https://api.github.com` for downloading actions. - The runner needs to access https://api.github.com for downloading actions.
- The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token. - The runner needs to access https://vstoken.actions.githubusercontent.com/_apis/.../ for requesting an access token.
- The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs. - The runner needs to access https://pipelines.actions.githubusercontent.com/_apis/.../ for receiving workflow jobs.
These can be tested by running the following `curl` commands from your self-hosted runner machine:
```
curl -v https://api.github.com/zen
curl -v https://vstoken.actions.githubusercontent.com/_apis/health
curl -v https://pipelines.actions.githubusercontent.com/_apis/health
```
- For GitHub Enterprise Server - For GitHub Enterprise Server
- The runner needs to access `https://[hostname]/api/v3` for downloading actions. - The runner needs to access https://myGHES.com/api/v3 for downloading actions.
- The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token. - The runner needs to access https://myGHES.com/_services/vstoken/_apis/.../ for requesting an access token.
- The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs. - The runner needs to access https://myGHES.com/_services/pipelines/_apis/.../ for receiving workflow jobs.
These can be tested by running the following `curl` commands from your self-hosted runner machine, replacing `[hostname]` with the hostname of your appliance, for instance `github.example.com`:
```
curl -v https://[hostname]/api/v3/zen
curl -v https://[hostname]/_services/vstoken/_apis/health
curl -v https://[hostname]/_services/pipelines/_apis/health
```
A common cause of these connectivity issues is that your GitHub Enterprise Server appliance is using [the self-signed certificate that is enabled the first time](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls) your appliance is started. As self-signed certificates are not trusted by web browsers and Git clients, these clients (including the GitHub Actions runner) will report certificate warnings.
We recommend [uploading a certificate signed by a trusted authority](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls) to GitHub Enterprise Server, or enabling the built-in [Let's Encrypt support](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls).
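A quick way to tell whether an appliance is still serving that self-signed certificate is to inspect what it presents on port 443; a self-signed certificate shows the same subject and issuer (hostname illustrative):
```
openssl s_client -connect github.example.com:443 -servername github.example.com </dev/null 2>/dev/null \
  | openssl x509 -noout -subject -issuer
```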
## What is checked? ## What is checked?
@@ -64,4 +42,4 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
## Still not working? ## Still not working?
Contact [GitHub Support](https://support.github.com) if you have further questions, or log an issue at https://github.com/actions/runner if you think it's a runner issue. Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.

View File

@@ -4,9 +4,9 @@
Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server. Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.
The runner carries its own copy of node.js executable under `<runner_root>/externals/node16/`. The runner carries it's own copy of node.js executable under `<runner_root>/externals/node12/`.
All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node16/`. All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node12/`.
> Not the `node` from `$PATH` > Not the `node` from `$PATH`
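A simple sanity check is to invoke the bundled copy directly rather than whatever `node` is on `$PATH` (replace `<runner_root>` as in the doc; `node16` per one side of this diff, `node12` on the other):
```
<runner_root>/externals/node16/bin/node --version
# confirm the bundled node can reach GitHub.com (any printed HTTP status means connectivity works)
<runner_root>/externals/node16/bin/node -e "require('https').get('https://api.github.com', r => console.log(r.statusCode)).on('error', e => console.error(e.message))"
```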

View File

@@ -1,17 +1,21 @@
## Features ## Features
- Added `github.triggering_actor` to the `github` context (#1726) - Bump runtime to dotnet 6 (#1471)
- Save step information when creating annotations (#1744) - Show service container logs on teardown (#1563)
- Improved performance of live log streaming (#1730, #1755)
- Added Beta support for job started and completed hooks (#1737)
## Bugs ## Bugs
- Add masks for multiline secrets from ::add-mask:: (#1521)
- fix Log size and retention settings not work (#1507)
- Refactor SelfUpdater adding L0 tests. (#1564)
- Fix test failure: /bin/sleep on Macos 11 (Monterey) does not accept the suffix s. (#1472)
## Misc ## Misc
- Made some minor job telemetry improvements (#1747) - Update dependency check for dotnet 6. (#1551)
- Added repository name and workflow file name to `run.sh` output (#1761) - Produce trimmed down runner packages. (#1556)
- Deleted extra background in github-praph.png, which is displayed in README.md (#1432)
## Windows x64 ## Windows x64

View File

@@ -1 +1 @@
2.289.0 <Update to ./src/runnerversion when creating release>

View File

@@ -13,7 +13,7 @@ set -e
flags_found=false flags_found=false
while getopts 's:g:n:r:u:l:' opt; do while getopts 's:g:n:u:l:' opt; do
flags_found=true flags_found=true
case $opt in case $opt in
@@ -26,9 +26,6 @@ while getopts 's:g:n:r:u:l:' opt; do
n) n)
runner_name=$OPTARG runner_name=$OPTARG
;; ;;
r)
runner_group=$OPTARG
;;
u) u)
svc_user=$OPTARG svc_user=$OPTARG
;; ;;
@@ -47,7 +44,6 @@ Usage:
-s required scope: repo (:owner/:repo) or org (:organization) -s required scope: repo (:owner/:repo) or org (:organization)
-g optional ghe_hostname: the fully qualified domain name of your GitHub Enterprise Server deployment -g optional ghe_hostname: the fully qualified domain name of your GitHub Enterprise Server deployment
-n optional name of the runner, defaults to hostname -n optional name of the runner, defaults to hostname
-r optional name of the runner group to add the runner to, defaults to the Default group
-u optional user svc will run as, defaults to current -u optional user svc will run as, defaults to current
-l optional list of labels (split by comma) applied on the runner" -l optional list of labels (split by comma) applied on the runner"
exit 0 exit 0
@@ -63,7 +59,6 @@ if ! "$flags_found"; then
runner_name=${3:-$(hostname)} runner_name=${3:-$(hostname)}
svc_user=${4:-$USER} svc_user=${4:-$USER}
labels=${5} labels=${5}
runner_group=${6}
fi fi
# apply defaults # apply defaults
@@ -169,8 +164,8 @@ fi
echo echo
echo "Configuring ${runner_name} @ $runner_url" echo "Configuring ${runner_name} @ $runner_url"
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup \"$runner_group\"}" echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name --labels $labels"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup "$runner_group"} sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name --labels $labels
#--------------------------------------- #---------------------------------------
# Configuring as a service # Configuring as a service
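For the variant of create-latest-svc.sh above that accepts the `-r` runner-group flag, an invocation might look like this (scope, group name, and labels are illustrative; `RUNNER_CFG_PAT` is assumed to be the token variable the script reads):
```
# Register and install the runner service into a non-default runner group
export RUNNER_CFG_PAT=<personal access token>   # assumed token variable
./create-latest-svc.sh -s my-org -r my-runner-group -l self-hosted,linux
```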

View File

@@ -1 +1 @@
6ed30a2c1ee403a610d63e82bb230b9ba846a9c25cec9e4ea8672fb6ed4e1a51 6ca4a0e1c50b7079ead05321dcf5835c1c25f23dc632add8c1c4667d416d103e

View File

@@ -1 +1 @@
711c30c51ec52c9b7a9a2eb399d6ab2ab5ee1dc72de11879f2f36f919f163d78 b5951dc607d782d9c7571a7224e940eb0975bb23c54ff25c7afdbf959a417081

View File

@@ -1 +1 @@
a49479ca4b4988a06c097e8d22c51fd08a11c13f40807366236213d0e008cf6a af819e92011cc9cbca90e8299f9f7651f2cf6bf45b42920f9a4ca22795486147

View File

@@ -1 +1 @@
8e97df75230b843462a9b4c578ccec604ee4b4a1066120c85b04374317fa372b aa0e6bf4bfaabf48c962ea3b262dca042629ab332005f73d282faec908847036

View File

@@ -1 +1 @@
f75a671e5a188c76680739689aa75331a2c09d483dce9c80023518c48fd67a18 40328cff2b8229f9b578f32739183bd8f6aab481c21dadc052b09f1c7e8e4665

File diff suppressed because it is too large

View File

@@ -45,7 +45,7 @@ async function run(): Promise<void> {
result.end() result.end()
if (hasMatch) { if (hasMatch) {
console.log(`Found ${count} files to hash.`) console.log(`Find ${count} files to hash.`)
console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`) console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`)
} else { } else {
console.error(`__OUTPUT____OUTPUT__`) console.error(`__OUTPUT____OUTPUT__`)
@@ -53,11 +53,3 @@ async function run(): Promise<void> {
} }
run() run()
.then(out => {
console.log(out)
process.exit(0)
})
.catch(err => {
console.error(err)
process.exit(1)
})

View File

@@ -3,7 +3,7 @@ PACKAGERUNTIME=$1
PRECACHE=$2 PRECACHE=$2
NODE_URL=https://nodejs.org/dist NODE_URL=https://nodejs.org/dist
NODE12_VERSION="12.22.7" NODE12_VERSION="12.13.1"
NODE16_VERSION="16.13.0" NODE16_VERSION="16.13.0"
get_abs_path() { get_abs_path() {
@@ -143,7 +143,7 @@ fi
# Download the external tools for Linux PACKAGERUNTIMEs. # Download the external tools for Linux PACKAGERUNTIMEs.
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-v${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
fi fi

View File

@@ -3,135 +3,94 @@
// Licensed under the MIT license. See LICENSE file in the project root for full license information. // Licensed under the MIT license. See LICENSE file in the project root for full license information.
var childProcess = require("child_process"); var childProcess = require("child_process");
var path = require("path"); var path = require("path")
var supported = ["linux", "darwin"]; var supported = ['linux', 'darwin']
if (supported.indexOf(process.platform) == -1) { if (supported.indexOf(process.platform) == -1) {
console.log("Unsupported platform: " + process.platform); console.log('Unsupported platform: ' + process.platform);
console.log("Supported platforms are: " + supported.toString()); console.log('Supported platforms are: ' + supported.toString());
process.exit(1); process.exit(1);
} }
var stopping = false; var stopping = false;
var listener = null; var listener = null;
var exitServiceAfterNFailures = Number( var runService = function () {
process.env.GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES var listenerExePath = path.join(__dirname, '../bin/Runner.Listener');
); var interactive = process.argv[2] === "interactive";
if (exitServiceAfterNFailures <= 0) { if (!stopping) {
exitServiceAfterNFailures = NaN; try {
if (interactive) {
console.log('Starting Runner listener interactively');
listener = childProcess.spawn(listenerExePath, ['run'], { env: process.env });
} else {
console.log('Starting Runner listener with startup type: service');
listener = childProcess.spawn(listenerExePath, ['run', '--startuptype', 'service'], { env: process.env });
}
console.log(`Started listener process, pid: ${listener.pid}`);
listener.stdout.on('data', (data) => {
process.stdout.write(data.toString('utf8'));
});
listener.stderr.on('data', (data) => {
process.stdout.write(data.toString('utf8'));
});
listener.on("error", (err) => {
console.log(`Runner listener fail to start with error ${err.message}`);
});
listener.on('close', (code) => {
console.log(`Runner listener exited with error code ${code}`);
if (code === 0) {
console.log('Runner listener exit with 0 return code, stop the service, no retry needed.');
stopping = true;
} else if (code === 1) {
console.log('Runner listener exit with terminated error, stop the service, no retry needed.');
stopping = true;
} else if (code === 2) {
console.log('Runner listener exit with retryable error, re-launch runner in 5 seconds.');
} else if (code === 3) {
console.log('Runner listener exit because of updating, re-launch runner in 5 seconds.');
} else {
console.log('Runner listener exit with undefined return code, re-launch runner in 5 seconds.');
}
if (!stopping) {
setTimeout(runService, 5000);
}
});
} catch (ex) {
console.log(ex);
}
}
} }
var consecutiveFailureCount = 0;
var gracefulShutdown = function () {
console.log("Shutting down runner listener");
stopping = true;
if (listener) {
console.log("Sending SIGINT to runner listener to stop");
listener.kill("SIGINT");
console.log("Sending SIGKILL to runner listener");
setTimeout(() => listener.kill("SIGKILL"), 30000).unref();
}
};
var runService = function () {
var listenerExePath = path.join(__dirname, "../bin/Runner.Listener");
var interactive = process.argv[2] === "interactive";
if (!stopping) {
try {
if (interactive) {
console.log("Starting Runner listener interactively");
listener = childProcess.spawn(listenerExePath, ["run"], {
env: process.env,
});
} else {
console.log("Starting Runner listener with startup type: service");
listener = childProcess.spawn(
listenerExePath,
["run", "--startuptype", "service"],
{ env: process.env }
);
}
console.log(`Started listener process, pid: ${listener.pid}`);
listener.stdout.on("data", (data) => {
if (data.toString("utf8").includes("Listening for Jobs")) {
consecutiveFailureCount = 0;
}
process.stdout.write(data.toString("utf8"));
});
listener.stderr.on("data", (data) => {
process.stdout.write(data.toString("utf8"));
});
listener.on("error", (err) => {
console.log(`Runner listener fail to start with error ${err.message}`);
});
listener.on("close", (code) => {
console.log(`Runner listener exited with error code ${code}`);
if (code === 0) {
console.log(
"Runner listener exit with 0 return code, stop the service, no retry needed."
);
stopping = true;
} else if (code === 1) {
console.log(
"Runner listener exit with terminated error, stop the service, no retry needed."
);
stopping = true;
} else if (code === 2) {
console.log(
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
} else if (code === 3 || code === 4) {
console.log(
"Runner listener exit because of updating, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
} else {
var messagePrefix = "Runner listener exit with undefined return code";
consecutiveFailureCount++;
if (
!isNaN(exitServiceAfterNFailures) &&
consecutiveFailureCount >= exitServiceAfterNFailures
) {
console.error(
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
);
gracefulShutdown();
return;
} else {
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
}
}
if (!stopping) {
setTimeout(runService, 5000);
}
});
} catch (ex) {
console.log(ex);
}
}
};
runService(); runService();
console.log("Started running service"); console.log('Started running service');
process.on("SIGINT", () => { var gracefulShutdown = function (code) {
gracefulShutdown(); console.log('Shutting down runner listener');
stopping = true;
if (listener) {
console.log('Sending SIGINT to runner listener to stop');
listener.kill('SIGINT');
console.log('Sending SIGKILL to runner listener');
setTimeout(() => listener.kill('SIGKILL'), 30000).unref();
}
}
process.on('SIGINT', () => {
gracefulShutdown(0);
}); });
process.on("SIGTERM", () => { process.on('SIGTERM', () => {
gracefulShutdown(); gracefulShutdown(0);
}); });
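The RunnerService.js variant above that reads `GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES` stops re-launching the listener after that many consecutive undefined exit codes; a minimal sketch of running it with the cap set (value illustrative, bundled node path as used by runsvc.sh):
```
# Run the service wrapper interactively with a failure cap of 10
GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES=10 ./externals/node16/bin/node ./bin/RunnerService.js interactive
```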

View File

@@ -17,13 +17,7 @@ RUNNER_ROOT=`pwd`
LAUNCH_PATH="${HOME}/Library/LaunchAgents" LAUNCH_PATH="${HOME}/Library/LaunchAgents"
PLIST_PATH="${LAUNCH_PATH}/${SVC_NAME}.plist" PLIST_PATH="${LAUNCH_PATH}/${SVC_NAME}.plist"
TEMPLATE_PATH=$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE TEMPLATE_PATH=./bin/actions.runner.plist.template
IS_CUSTOM_TEMPLATE=0
if [[ -z $TEMPLATE_PATH ]]; then
TEMPLATE_PATH=./bin/actions.runner.plist.template
else
IS_CUSTOM_TEMPLATE=1
fi
TEMP_PATH=./bin/actions.runner.plist.temp TEMP_PATH=./bin/actions.runner.plist.temp
CONFIG_PATH=.service CONFIG_PATH=.service
@@ -35,11 +29,7 @@ function failed()
} }
if [ ! -f "${TEMPLATE_PATH}" ]; then if [ ! -f "${TEMPLATE_PATH}" ]; then
if [[ $IS_CUSTOM_TEMPLATE = 0 ]]; then failed "Must run from runner root or install is corrupt"
failed "Must run from runner root or install is corrupt"
else
failed "Service file at '$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE' using GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE env variable is not found"
fi
fi fi
function install() function install()
@@ -63,7 +53,7 @@ function install()
mkdir -p "${log_path}" || failed "failed to create ${log_path}" mkdir -p "${log_path}" || failed "failed to create ${log_path}"
echo Creating ${PLIST_PATH} echo Creating ${PLIST_PATH}
sed "s/{{User}}/${USER:-$SUDO_USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file" sed "s/{{User}}/${SUDO_USER:-$USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
mv "${TEMP_PATH}" "${PLIST_PATH}" || failed "failed to copy plist" mv "${TEMP_PATH}" "${PLIST_PATH}" || failed "failed to copy plist"
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user. # Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
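The svc.sh variant above that honors `GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE` lets an operator supply a custom launchd plist template instead of the bundled one; a minimal sketch (template path illustrative):
```
# Install the service using a custom plist template
export GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE=/opt/runner/custom.actions.runner.plist.template
./svc.sh install
```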

View File

@@ -43,32 +43,6 @@ module.exports =
/************************************************************************/ /************************************************************************/
/******/ ({ /******/ ({
/***/ 82:
/***/ (function(__unusedmodule, exports) {
"use strict";
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @param input input to sanitize into a string
*/
function toCommandValue(input) {
if (input === null || input === undefined) {
return '';
}
else if (typeof input === 'string' || input instanceof String) {
return input;
}
return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
//# sourceMappingURL=utils.js.map
/***/ }),
/***/ 87: /***/ 87:
/***/ (function(module) { /***/ (function(module) {
@@ -1004,42 +978,6 @@ function regExpEscape (s) {
} }
/***/ }),
/***/ 102:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
// For internal use, subject to change.
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__webpack_require__(747));
const os = __importStar(__webpack_require__(87));
const utils_1 = __webpack_require__(82);
function issueCommand(command, message) {
const filePath = process.env[`GITHUB_${command}`];
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
encoding: 'utf8'
});
}
exports.issueCommand = issueCommand;
//# sourceMappingURL=file-command.js.map
/***/ }), /***/ }),
/***/ 281: /***/ 281:
@@ -1613,7 +1551,7 @@ function run() {
} }
result.end(); result.end();
if (hasMatch) { if (hasMatch) {
console.log(`Found ${count} files to hash.`); console.log(`Find ${count} files to hash.`);
console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`); console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`);
} }
else { else {
@@ -1621,15 +1559,7 @@ function run() {
} }
}); });
} }
run() run();
.then(out => {
console.log(out);
process.exit(0);
})
.catch(err => {
console.error(err);
process.exit(1);
});
/***/ }), /***/ }),
@@ -1757,25 +1687,17 @@ module.exports = require("crypto");
"use strict"; "use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const os = __importStar(__webpack_require__(87)); const os = __webpack_require__(87);
const utils_1 = __webpack_require__(82);
/** /**
* Commands * Commands
* *
* Command Format: * Command Format:
* ::name key=value,key=value::message * ##[name key=value;key=value]message
* *
* Examples: * Examples:
* ::warning::This is the message * ##[warning]This is the user warning message
* ::set-env name=MY_VAR::some value * ##[set-secret name=mypassword]definitelyNotAPassword!
*/ */
function issueCommand(command, properties, message) { function issueCommand(command, properties, message) {
const cmd = new Command(command, properties, message); const cmd = new Command(command, properties, message);
@@ -1800,39 +1722,34 @@ class Command {
let cmdStr = CMD_STRING + this.command; let cmdStr = CMD_STRING + this.command;
if (this.properties && Object.keys(this.properties).length > 0) { if (this.properties && Object.keys(this.properties).length > 0) {
cmdStr += ' '; cmdStr += ' ';
let first = true;
for (const key in this.properties) { for (const key in this.properties) {
if (this.properties.hasOwnProperty(key)) { if (this.properties.hasOwnProperty(key)) {
const val = this.properties[key]; const val = this.properties[key];
if (val) { if (val) {
if (first) { // safely append the val - avoid blowing up when attempting to
first = false; // call .replace() if message is not a string for some reason
} cmdStr += `${key}=${escape(`${val || ''}`)},`;
else {
cmdStr += ',';
}
cmdStr += `${key}=${escapeProperty(val)}`;
} }
} }
} }
} }
cmdStr += `${CMD_STRING}${escapeData(this.message)}`; cmdStr += CMD_STRING;
// safely append the message - avoid blowing up when attempting to
// call .replace() if message is not a string for some reason
const message = `${this.message || ''}`;
cmdStr += escapeData(message);
return cmdStr; return cmdStr;
} }
} }
function escapeData(s) { function escapeData(s) {
return utils_1.toCommandValue(s) return s.replace(/\r/g, '%0D').replace(/\n/g, '%0A');
.replace(/%/g, '%25')
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A');
} }
function escapeProperty(s) { function escape(s) {
return utils_1.toCommandValue(s) return s
.replace(/%/g, '%25')
.replace(/\r/g, '%0D') .replace(/\r/g, '%0D')
.replace(/\n/g, '%0A') .replace(/\n/g, '%0A')
.replace(/:/g, '%3A') .replace(/]/g, '%5D')
.replace(/,/g, '%2C'); .replace(/;/g, '%3B');
} }
//# sourceMappingURL=command.js.map //# sourceMappingURL=command.js.map
@@ -1852,19 +1769,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next()); step((generator = generator.apply(thisArg, _arguments || [])).next());
}); });
}; };
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const command_1 = __webpack_require__(431); const command_1 = __webpack_require__(431);
const file_command_1 = __webpack_require__(102); const os = __webpack_require__(87);
const utils_1 = __webpack_require__(82); const path = __webpack_require__(622);
const os = __importStar(__webpack_require__(87));
const path = __importStar(__webpack_require__(622));
/** /**
* The code to exit an action * The code to exit an action
*/ */
@@ -1885,21 +1793,11 @@ var ExitCode;
/** /**
* Sets env variable for this action and future actions in the job * Sets env variable for this action and future actions in the job
* @param name the name of the variable to set * @param name the name of the variable to set
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify * @param val the value of the variable
*/ */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) { function exportVariable(name, val) {
const convertedVal = utils_1.toCommandValue(val); process.env[name] = val;
process.env[name] = convertedVal; command_1.issueCommand('set-env', { name }, val);
const filePath = process.env['GITHUB_ENV'] || '';
if (filePath) {
const delimiter = '_GitHubActionsFileCommandDelimeter_';
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
file_command_1.issueCommand('ENV', commandValue);
}
else {
command_1.issueCommand('set-env', { name }, convertedVal);
}
} }
exports.exportVariable = exportVariable; exports.exportVariable = exportVariable;
/** /**
@@ -1915,13 +1813,7 @@ exports.setSecret = setSecret;
* @param inputPath * @param inputPath
*/ */
function addPath(inputPath) { function addPath(inputPath) {
const filePath = process.env['GITHUB_PATH'] || ''; command_1.issueCommand('add-path', {}, inputPath);
if (filePath) {
file_command_1.issueCommand('PATH', inputPath);
}
else {
command_1.issueCommand('add-path', {}, inputPath);
}
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
} }
exports.addPath = addPath; exports.addPath = addPath;
@@ -1944,22 +1836,12 @@ exports.getInput = getInput;
* Sets the value of an output. * Sets the value of an output.
* *
* @param name name of the output to set * @param name name of the output to set
* @param value value to store. Non-string values will be converted to a string via JSON.stringify * @param value value to store
*/ */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) { function setOutput(name, value) {
command_1.issueCommand('set-output', { name }, value); command_1.issueCommand('set-output', { name }, value);
} }
exports.setOutput = setOutput; exports.setOutput = setOutput;
/**
* Enables or disables the echoing of commands into stdout for the rest of the step.
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
*
*/
function setCommandEcho(enabled) {
command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//----------------------------------------------------------------------- //-----------------------------------------------------------------------
// Results // Results
//----------------------------------------------------------------------- //-----------------------------------------------------------------------
@@ -1976,13 +1858,6 @@ exports.setFailed = setFailed;
//----------------------------------------------------------------------- //-----------------------------------------------------------------------
// Logging Commands // Logging Commands
//----------------------------------------------------------------------- //-----------------------------------------------------------------------
/**
* Gets whether Actions Step Debug is on or not
*/
function isDebug() {
return process.env['RUNNER_DEBUG'] === '1';
}
exports.isDebug = isDebug;
/** /**
* Writes debug message to user log * Writes debug message to user log
* @param message debug message * @param message debug message
@@ -1993,18 +1868,18 @@ function debug(message) {
exports.debug = debug; exports.debug = debug;
/** /**
* Adds an error issue * Adds an error issue
* @param message error issue message. Errors will be converted to string via toString() * @param message error issue message
*/ */
function error(message) { function error(message) {
command_1.issue('error', message instanceof Error ? message.toString() : message); command_1.issue('error', message);
} }
exports.error = error; exports.error = error;
/** /**
* Adds an warning issue * Adds an warning issue
* @param message warning issue message. Errors will be converted to string via toString() * @param message warning issue message
*/ */
function warning(message) { function warning(message) {
command_1.issue('warning', message instanceof Error ? message.toString() : message); command_1.issue('warning', message);
} }
exports.warning = warning; exports.warning = warning;
/** /**
@@ -2062,9 +1937,8 @@ exports.group = group;
* Saves state for current action, the state can only be retrieved by this action's post job execution. * Saves state for current action, the state can only be retrieved by this action's post job execution.
* *
* @param name name of the state to store * @param name name of the state to store
* @param value value to store. Non-string values will be converted to a string via JSON.stringify * @param value value to store
*/ */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) { function saveState(name, value) {
command_1.issueCommand('save-state', { name }, value); command_1.issueCommand('save-state', { name }, value);
} }

View File

@@ -10,11 +10,10 @@ if [ -f ".path" ]; then
echo ".path=${PATH}" echo ".path=${PATH}"
fi fi
nodever=${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node16}
# insert anything to setup env when running as a service # insert anything to setup env when running as a service
# run the host process which keep the listener alive # run the host process which keep the listener alive
./externals/$nodever/bin/node ./bin/RunnerService.js & ./externals/node12/bin/node ./bin/RunnerService.js &
PID=$! PID=$!
wait $PID wait $PID
trap - TERM INT trap - TERM INT
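The runsvc.sh variant above picks the bundled node through `GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION`, defaulting to `node16`; forcing the older runtime for a run would look like this:
```
# Launch the service wrapper with the bundled node12 instead of node16
GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION=node12 ./runsvc.sh
```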

View File

@@ -10,13 +10,7 @@ arg_2=${2}
RUNNER_ROOT=`pwd` RUNNER_ROOT=`pwd`
UNIT_PATH=/etc/systemd/system/${SVC_NAME} UNIT_PATH=/etc/systemd/system/${SVC_NAME}
TEMPLATE_PATH=$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE TEMPLATE_PATH=./bin/actions.runner.service.template
IS_CUSTOM_TEMPLATE=0
if [[ -z $TEMPLATE_PATH ]]; then
TEMPLATE_PATH=./bin/actions.runner.service.template
else
IS_CUSTOM_TEMPLATE=1
fi
TEMP_PATH=./bin/actions.runner.service.temp TEMP_PATH=./bin/actions.runner.service.temp
CONFIG_PATH=.service CONFIG_PATH=.service
@@ -37,11 +31,7 @@ function failed()
} }
if [ ! -f "${TEMPLATE_PATH}" ]; then if [ ! -f "${TEMPLATE_PATH}" ]; then
if [[ $IS_CUSTOM_TEMPLATE = 0 ]]; then failed "Must run from runner root or install is corrupt"
failed "Must run from runner root or install is corrupt"
else
failed "Service file at '$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE' using GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE env variable is not found"
fi
fi fi
#check if we run as root #check if we run as root

View File

@@ -30,13 +30,13 @@ date "+[%F %T-%4N] Waiting for $runnerprocessname ($runnerpid) to complete" >> "
while [ -e /proc/$runnerpid ] while [ -e /proc/$runnerpid ]
do do
date "+[%F %T-%4N] Process $runnerpid still running" >> "$logfile" 2>&1 date "+[%F %T-%4N] Process $runnerpid still running" >> "$logfile" 2>&1
"$rootfolder"/safe_sleep.sh 2 sleep 2
done done
date "+[%F %T-%4N] Process $runnerpid finished running" >> "$logfile" 2>&1 date "+[%F %T-%4N] Process $runnerpid finished running" >> "$logfile" 2>&1
# start re-organize folders # start re-organize folders
date "+[%F %T-%4N] Sleep 1 more second to make sure process exited" >> "$logfile" 2>&1 date "+[%F %T-%4N] Sleep 1 more second to make sure process exited" >> "$logfile" 2>&1
"$rootfolder"/safe_sleep.sh 1 sleep 1
# the folder structure under runner root will be # the folder structure under runner root will be
# ./bin -> bin.2.100.0 (junction folder) # ./bin -> bin.2.100.0 (junction folder)
@@ -125,7 +125,7 @@ attemptedtargetedfix=0
currentplatform=$(uname | awk '{print tolower($0)}') currentplatform=$(uname | awk '{print tolower($0)}')
if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
# We needed a fix for https://github.com/actions/runner/issues/743 # We needed a fix for https://github.com/actions/runner/issues/743
# We will recreate the ./externals/nodeXY/bin/node of the past runner version that launched the runnerlistener service # We will recreate the ./externals/node12/bin/node of the past runner version that launched the runnerlistener service
# Otherwise mac gatekeeper kills the processes we spawn on creation as we are running a process with no backing file # Otherwise mac gatekeeper kills the processes we spawn on creation as we are running a process with no backing file
# We need the pid for the nodejs loop, get that here, its the parent of the runner C# pid # We need the pid for the nodejs loop, get that here, its the parent of the runner C# pid
@@ -135,13 +135,7 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
then then
# inspect the open file handles to find the node process # inspect the open file handles to find the node process
# we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks # we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks
nodever="node16" path=$(lsof -a -g "$procgroup" -F n | grep node12/bin/node | grep externals | tail -1 | cut -c2-)
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
then
nodever="node12"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
fi
if [[ $? -eq 0 && -n "$path" ]] if [[ $? -eq 0 && -n "$path" ]]
then then
# trim the last 5 characters of the path '/node' # trim the last 5 characters of the path '/node'
@@ -154,7 +148,7 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
then then
date "+[%F %T-%4N] Creating fallback node at path $path" >> "$logfile" 2>&1 date "+[%F %T-%4N] Creating fallback node at path $path" >> "$logfile" 2>&1
mkdir -p "$trimmedpath" mkdir -p "$trimmedpath"
cp "$rootfolder/externals/$nodever/bin/node" "$path" cp "$rootfolder/externals/node12/bin/node" "$path"
else else
date "+[%F %T-%4N] Path for fallback node exists, skipping creating $path" >> "$logfile" 2>&1 date "+[%F %T-%4N] Path for fallback node exists, skipping creating $path" >> "$logfile" 2>&1
fi fi

View File

@@ -10,15 +10,23 @@ fi
# Run # Run
shopt -s nocasematch shopt -s nocasematch
SOURCE="${BASH_SOURCE[0]}" safe_sleep() {
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink if [ ! -x "$(command -v sleep)" ]; then
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" if [ ! -x "$(command -v ping)" ]; then
SOURCE="$(readlink "$SOURCE")" COUNT="0"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located while [[ $COUNT != 5000 ]]; do
done echo "SLEEP" > /dev/null
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" COUNT=$[$COUNT+1]
"$DIR"/bin/Runner.Listener run $* done
else
ping -c 5 127.0.0.1 > /dev/null
fi
else
sleep 5
fi
}
bin/Runner.Listener run $*
returnCode=$? returnCode=$?
if [[ $returnCode == 0 ]]; then if [[ $returnCode == 0 ]]; then
echo "Runner listener exit with 0 return code, stop the service, no retry needed." echo "Runner listener exit with 0 return code, stop the service, no retry needed."
@@ -28,18 +36,18 @@ elif [[ $returnCode == 1 ]]; then
exit 0 exit 0
elif [[ $returnCode == 2 ]]; then elif [[ $returnCode == 2 ]]; then
echo "Runner listener exit with retryable error, re-launch runner in 5 seconds." echo "Runner listener exit with retryable error, re-launch runner in 5 seconds."
"$DIR"/safe_sleep.sh 5 safe_sleep
exit 2 exit 1
elif [[ $returnCode == 3 ]]; then elif [[ $returnCode == 3 ]]; then
# Sleep 5 seconds to wait for the runner update process finish # Sleep 5 seconds to wait for the runner update process finish
echo "Runner listener exit because of updating, re-launch runner in 5 seconds" echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
"$DIR"/safe_sleep.sh 5 safe_sleep
exit 2 exit 1
elif [[ $returnCode == 4 ]]; then elif [[ $returnCode == 4 ]]; then
# Sleep 5 seconds to wait for the ephemeral runner update process finish # Sleep 5 seconds to wait for the ephemeral runner update process finish
echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds" echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
"$DIR"/safe_sleep.sh 5 safe_sleep
exit 2 exit 1
else else
echo "Exiting with unknown error code: ${returnCode}" echo "Exiting with unknown error code: ${returnCode}"
exit 0 exit 0

View File

@@ -19,7 +19,7 @@ rem Run.
rem ******************************************************************************** rem ********************************************************************************
:launch_helper :launch_helper
copy "%~dp0run-helper.cmd.template" "%~dp0run-helper.cmd" /Y copy run-helper.cmd.template run-helper.cmd /Y
call "%~dp0run-helper.cmd" %* call "%~dp0run-helper.cmd" %*
if %ERRORLEVEL% EQU 1 ( if %ERRORLEVEL% EQU 1 (
@@ -27,5 +27,5 @@ if %ERRORLEVEL% EQU 1 (
goto :launch_helper goto :launch_helper
) else ( ) else (
echo "Exiting runner..." echo "Exiting runner..."
exit /b 0 exit 0
) )

View File

@@ -9,13 +9,13 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh cp -f run-helper.sh.template run-helper.sh
# run the helper process which keep the listener alive # run the helper process which keep the listener alive
while :; while :;
do do
"$DIR"/run-helper.sh $* "$DIR"/run-helper.sh $*
returnCode=$? returnCode=$?
if [[ $returnCode -eq 2 ]]; then if [[ $returnCode == 1 ]]; then
echo "Restarting runner..." echo "Restarting runner..."
else else
echo "Exiting runner..." echo "Exiting runner..."

View File

@@ -1,6 +0,0 @@
#!/bin/bash
SECONDS=0
while [[ $SECONDS != $1 ]]; do
:
done

View File

@@ -33,9 +33,6 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)] [DataMember(EmitDefaultValue = false)]
public string PoolName { get; set; } public string PoolName { get; set; }
[DataMember(EmitDefaultValue = false)]
public bool DisableUpdate { get; set; }
[DataMember(EmitDefaultValue = false)] [DataMember(EmitDefaultValue = false)]
public bool Ephemeral { get; set; } public bool Ephemeral { get; set; }

View File

@@ -9,6 +9,7 @@ namespace GitHub.Runner.Common
Externals, Externals,
Root, Root,
Actions, Actions,
StepSummary,
Temp, Temp,
Tools, Tools,
Update, Update,
@@ -129,7 +130,6 @@ namespace GitHub.Runner.Common
public static readonly string Ephemeral = "ephemeral"; public static readonly string Ephemeral = "ephemeral";
public static readonly string Help = "help"; public static readonly string Help = "help";
public static readonly string Replace = "replace"; public static readonly string Replace = "replace";
public static readonly string DisableUpdate = "disableupdate";
public static readonly string Once = "once"; // Keep this around since customers still relies on it public static readonly string Once = "once"; // Keep this around since customers still relies on it
public static readonly string RunAsService = "runasservice"; public static readonly string RunAsService = "runasservice";
public static readonly string Unattended = "unattended"; public static readonly string Unattended = "unattended";
@@ -156,8 +156,7 @@ namespace GitHub.Runner.Common
public static readonly string LowDiskSpace = "LOW_DISK_SPACE"; public static readonly string LowDiskSpace = "LOW_DISK_SPACE";
public static readonly string UnsupportedCommand = "UNSUPPORTED_COMMAND"; public static readonly string UnsupportedCommand = "UNSUPPORTED_COMMAND";
public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/"; public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/";
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`."; public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
} }
public static class RunnerEvent public static class RunnerEvent
@@ -189,12 +188,6 @@ namespace GitHub.Runner.Common
public static readonly string Success = "success"; public static readonly string Success = "success";
} }
public static class Hooks
{
public static readonly string JobStartedStepName = "Set up runner";
public static readonly string JobCompletedStepName = "Complete runner";
}
public static class Path public static class Path
{ {
public static readonly string ActionsDirectory = "_actions"; public static readonly string ActionsDirectory = "_actions";
@@ -204,6 +197,7 @@ namespace GitHub.Runner.Common
public static readonly string DiagDirectory = "_diag"; public static readonly string DiagDirectory = "_diag";
public static readonly string ExternalsDirectory = "externals"; public static readonly string ExternalsDirectory = "externals";
public static readonly string RunnerDiagnosticLogPrefix = "Runner_"; public static readonly string RunnerDiagnosticLogPrefix = "Runner_";
public static readonly string StepSummaryDirectory = "_step_summary";
public static readonly string TempDirectory = "_temp"; public static readonly string TempDirectory = "_temp";
public static readonly string ToolDirectory = "_tool"; public static readonly string ToolDirectory = "_tool";
public static readonly string UpdateDirectory = "_update"; public static readonly string UpdateDirectory = "_update";
@@ -226,15 +220,11 @@ namespace GitHub.Runner.Common
public static readonly string AllowUnsupportedStopCommandTokens = "ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS"; public static readonly string AllowUnsupportedStopCommandTokens = "ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS";
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG"; public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG"; public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
} }
public static class Agent public static class Agent
{ {
public static readonly string ToolsDirectory = "agent.ToolsDirectory"; public static readonly string ToolsDirectory = "agent.ToolsDirectory";
// Set this env var to "node12" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
} }
public static class System public static class System
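The `StepSummary` constants above back the step summary prototype these commits introduce (a `_step_summary` directory under the temp folder plus a size limit on upload). Assuming the runner exports the file path as `GITHUB_STEP_SUMMARY`, as the commit messages describe, a workflow step can append Markdown to it from a shell:
```
# Append Markdown to the current step's summary file
echo "### L0 test results" >> "$GITHUB_STEP_SUMMARY"
echo "- all suites passed" >> "$GITHUB_STEP_SUMMARY"
```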

View File

@@ -60,7 +60,6 @@ namespace GitHub.Runner.Common
case "GitHub.Runner.Worker.IFileCommandExtension": case "GitHub.Runner.Worker.IFileCommandExtension":
Add<T>(extensions, "GitHub.Runner.Worker.AddPathFileCommand, Runner.Worker"); Add<T>(extensions, "GitHub.Runner.Worker.AddPathFileCommand, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.SetEnvFileCommand, Runner.Worker"); Add<T>(extensions, "GitHub.Runner.Worker.SetEnvFileCommand, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.CreateStepSummaryCommand, Runner.Worker");
break; break;
case "GitHub.Runner.Listener.Check.ICheckExtension": case "GitHub.Runner.Listener.Check.ICheckExtension":
Add<T>(extensions, "GitHub.Runner.Listener.Check.InternetCheck, Runner.Listener"); Add<T>(extensions, "GitHub.Runner.Listener.Check.InternetCheck, Runner.Listener");

View File

@@ -216,8 +216,6 @@ namespace GitHub.Runner.Common
_userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture))); _userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture)));
_userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture))); _userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture)));
} }
_userAgents.Add(new ProductInfoHeaderValue("CommitSHA", BuildConstants.Source.CommitHash));
} }
public string GetDirectory(WellKnownDirectory directory) public string GetDirectory(WellKnownDirectory directory)
@@ -245,6 +243,12 @@ namespace GitHub.Runner.Common
path = new DirectoryInfo(GetDirectory(WellKnownDirectory.Bin)).Parent.FullName; path = new DirectoryInfo(GetDirectory(WellKnownDirectory.Bin)).Parent.FullName;
break; break;
case WellKnownDirectory.StepSummary:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Temp),
Constants.Path.StepSummaryDirectory);
break;
case WellKnownDirectory.Temp: case WellKnownDirectory.Temp:
path = Path.Combine( path = Path.Combine(
GetDirectory(WellKnownDirectory.Work), GetDirectory(WellKnownDirectory.Work),

View File

@@ -1,36 +1,32 @@
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.WebApi;
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO; using System.IO;
using System.Net.Http; using System.Net.Http;
using System.Net.WebSockets;
using System.Text;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Services.Common; using GitHub.Services.Common;
using GitHub.Services.WebApi; using GitHub.Services.WebApi;
using Newtonsoft.Json;
namespace GitHub.Runner.Common namespace GitHub.Runner.Common
{ {
[ServiceLocator(Default = typeof(JobServer))] [ServiceLocator(Default = typeof(JobServer))]
public interface IJobServer : IRunnerService, IAsyncDisposable public interface IJobServer : IRunnerService
{ {
Task ConnectAsync(VssConnection jobConnection); Task ConnectAsync(VssConnection jobConnection);
void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
// logging and console // logging and console
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken); Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken); Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken);
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken); Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken); Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken); Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken); Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent; Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent;
Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken); Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken); Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken);
} }
public sealed class JobServer : RunnerService, IJobServer public sealed class JobServer : RunnerService, IJobServer
@@ -38,20 +34,6 @@ namespace GitHub.Runner.Common
private bool _hasConnection; private bool _hasConnection;
private VssConnection _connection; private VssConnection _connection;
private TaskHttpClient _taskClient; private TaskHttpClient _taskClient;
private ClientWebSocket _websocketClient;
private ServiceEndpoint _serviceEndpoint;
private int totalBatchedLinesAttemptedByWebsocket = 0;
private int failedAttemptsToPostBatchedLinesByWebsocket = 0;
private static readonly TimeSpan _minDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(100);
private static readonly TimeSpan _maxDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(500);
private static readonly int _minWebsocketFailurePercentageAllowed = 50;
private static readonly int _minWebsocketBatchedLinesCountToConsider = 5;
private Task _websocketConnectTask;
public async Task ConnectAsync(VssConnection jobConnection) public async Task ConnectAsync(VssConnection jobConnection)
{ {
@@ -60,7 +42,7 @@ namespace GitHub.Runner.Common
int attemptCount = totalAttempts; int attemptCount = totalAttempts;
var configurationStore = HostContext.GetService<IConfigurationStore>(); var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings(); var runnerSettings = configurationStore.GetSettings();
while (!_connection.HasAuthenticated && attemptCount-- > 0) while (!_connection.HasAuthenticated && attemptCount-- > 0)
{ {
try try
@@ -135,19 +117,6 @@ namespace GitHub.Runner.Common
} }
} }
public void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint)
{
this._serviceEndpoint = serviceEndpoint;
InitializeWebsocketClient(TimeSpan.Zero);
}
public ValueTask DisposeAsync()
{
_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, "Shutdown", CancellationToken.None);
GC.SuppressFinalize(this);
return ValueTask.CompletedTask;
}
private void CheckConnection() private void CheckConnection()
{ {
if (!_hasConnection) if (!_hasConnection)
@@ -156,48 +125,6 @@ namespace GitHub.Runner.Common
} }
} }
private void InitializeWebsocketClient(TimeSpan delay)
{
if (_serviceEndpoint.Authorization != null &&
_serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out var accessToken) &&
!string.IsNullOrEmpty(accessToken))
{
if (_serviceEndpoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl) && !string.IsNullOrEmpty(feedStreamUrl))
{
// let's ensure we use the right scheme
feedStreamUrl = feedStreamUrl.Replace("https://", "wss://").Replace("http://", "ws://");
Trace.Info($"Creating websocket client ..." + feedStreamUrl);
this._websocketClient = new ClientWebSocket();
this._websocketClient.Options.SetRequestHeader("Authorization", $"Bearer {accessToken}");
this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay);
}
else
{
Trace.Info($"No FeedStreamUrl found, so we will use Rest API calls for sending feed data");
}
}
else
{
Trace.Info($"No access token from the service endpoint");
}
}
private async Task ConnectWebSocketClient(string feedStreamUrl, TimeSpan delay)
{
try
{
Trace.Info($"Attempting to start websocket client with delay {delay}.");
await Task.Delay(delay);
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), default(CancellationToken));
Trace.Info($"Successfully started websocket client.");
}
catch(Exception ex)
{
Trace.Info("Exception caught during websocket client connect, fallback of HTTP would be used now instead of websocket.");
Trace.Error(ex);
}
}
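The websocket helpers shown in this hunk rewrite the feed URL from http(s) to ws(s), attach the endpoint's access token as a bearer Authorization header, and connect after an optional delay, falling back to HTTP if the connect fails. A minimal standalone sketch of that connect step, assuming only System.Net.WebSockets (the class, method, and parameter names here are illustrative, not the runner's API):

    using System;
    using System.Net.WebSockets;
    using System.Threading;
    using System.Threading.Tasks;

    static class FeedSocket
    {
        // Connects a ClientWebSocket to a feed URL, converting the scheme and
        // sending the access token as a bearer Authorization header.
        public static async Task<ClientWebSocket> ConnectAsync(
            string feedUrl, string accessToken, TimeSpan delay, CancellationToken ct)
        {
            // The service hands out an http(s) URL; switch it to a websocket scheme.
            var wsUrl = feedUrl.Replace("https://", "wss://").Replace("http://", "ws://");

            var client = new ClientWebSocket();
            client.Options.SetRequestHeader("Authorization", $"Bearer {accessToken}");

            await Task.Delay(delay, ct);          // optional backoff before (re)connecting
            await client.ConnectAsync(new Uri(wsUrl), ct);
            return client;
        }
    }

In the diffed code, any exception from this connect step is only logged, so the caller simply keeps using the REST client for console lines.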
//----------------------------------------------------------------- //-----------------------------------------------------------------
// Feedback: WebConsole, TimelineRecords and Logs // Feedback: WebConsole, TimelineRecords and Logs
//----------------------------------------------------------------- //-----------------------------------------------------------------
@@ -208,72 +135,16 @@ namespace GitHub.Runner.Common
return _taskClient.AppendLogContentAsync(scopeIdentifier, hubName, planId, logId, uploadStream, cancellationToken: cancellationToken); return _taskClient.AppendLogContentAsync(scopeIdentifier, hubName, planId, logId, uploadStream, cancellationToken: cancellationToken);
} }
public async Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken) public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken)
{ {
CheckConnection(); CheckConnection();
var pushedLinesViaWebsocket = false; return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
if (_websocketConnectTask != null) }
{
await _websocketConnectTask;
}
// "_websocketClient != null" implies either: We have a successful connection OR we have to attempt sending again and then reconnect public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken)
// ...in other words, if websocket client is null, we will skip sending to websocket and just use rest api calls to send data {
if (_websocketClient != null) CheckConnection();
{ return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine, cancellationToken: cancellationToken);
var linesWrapper = startLine.HasValue? new TimelineRecordFeedLinesWrapper(stepId, lines, startLine.Value): new TimelineRecordFeedLinesWrapper(stepId, lines);
var jsonData = StringUtil.ConvertToJson(linesWrapper);
try
{
totalBatchedLinesAttemptedByWebsocket++;
var jsonDataBytes = Encoding.UTF8.GetBytes(jsonData);
// break the message into chunks of 1024 bytes
for (var i = 0; i < jsonDataBytes.Length; i += 1 * 1024)
{
var lastChunk = i + (1 * 1024) >= jsonDataBytes.Length;
var chunk = new ArraySegment<byte>(jsonDataBytes, i, Math.Min(1 * 1024, jsonDataBytes.Length - i));
await _websocketClient.SendAsync(chunk, WebSocketMessageType.Text, endOfMessage:lastChunk, cancellationToken);
}
pushedLinesViaWebsocket = true;
}
catch (Exception ex)
{
failedAttemptsToPostBatchedLinesByWebsocket++;
Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
Trace.Error(ex);
if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
{
// let's consider failure percentage
if (failedAttemptsToPostBatchedLinesByWebsocket * 100 / totalBatchedLinesAttemptedByWebsocket > _minWebsocketFailurePercentageAllowed)
{
Trace.Info($"Exhausted websocket allowed retries, we will not attempt websocket connection for this job to post lines again.");
_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.InternalServerError, "Shutdown due to failures", cancellationToken);
// By setting it to null, we will ensure that we never try websocket path again for this job
_websocketClient = null;
}
}
if (_websocketClient != null)
{
var delay = BackoffTimerHelper.GetRandomBackoff(_minDelayForWebsocketReconnect, _maxDelayForWebsocketReconnect);
Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}).");
InitializeWebsocketClient(delay);
}
}
}
if (!pushedLinesViaWebsocket)
{
if (startLine.HasValue)
{
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine.Value, cancellationToken: cancellationToken);
}
else
{
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
}
}
} }
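The websocket path in this hunk serializes a batch of console lines to JSON, pushes it over the socket in 1 KB frames with only the last frame marked end-of-message, and falls back to the REST call when the send fails. A rough sketch of that chunk-and-fallback loop under the same assumptions (TrySendAsync and the helper class are illustrative names, not the runner's API):

    using System;
    using System.Net.WebSockets;
    using System.Text;
    using System.Threading;
    using System.Threading.Tasks;

    static class ConsoleLineSender
    {
        const int ChunkSize = 1024;

        // Sends a JSON payload over the websocket in 1 KB frames; returns false so the
        // caller can fall back to the REST client when the socket is unusable.
        public static async Task<bool> TrySendAsync(ClientWebSocket socket, string json, CancellationToken ct)
        {
            if (socket == null || socket.State != WebSocketState.Open)
            {
                return false;
            }

            try
            {
                var bytes = Encoding.UTF8.GetBytes(json);
                for (var i = 0; i < bytes.Length; i += ChunkSize)
                {
                    var count = Math.Min(ChunkSize, bytes.Length - i);
                    var lastChunk = i + count >= bytes.Length;
                    await socket.SendAsync(new ArraySegment<byte>(bytes, i, count),
                                           WebSocketMessageType.Text,
                                           endOfMessage: lastChunk,
                                           cancellationToken: ct);
                }
                return true;
            }
            catch (Exception)
            {
                // Caller tracks the failure rate and decides whether to reconnect or give up.
                return false;
            }
        }
    }

In the diffed code the give-up rule is a failure percentage: once more than five batches have been attempted and over half of them failed, the websocket is closed and only the REST path is used for the rest of the job; otherwise the client reconnects after a random 100–500 ms backoff.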
public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken) public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken)
@@ -315,10 +186,10 @@ namespace GitHub.Runner.Common
//----------------------------------------------------------------- //-----------------------------------------------------------------
// Action download info // Action download info
//----------------------------------------------------------------- //-----------------------------------------------------------------
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken)
{ {
CheckConnection(); CheckConnection();
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, jobId, actions, cancellationToken: cancellationToken); return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, actions, cancellationToken: cancellationToken);
} }
} }
} }
View File
@@ -71,7 +71,7 @@ namespace GitHub.Runner.Common
// Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds). // Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
// Then the dequeue will happen every 500ms. // Then the dequeue will happen every 500ms.
// In this way, customer still can get instance live console output on job start, // In this way, customer still can get instance live console output on job start,
// at the same time we can cut the load to server after the build run for more than 60s // at the same time we can cut the load to server after the build run for more than 60s
private int _webConsoleLineAggressiveDequeueCount = 0; private int _webConsoleLineAggressiveDequeueCount = 0;
private const int _webConsoleLineAggressiveDequeueLimit = 4 * 60; private const int _webConsoleLineAggressiveDequeueLimit = 4 * 60;
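As the comment above describes, the queue is drained every 250 ms for roughly the first minute (4 × 60 iterations) and every 500 ms afterwards. A tiny sketch of that delay schedule (illustrative only, not the runner's implementation):

    using System;

    static class DequeueDelay
    {
        const int AggressiveLimit = 4 * 60;   // ~60 seconds of 250 ms passes
        static readonly TimeSpan Aggressive = TimeSpan.FromMilliseconds(250);
        static readonly TimeSpan Relaxed = TimeSpan.FromMilliseconds(500);

        // Returns the delay to wait before the next dequeue pass.
        public static TimeSpan Next(ref int aggressiveCount)
        {
            if (aggressiveCount < AggressiveLimit)
            {
                aggressiveCount++;
                return Aggressive;   // keep the console output snappy right after job start
            }
            return Relaxed;          // back off once the job has been running for a while
        }
    }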
@@ -89,10 +89,6 @@ namespace GitHub.Runner.Common
{ {
Trace.Entering(); Trace.Entering();
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
_jobServer.InitializeWebsocketClient(serviceEndPoint);
if (_queueInProcess) if (_queueInProcess)
{ {
Trace.Info("No-opt, all queue process tasks are running."); Trace.Info("No-opt, all queue process tasks are running.");
@@ -160,9 +156,6 @@ namespace GitHub.Runner.Common
await ProcessTimelinesUpdateQueueAsync(runOnce: true); await ProcessTimelinesUpdateQueueAsync(runOnce: true);
Trace.Info("Timeline update queue drained."); Trace.Info("Timeline update queue drained.");
Trace.Info($"Disposing job server ...");
await _jobServer.DisposeAsync();
Trace.Info("All queue process tasks have been stopped, and all queues are drained."); Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
} }
@@ -299,7 +292,15 @@ namespace GitHub.Runner.Common
{ {
try try
{ {
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, default(CancellationToken)); // we will not requeue failed batch, since the web console lines are time sensitive.
if (batch[0].LineNumber.HasValue)
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber.Value, default(CancellationToken));
}
else
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), default(CancellationToken));
}
if (_firstConsoleOutputs) if (_firstConsoleOutputs)
{ {
@@ -488,8 +489,8 @@ namespace GitHub.Runner.Common
if (runOnce) if (runOnce)
{ {
// continue process timeline records update, // continue process timeline records update,
// we might have more records need update, // we might have more records need update,
// since we just create a new sub-timeline // since we just create a new sub-timeline
if (pendingSubtimelineUpdate) if (pendingSubtimelineUpdate)
{ {
View File
@@ -1,16 +0,0 @@
using System;
using System.Collections.ObjectModel;
namespace GitHub.Runner.Common.Util
{
public static class NodeUtil
{
private const string _defaultNodeVersion = "node16";
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] {"node12", "node16"});
public static string GetInternalNodeVersion()
{
var forcedNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion);
return !string.IsNullOrEmpty(forcedNodeVersion) && BuiltInNodeVersions.Contains(forcedNodeVersion) ? forcedNodeVersion : _defaultNodeVersion;
}
}
}
View File
@@ -10,7 +10,6 @@ using System.Net.NetworkInformation;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Services.Common; using GitHub.Services.Common;
@@ -315,12 +314,12 @@ namespace GitHub.Runner.Listener.Check
}); });
var downloadCertScript = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "downloadCert"); var downloadCertScript = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "downloadCert");
var node = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}"); var node12 = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node} \"{downloadCertScript}\"' "); result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{downloadCertScript}\"' ");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}"); result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
await processInvoker.ExecuteAsync( await processInvoker.ExecuteAsync(
hostContext.GetDirectory(WellKnownDirectory.Root), hostContext.GetDirectory(WellKnownDirectory.Root),
node, node12,
$"\"{downloadCertScript}\"", $"\"{downloadCertScript}\"",
env, env,
true, true,
View File
@@ -6,7 +6,6 @@ using System.Net;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
namespace GitHub.Runner.Listener.Check namespace GitHub.Runner.Listener.Check
@@ -145,12 +144,12 @@ namespace GitHub.Runner.Listener.Check
}); });
var makeWebRequestScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "makeWebRequest.js"); var makeWebRequestScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "makeWebRequest.js");
var node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}"); var node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node} \"{makeWebRequestScript}\"' "); result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{makeWebRequestScript}\"' ");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}"); result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
await processInvoker.ExecuteAsync( await processInvoker.ExecuteAsync(
HostContext.GetDirectory(WellKnownDirectory.Root), HostContext.GetDirectory(WellKnownDirectory.Root),
node, node12,
$"\"{makeWebRequestScript}\"", $"\"{makeWebRequestScript}\"",
env, env,
true, true,
View File
@@ -29,7 +29,6 @@ namespace GitHub.Runner.Listener
{ {
Constants.Runner.CommandLine.Flags.Check, Constants.Runner.CommandLine.Flags.Check,
Constants.Runner.CommandLine.Flags.Commit, Constants.Runner.CommandLine.Flags.Commit,
Constants.Runner.CommandLine.Flags.DisableUpdate,
Constants.Runner.CommandLine.Flags.Ephemeral, Constants.Runner.CommandLine.Flags.Ephemeral,
Constants.Runner.CommandLine.Flags.Help, Constants.Runner.CommandLine.Flags.Help,
Constants.Runner.CommandLine.Flags.Once, Constants.Runner.CommandLine.Flags.Once,
@@ -69,7 +68,6 @@ namespace GitHub.Runner.Listener
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended); public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version); public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version);
public bool Ephemeral => TestFlag(Constants.Runner.CommandLine.Flags.Ephemeral); public bool Ephemeral => TestFlag(Constants.Runner.CommandLine.Flags.Ephemeral);
public bool DisableUpdate => TestFlag(Constants.Runner.CommandLine.Flags.DisableUpdate);
// Keep this around since customers still rely on it // Keep this around since customers still rely on it
public bool RunOnce => TestFlag(Constants.Runner.CommandLine.Flags.Once); public bool RunOnce => TestFlag(Constants.Runner.CommandLine.Flags.Once);
View File
@@ -196,7 +196,6 @@ namespace GitHub.Runner.Listener.Configuration
TaskAgent agent; TaskAgent agent;
while (true) while (true)
{ {
runnerSettings.DisableUpdate = command.DisableUpdate;
runnerSettings.Ephemeral = command.Ephemeral; runnerSettings.Ephemeral = command.Ephemeral;
runnerSettings.AgentName = command.GetRunnerName(); runnerSettings.AgentName = command.GetRunnerName();
@@ -214,22 +213,11 @@ namespace GitHub.Runner.Listener.Configuration
if (command.GetReplace()) if (command.GetReplace())
{ {
// Update existing agent with new PublicKey, agent version. // Update existing agent with new PublicKey, agent version.
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate); agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral);
try try
{ {
agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent); agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
if (command.DisableUpdate &&
command.DisableUpdate != agent.DisableUpdate)
{
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'DisableUpdate' flag.");
}
if (command.Ephemeral &&
command.Ephemeral != agent.Ephemeral)
{
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'Ephemeral' flag.");
}
_term.WriteSuccessMessage("Successfully replaced the runner"); _term.WriteSuccessMessage("Successfully replaced the runner");
break; break;
} }
@@ -248,22 +236,11 @@ namespace GitHub.Runner.Listener.Configuration
else else
{ {
// Create a new agent. // Create a new agent.
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate); agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral);
try try
{ {
agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent); agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
if (command.DisableUpdate &&
command.DisableUpdate != agent.DisableUpdate)
{
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'DisableUpdate' flag.");
}
if (command.Ephemeral &&
command.Ephemeral != agent.Ephemeral)
{
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'Ephemeral' flag.");
}
_term.WriteSuccessMessage("Runner successfully added"); _term.WriteSuccessMessage("Runner successfully added");
break; break;
} }
@@ -489,7 +466,7 @@ namespace GitHub.Runner.Listener.Configuration
} }
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate) private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral)
{ {
ArgUtil.NotNull(agent, nameof(agent)); ArgUtil.NotNull(agent, nameof(agent));
agent.Authorization = new TaskAgentAuthorization agent.Authorization = new TaskAgentAuthorization
@@ -501,7 +478,6 @@ namespace GitHub.Runner.Listener.Configuration
agent.Version = BuildConstants.RunnerPackage.Version; agent.Version = BuildConstants.RunnerPackage.Version;
agent.OSDescription = RuntimeInformation.OSDescription; agent.OSDescription = RuntimeInformation.OSDescription;
agent.Ephemeral = ephemeral; agent.Ephemeral = ephemeral;
agent.DisableUpdate = disableUpdate;
agent.MaxParallelism = 1; agent.MaxParallelism = 1;
agent.Labels.Clear(); agent.Labels.Clear();
@@ -518,7 +494,7 @@ namespace GitHub.Runner.Listener.Configuration
return agent; return agent;
} }
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate) private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral)
{ {
TaskAgent agent = new TaskAgent(agentName) TaskAgent agent = new TaskAgent(agentName)
{ {
@@ -530,7 +506,6 @@ namespace GitHub.Runner.Listener.Configuration
Version = BuildConstants.RunnerPackage.Version, Version = BuildConstants.RunnerPackage.Version,
OSDescription = RuntimeInformation.OSDescription, OSDescription = RuntimeInformation.OSDescription,
Ephemeral = ephemeral, Ephemeral = ephemeral,
DisableUpdate = disableUpdate
}; };
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System)); agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
@@ -613,52 +588,32 @@ namespace GitHub.Runner.Listener.Configuration
throw new ArgumentException($"'{githubUrl}' should point to an org or repository."); throw new ArgumentException($"'{githubUrl}' should point to an org or repository.");
} }
int retryCount = 0; using (var httpClientHandler = HostContext.CreateHttpClientHandler())
while(retryCount < 3) using (var httpClient = new HttpClient(httpClientHandler))
{ {
using (var httpClientHandler = HostContext.CreateHttpClientHandler()) var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}"));
using (var httpClient = new HttpClient(httpClientHandler)) HostContext.SecretMasker.AddValue(base64EncodingToken);
{ httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}")); httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
HostContext.SecretMasker.AddValue(base64EncodingToken); httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
try
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
if (response.IsSuccessStatusCode) var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
{
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'"); if (response.IsSuccessStatusCode)
var jsonResponse = await response.Content.ReadAsStringAsync(); {
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse); Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
} var jsonResponse = await response.Content.ReadAsStringAsync();
else if(response.StatusCode == System.Net.HttpStatusCode.NotFound) return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
{ }
// It doesn't make sense to retry in this case, so just stop else
break; {
} _term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
else var errorResponse = await response.Content.ReadAsStringAsync();
{ _term.WriteError(errorResponse);
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'"); response.EnsureSuccessStatusCode();
var errorResponse = await response.Content.ReadAsStringAsync(); return null;
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
catch(Exception ex) when (retryCount < 2)
{
retryCount++;
Trace.Error($"Failed to get JIT runner token -- Atempt: {retryCount}");
Trace.Error(ex);
Trace.Info("Retrying in 5 seconds");
}
} }
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
await Task.Delay(backOff);
} }
return null;
} }
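One side of the diff above wraps the registration-token POST in up to three attempts, giving up immediately on a 404 and otherwise sleeping a random 1–5 second backoff between tries. A compact sketch of that pattern with a plain HttpClient (GetRegistrationTokenAsync, the URL parameter, and the jitter helper are illustrative, not the runner's API):

    using System;
    using System.Net;
    using System.Net.Http;
    using System.Threading.Tasks;

    static class RegistrationClient
    {
        static readonly Random Jitter = new Random();

        // POSTs to the registration-token endpoint with a bounded retry loop.
        public static async Task<string> GetRegistrationTokenAsync(HttpClient client, string url)
        {
            for (var attempt = 1; attempt <= 3; attempt++)
            {
                try
                {
                    using var response = await client.PostAsync(url, new StringContent(string.Empty));
                    if (response.IsSuccessStatusCode)
                    {
                        return await response.Content.ReadAsStringAsync();
                    }
                    if (response.StatusCode == HttpStatusCode.NotFound)
                    {
                        break;                              // retrying a 404 is pointless
                    }
                    response.EnsureSuccessStatusCode();     // throws; handled by the catch below
                }
                catch (HttpRequestException) when (attempt < 3)
                {
                    // swallow and fall through to the backoff, then try again
                }

                var backoff = TimeSpan.FromSeconds(1 + Jitter.NextDouble() * 4);   // 1–5 s
                await Task.Delay(backoff);
            }
            return null;
        }
    }

The tenant-credential request a little further down follows the same retry-and-backoff shape, so the sketch applies to both call sites.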
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent) private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
@@ -674,57 +629,35 @@ namespace GitHub.Runner.Listener.Configuration
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runner-registration"; githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runner-registration";
} }
int retryCount = 0; using (var httpClientHandler = HostContext.CreateHttpClientHandler())
while (retryCount < 3) using (var httpClient = new HttpClient(httpClientHandler))
{ {
using (var httpClientHandler = HostContext.CreateHttpClientHandler()) httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
using (var httpClient = new HttpClient(httpClientHandler)) httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
var bodyObject = new Dictionary<string, string>()
{ {
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken); {"url", githubUrl},
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents); {"runner_event", runnerEvent}
};
var bodyObject = new Dictionary<string, string>() var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
{
{"url", githubUrl},
{"runner_event", runnerEvent}
};
try if (response.IsSuccessStatusCode)
{ {
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json")); Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var jsonResponse = await response.Content.ReadAsStringAsync();
if(response.IsSuccessStatusCode) return StringUtil.ConvertFromJson<GitHubAuthResult>(jsonResponse);
{ }
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'"); else
var jsonResponse = await response.Content.ReadAsStringAsync(); {
return StringUtil.ConvertFromJson<GitHubAuthResult>(jsonResponse); _term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
} var errorResponse = await response.Content.ReadAsStringAsync();
else if(response.StatusCode == System.Net.HttpStatusCode.NotFound) _term.WriteError(errorResponse);
{ response.EnsureSuccessStatusCode();
// It doesn't make sense to retry in this case, so just stop return null;
break;
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
// Something else bad happened, let's go to our retry logic
response.EnsureSuccessStatusCode();
}
}
catch(Exception ex) when (retryCount < 2)
{
retryCount++;
Trace.Error($"Failed to get tenant credentials -- Atempt: {retryCount}");
Trace.Error(ex);
Trace.Info("Retrying in 5 seconds");
}
} }
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
await Task.Delay(backOff);
} }
return null;
} }
} }
} }
View File
@@ -48,12 +48,13 @@ namespace GitHub.Runner.Listener.Configuration
string repoOrOrgName = regex.Replace(settings.RepoOrOrgName, "-"); string repoOrOrgName = regex.Replace(settings.RepoOrOrgName, "-");
serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgName, settings.AgentName); serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgName, settings.AgentName);
if (serviceName.Length > MaxServiceNameLength)
if (serviceName.Length > 80)
{ {
Trace.Verbose($"Calculated service name is too long (> {MaxServiceNameLength} chars). Trying again by calculating a shorter name."); Trace.Verbose($"Calculated service name is too long (> 80 chars). Trying again by calculating a shorter name.");
// Add 5 to add -xxxx random number on the end
int exceededCharLength = serviceName.Length - MaxServiceNameLength + 5; int exceededCharLength = serviceName.Length - 80;
string repoOrOrgNameSubstring = StringUtil.SubstringPrefix(repoOrOrgName, MaxRepoOrgCharacters); string repoOrOrgNameSubstring = StringUtil.SubstringPrefix(repoOrOrgName, 45);
exceededCharLength -= repoOrOrgName.Length - repoOrOrgNameSubstring.Length; exceededCharLength -= repoOrOrgName.Length - repoOrOrgNameSubstring.Length;
@@ -65,10 +66,6 @@ namespace GitHub.Runner.Listener.Configuration
runnerNameSubstring = StringUtil.SubstringPrefix(settings.AgentName, settings.AgentName.Length - exceededCharLength); runnerNameSubstring = StringUtil.SubstringPrefix(settings.AgentName, settings.AgentName.Length - exceededCharLength);
} }
// Lets add a suffix with a random number to reduce the chance of collisions between runner names once we truncate
var random = new Random();
var num = random.Next(1000, 9999).ToString();
runnerNameSubstring +=$"-{num}";
serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgNameSubstring, runnerNameSubstring); serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgNameSubstring, runnerNameSubstring);
} }
@@ -76,12 +73,5 @@ namespace GitHub.Runner.Listener.Configuration
Trace.Info($"Service name '{serviceName}' display name '{serviceDisplayName}' will be used for service configuration."); Trace.Info($"Service name '{serviceName}' display name '{serviceDisplayName}' will be used for service configuration.");
} }
#if (OS_LINUX || OS_OSX)
const int MaxServiceNameLength = 150;
const int MaxRepoOrgCharacters = 70;
#elif OS_WINDOWS
const int MaxServiceNameLength = 80;
const int MaxRepoOrgCharacters = 45;
#endif
} }
} }
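One side of the diff above caps the generated service name at a per-OS maximum (80 characters on Windows, 150 on Linux/macOS), trims the repo/org prefix to a fixed budget, trims the runner name by whatever still exceeds the cap, and appends a random 4-digit suffix to reduce collisions between truncated names. A sketch of the same arithmetic, assuming a string.Format-style pattern with two placeholders (the constants mirror the Windows values; the class and method names are illustrative):

    using System;

    static class ServiceNameHelper
    {
        const int MaxServiceNameLength = 80;
        const int MaxRepoOrgCharacters = 45;

        public static string Build(string pattern, string repoOrOrg, string runnerName)
        {
            var name = string.Format(pattern, repoOrOrg, runnerName);
            if (name.Length <= MaxServiceNameLength)
            {
                return name;
            }

            // Reserve 5 characters for the "-xxxx" random suffix added below.
            var excess = name.Length - MaxServiceNameLength + 5;

            var repoPart = repoOrOrg.Length > MaxRepoOrgCharacters
                ? repoOrOrg.Substring(0, MaxRepoOrgCharacters)
                : repoOrOrg;
            excess -= repoOrOrg.Length - repoPart.Length;

            var runnerPart = excess > 0 && excess < runnerName.Length
                ? runnerName.Substring(0, runnerName.Length - excess)
                : runnerName;

            // Random suffix lowers the chance that two truncated names collide.
            runnerPart += $"-{new Random().Next(1000, 9999)}";
            return string.Format(pattern, repoPart, runnerPart);
        }
    }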
View File
@@ -285,7 +285,7 @@ namespace GitHub.Runner.Listener
{ {
// at this point, the job execution might encounter some dead lock and even not able to be cancelled. // at this point, the job execution might encounter some dead lock and even not able to be cancelled.
// no need to localize the exception string should never happen. // no need to localize the exception string should never happen.
throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be cancelled within 45 seconds."); throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be canceled within 45 seconds.");
} }
} }
else else
@@ -346,10 +346,7 @@ namespace GitHub.Runner.Listener
} }
var term = HostContext.GetService<ITerminal>(); var term = HostContext.GetService<ITerminal>();
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
string workflowName = message.Variables["system.workflowFilePath"].Value.Split('/').LastOrDefault();
string additionalInfo = string.IsNullOrEmpty(workflowName) ? $"(in repository \"{_runnerSettings.RepoOrOrgName}\")" : $"(workflow \"{workflowName}\" in repository \"{_runnerSettings.RepoOrOrgName}\")";
term.WriteLine($"{DateTime.UtcNow:u}: Running job: \"{message.JobDisplayName}\" {additionalInfo}");
// first job request renew succeed. // first job request renew succeed.
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>(); TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
@@ -366,7 +363,7 @@ namespace GitHub.Runner.Listener
Trace.Info($"Start renew job request {requestId} for job {message.JobId}."); Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token); Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// wait till first renew succeed or job request is cancelled // wait till first renew succeed or job request is canceled
// not even start worker if the first renew fail // not even start worker if the first renew fail
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken)); await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
@@ -534,7 +531,7 @@ namespace GitHub.Runner.Listener
TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode); TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode);
Trace.Info($"finish job request for job {message.JobId} with result: {result}"); Trace.Info($"finish job request for job {message.JobId} with result: {result}");
term.WriteLine($"{DateTime.UtcNow:u}: Job \"{message.JobDisplayName}\" {additionalInfo} completed with result: {result}"); term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {result}");
Trace.Info($"Stop renew job request for job {message.JobId}."); Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock // stop renew lock
@@ -630,7 +627,7 @@ namespace GitHub.Runner.Listener
} }
Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}"); Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}");
term.WriteLine($"{DateTime.UtcNow:u}: Job \"{message.JobDisplayName}\" {additionalInfo} completed with result: {resultOnAbandonOrCancel}"); term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {resultOnAbandonOrCancel}");
// complete job request with cancel result, stop renew lock, job has finished. // complete job request with cancel result, stop renew lock, job has finished.
Trace.Info($"Stop renew job request for job {message.JobId}."); Trace.Info($"Stop renew job request for job {message.JobId}.");
@@ -707,7 +704,7 @@ namespace GitHub.Runner.Listener
{ {
// OperationCanceledException may be caused by an http timeout or _lockRenewalTokenSource.Cancel(); // OperationCanceledException may be caused by an http timeout or _lockRenewalTokenSource.Cancel();
// Stop renew only on cancellation token fired. // Stop renew only on cancellation token fired.
Trace.Info($"job renew has been cancelled, stop renew job request {requestId}."); Trace.Info($"job renew has been canceled, stop renew job request {requestId}.");
return; return;
} }
catch (Exception ex) catch (Exception ex)
@@ -765,7 +762,7 @@ namespace GitHub.Runner.Listener
} }
catch (OperationCanceledException) when (token.IsCancellationRequested) catch (OperationCanceledException) when (token.IsCancellationRequested)
{ {
Trace.Info($"job renew has been cancelled, stop renew job request {requestId}."); Trace.Info($"job renew has been canceled, stop renew job request {requestId}.");
} }
} }
else else
View File
@@ -1,18 +1,18 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Listener.Configuration;
using GitHub.Services.Common;
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi; using System.Security.Cryptography;
using GitHub.Runner.Common; using System.IO;
using GitHub.Runner.Listener.Configuration; using System.Text;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.OAuth; using GitHub.Services.OAuth;
using System.Diagnostics;
using System.Runtime.InteropServices;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Listener namespace GitHub.Runner.Listener
{ {
@@ -33,7 +33,6 @@ namespace GitHub.Runner.Listener
private IRunnerServer _runnerServer; private IRunnerServer _runnerServer;
private TaskAgentSession _session; private TaskAgentSession _session;
private TimeSpan _getNextMessageRetryInterval; private TimeSpan _getNextMessageRetryInterval;
private bool _accessTokenRevoked = false;
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30); private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4); private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30); private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
@@ -112,7 +111,6 @@ namespace GitHub.Runner.Listener
catch (TaskAgentAccessTokenExpiredException) catch (TaskAgentAccessTokenExpiredException)
{ {
Trace.Info("Runner OAuth token has been revoked. Session creation failed."); Trace.Info("Runner OAuth token has been revoked. Session creation failed.");
_accessTokenRevoked = true;
throw; throw;
} }
catch (Exception ex) catch (Exception ex)
@@ -156,16 +154,9 @@ namespace GitHub.Runner.Listener
{ {
if (_session != null && _session.SessionId != Guid.Empty) if (_session != null && _session.SessionId != Guid.Empty)
{ {
if (!_accessTokenRevoked) using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
{ {
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30))) await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
{
await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
}
}
else
{
Trace.Warning("Runner OAuth token has been revoked. Skip deleting session.");
} }
} }
} }
@@ -214,7 +205,6 @@ namespace GitHub.Runner.Listener
catch (TaskAgentAccessTokenExpiredException) catch (TaskAgentAccessTokenExpiredException)
{ {
Trace.Info("Runner OAuth token has been revoked. Unable to pull message."); Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
_accessTokenRevoked = true;
throw; throw;
} }
catch (Exception ex) catch (Exception ex)
View File
@@ -12,7 +12,6 @@ using GitHub.Runner.Common;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using System.Linq; using System.Linq;
using GitHub.Runner.Listener.Check; using GitHub.Runner.Listener.Check;
using System.Collections.Generic;
namespace GitHub.Runner.Listener namespace GitHub.Runner.Listener
{ {
@@ -408,27 +407,6 @@ namespace GitHub.Runner.Listener
{ {
autoUpdateInProgress = true; autoUpdateInProgress = true;
var runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body); var runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
#if DEBUG
// Can mock the update for testing
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
{
// The mock_update_messages.json file should be an object with keys being the current version and values being the targeted mock version object
// Example: { "2.283.2": {"targetVersion":"2.284.1"}, "2.284.1": {"targetVersion":"2.285.0"}}
var mockUpdatesPath = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), "mock_update_messages.json");
if (File.Exists(mockUpdatesPath))
{
var mockUpdateMessages = JsonUtility.FromString<Dictionary<string, AgentRefreshMessage>>(File.ReadAllText(mockUpdatesPath));
if (mockUpdateMessages.ContainsKey(BuildConstants.RunnerPackage.Version))
{
var mockTargetVersion = mockUpdateMessages[BuildConstants.RunnerPackage.Version].TargetVersion;
_term.WriteLine($"Mocking update, using version {mockTargetVersion} instead of {runnerUpdateMessage.TargetVersion}");
Trace.Info($"Mocking update, using version {mockTargetVersion} instead of {runnerUpdateMessage.TargetVersion}");
runnerUpdateMessage = new AgentRefreshMessage(runnerUpdateMessage.AgentId, mockTargetVersion, runnerUpdateMessage.Timeout);
}
}
}
#endif
var selfUpdater = HostContext.GetService<ISelfUpdater>(); var selfUpdater = HostContext.GetService<ISelfUpdater>();
selfUpdateTask = selfUpdater.SelfUpdate(runnerUpdateMessage, jobDispatcher, false, HostContext.RunnerShutdownToken); selfUpdateTask = selfUpdater.SelfUpdate(runnerUpdateMessage, jobDispatcher, false, HostContext.RunnerShutdownToken);
Trace.Info("Refresh message received, kick-off selfupdate background process."); Trace.Info("Refresh message received, kick-off selfupdate background process.");
@@ -562,7 +540,6 @@ Config Options:
--work string Relative runner work directory (default {Constants.Path.WorkDirectory}) --work string Relative runner work directory (default {Constants.Path.WorkDirectory})
--replace Replace any existing runner with the same name (default false) --replace Replace any existing runner with the same name (default false)
--pat GitHub personal access token used for checking network connectivity when executing `.{separator}run.{ext} --check` --pat GitHub personal access token used for checking network connectivity when executing `.{separator}run.{ext} --check`
--disableupdate Disable self-hosted runner automatic update to the latest released version`
--ephemeral Configure the runner to only take one job and then let the service un-configure the runner after the job finishes (default false)"); --ephemeral Configure the runner to only take one job and then let the service un-configure the runner after the job finishes (default false)");
#if OS_WINDOWS #if OS_WINDOWS
View File
@@ -12,7 +12,6 @@ using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Services.Common; using GitHub.Services.Common;
using GitHub.Services.WebApi; using GitHub.Services.WebApi;
@@ -105,7 +104,7 @@ namespace GitHub.Runner.Listener
} }
} }
await DownloadLatestRunner(token, updateMessage.TargetVersion); await DownloadLatestRunner(token);
Trace.Info($"Download latest runner and unzip into runner root."); Trace.Info($"Download latest runner and unzip into runner root.");
// wait till all running job finish // wait till all running job finish
@@ -207,7 +206,7 @@ namespace GitHub.Runner.Listener
/// </summary> /// </summary>
/// <param name="token"></param> /// <param name="token"></param>
/// <returns></returns> /// <returns></returns>
private async Task DownloadLatestRunner(CancellationToken token, string targetVersion) private async Task DownloadLatestRunner(CancellationToken token)
{ {
string latestRunnerDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), Constants.Path.UpdateDirectory); string latestRunnerDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), Constants.Path.UpdateDirectory);
IOUtil.DeleteDirectory(latestRunnerDirectory, token); IOUtil.DeleteDirectory(latestRunnerDirectory, token);
@@ -267,58 +266,15 @@ namespace GitHub.Runner.Listener
try try
{ {
#if DEBUG archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token);
// Much of the update process (targetVersion, archive) is server-side, this is a way to control it from here for testing specific update scenarios
// Add files like 'runner2.281.2.tar.gz' or 'runner2.283.0.zip' (depending on your platform) to your runner root folder
// Note that runners still need to be older than the server's runner version in order to receive an 'AgentRefreshMessage' and trigger this update
// Wrapped in #if DEBUG as this should not be in the RELEASE build
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
{
var waitForDebugger = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE_WAIT_FOR_DEBUGGER"));
if (waitForDebugger)
{
int waitInSeconds = 20;
while (!Debugger.IsAttached && waitInSeconds-- > 0)
{
await Task.Delay(1000);
}
Debugger.Break();
}
if (_targetPackage.Platform.StartsWith("win"))
{
archiveFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"runner{targetVersion}.zip");
}
else
{
archiveFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"runner{targetVersion}.tar.gz");
}
if (File.Exists(archiveFile))
{
_updateTrace.Enqueue($"Mocking update with file: '{archiveFile}' and targetVersion: '{targetVersion}', nothing is downloaded");
_terminal.WriteLine($"Mocking update with file: '{archiveFile}' and targetVersion: '{targetVersion}', nothing is downloaded");
}
else
{
archiveFile = null;
_terminal.WriteLine($"Mock runner archive not found at {archiveFile} for target version {targetVersion}, proceeding with download instead");
_updateTrace.Enqueue($"Mock runner archive not found at {archiveFile} for target version {targetVersion}, proceeding with download instead");
}
}
#endif
// archiveFile is not null only if we mocked it above
if (string.IsNullOrEmpty(archiveFile)) if (string.IsNullOrEmpty(archiveFile))
{ {
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token); throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
if (string.IsNullOrEmpty(archiveFile))
{
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
}
await ValidateRunnerHash(archiveFile, packageHashValue);
} }
await ValidateRunnerHash(archiveFile, packageHashValue);
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token); await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
} }
catch (Exception ex) when (runtimeTrimmed || externalsTrimmed) catch (Exception ex) when (runtimeTrimmed || externalsTrimmed)
@@ -504,7 +460,7 @@ namespace GitHub.Runner.Listener
} }
catch (OperationCanceledException) when (token.IsCancellationRequested) catch (OperationCanceledException) when (token.IsCancellationRequested)
{ {
Trace.Info($"Runner download has been cancelled."); Trace.Info($"Runner download has been canceled.");
throw; throw;
} }
catch (Exception ex) catch (Exception ex)
@@ -805,7 +761,7 @@ namespace GitHub.Runner.Listener
IOUtil.CopyDirectory(_externalsCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory), token); IOUtil.CopyDirectory(_externalsCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory), token);
// try run node.js to see if current node.js works fine after copy over to new location. // try run node.js to see if current node.js works fine after copy over to new location.
var nodeVersions = NodeUtil.BuiltInNodeVersions; var nodeVersions = new[] { "node12", "node16" };
foreach (var nodeVersion in nodeVersions) foreach (var nodeVersion in nodeVersions)
{ {
var newNodeBinary = Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory, nodeVersion, "bin", $"node{IOUtil.ExeExtension}"); var newNodeBinary = Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory, nodeVersion, "bin", $"node{IOUtil.ExeExtension}");
@@ -1070,7 +1026,7 @@ namespace GitHub.Runner.Listener
var stopWatch = Stopwatch.StartNew(); var stopWatch = Stopwatch.StartNew();
string binDir = HostContext.GetDirectory(WellKnownDirectory.Bin); string binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}"); string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
string hashFilesScript = Path.Combine(binDir, "hashFiles"); string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty; var hashResult = string.Empty;
@@ -1104,8 +1060,6 @@ namespace GitHub.Runner.Listener
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"", arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env, environment: env,
requireExitCodeZero: false, requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: token); cancellationToken: token);
if (exitCode != 0) if (exitCode != 0)
View File
@@ -469,7 +469,7 @@ namespace GitHub.Runner.Plugins.Artifact
try try
{ {
uploadTimer.Restart(); uploadTimer.Restart();
using (HttpResponseMessage response = await _fileContainerHttpClient.UploadFileAsync(_containerId, itemPath, fs, _projectId, cancellationToken: token)) using (HttpResponseMessage response = await _fileContainerHttpClient.UploadFileAsync(_containerId, itemPath, fs, _projectId, cancellationToken: token, chunkSize: 4 * 1024 * 1024))
{ {
if (response == null || response.StatusCode != HttpStatusCode.Created) if (response == null || response.StatusCode != HttpStatusCode.Created)
{ {
View File
@@ -166,7 +166,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
} }
else else
{ {
// delete the index.lock file left by previous cancelled build or any operation cause git.exe crash last time. // delete the index.lock file left by previous canceled build or any operation cause git.exe crash last time.
string lockFile = Path.Combine(targetPath, ".git\\index.lock"); string lockFile = Path.Combine(targetPath, ".git\\index.lock");
if (File.Exists(lockFile)) if (File.Exists(lockFile))
{ {
@@ -181,7 +181,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
} }
} }
// delete the shallow.lock file left by previous cancelled build or any operation cause git.exe crash last time. // delete the shallow.lock file left by previous canceled build or any operation cause git.exe crash last time.
string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock"); string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock");
if (File.Exists(shallowLockFile)) if (File.Exists(shallowLockFile))
{ {
View File
@@ -150,7 +150,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
} }
else else
{ {
// delete the index.lock file left by previous cancelled build or any operation cause git.exe crash last time. // delete the index.lock file left by previous canceled build or any operation cause git.exe crash last time.
string lockFile = Path.Combine(targetPath, ".git\\index.lock"); string lockFile = Path.Combine(targetPath, ".git\\index.lock");
if (File.Exists(lockFile)) if (File.Exists(lockFile))
{ {
@@ -165,7 +165,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
} }
} }
// delete the shallow.lock file left by previous cancelled build or any operation cause git.exe crash last time. // delete the shallow.lock file left by previous canceled build or any operation cause git.exe crash last time.
string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock"); string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock");
if (File.Exists(shallowLockFile)) if (File.Exists(shallowLockFile))
{ {
View File
@@ -108,7 +108,7 @@ namespace GitHub.Runner.Sdk
} }
// Create a new token source for the parallel query. The parallel query should be // Create a new token source for the parallel query. The parallel query should be
// cancelled after the first error is encountered. Otherwise the number of exceptions // canceled after the first error is encountered. Otherwise the number of exceptions
// could get out of control for a large directory with access denied on every file. // could get out of control for a large directory with access denied on every file.
using (var tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken)) using (var tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken))
{ {
View File
@@ -178,7 +178,7 @@ namespace GitHub.Runner.Worker
Message = $"Invoked ::stopCommand:: with token: [{stopToken}]", Message = $"Invoked ::stopCommand:: with token: [{stopToken}]",
Type = JobTelemetryType.ActionCommand Type = JobTelemetryType.ActionCommand
}; };
context.Global.JobTelemetry.Add(telemetry); context.JobTelemetry.Add(telemetry);
} }
if (isTokenInvalid && !allowUnsecureStopCommandTokens) if (isTokenInvalid && !allowUnsecureStopCommandTokens)
View File
@@ -651,10 +651,10 @@ namespace GitHub.Runner.Worker
{ {
try try
{ {
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken); actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
break; break;
} }
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is cancelled. catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is canceled.
{ {
// UnresolvableActionDownloadInfoException is a 422 client error, don't retry // UnresolvableActionDownloadInfoException is a 422 client error, don't retry
// Some possible cases are: // Some possible cases are:
View File
@@ -1,17 +1,18 @@
using System; using System;
using System.Collections.Generic; using System.IO;
using System.Text;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating; using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens; using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines; using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData; using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating; using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util; using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Handlers; using GitHub.Runner.Worker.Handlers;
using Pipelines = GitHub.DistributedTask.Pipelines; using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System.Collections.Generic;
namespace GitHub.Runner.Worker namespace GitHub.Runner.Worker
{ {
@@ -140,7 +141,21 @@ namespace GitHub.Runner.Worker
IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>(); IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
ExecutionContext.WriteWebhookPayload(); // Makes directory for event_path data
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
Directory.CreateDirectory(workflowDirectory);
var gitHubEvent = ExecutionContext.GetGitHubContext("event");
// adds the GitHub event path/file if the event exists
if (gitHubEvent != null)
{
var workflowFile = Path.Combine(workflowDirectory, "event.json");
Trace.Info($"Write event payload to {workflowFile}");
File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
ExecutionContext.SetGitHubContext("event_path", workflowFile);
}
// Set GITHUB_ACTION_REPOSITORY if this Action is from a repository // Set GITHUB_ACTION_REPOSITORY if this Action is from a repository
if (Action.Reference is Pipelines.RepositoryPathReference repoPathReferenceAction && if (Action.Reference is Pipelines.RepositoryPathReference repoPathReferenceAction &&
@@ -259,8 +274,8 @@ namespace GitHub.Runner.Worker
actionDirectory: definition.Directory, actionDirectory: definition.Directory,
localActionContainerSetupSteps: localActionContainerSetupSteps); localActionContainerSetupSteps: localActionContainerSetupSteps);
// Print out action details and log telemetry // Print out action details
handler.PrepareExecution(Stage); handler.PrintActionDetails(Stage);
// Run the task. // Run the task.
try try


@@ -15,8 +15,8 @@ using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData; using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating; using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util; using GitHub.Runner.Common.Util;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container; using GitHub.Runner.Worker.Container;
using GitHub.Services.WebApi; using GitHub.Services.WebApi;
@@ -52,7 +52,8 @@ namespace GitHub.Runner.Worker
Dictionary<string, string> IntraActionState { get; } Dictionary<string, string> IntraActionState { get; }
Dictionary<string, VariableValue> JobOutputs { get; } Dictionary<string, VariableValue> JobOutputs { get; }
ActionsEnvironmentReference ActionsEnvironment { get; } ActionsEnvironmentReference ActionsEnvironment { get; }
ActionsStepTelemetry StepTelemetry { get; } List<ActionsStepTelemetry> ActionsStepsTelemetry { get; }
List<JobTelemetry> JobTelemetry { get; }
DictionaryContextData ExpressionValues { get; } DictionaryContextData ExpressionValues { get; }
IList<IFunctionInfo> ExpressionFunctions { get; } IList<IFunctionInfo> ExpressionFunctions { get; }
JobContext JobContext { get; } JobContext JobContext { get; }
@@ -108,17 +109,12 @@ namespace GitHub.Runner.Worker
// others // others
void ForceTaskComplete(); void ForceTaskComplete();
void RegisterPostJobStep(IStep step); void RegisterPostJobStep(IStep step);
void PublishStepTelemetry();
void WriteWebhookPayload();
} }
public sealed class ExecutionContext : RunnerService, IExecutionContext public sealed class ExecutionContext : RunnerService, IExecutionContext
{ {
private const int _maxIssueCount = 10; private const int _maxIssueCount = 10;
private const int _throttlingDelayReportThreshold = 10 * 1000; // Don't report throttling with less than 10 seconds delay private const int _throttlingDelayReportThreshold = 10 * 1000; // Don't report throttling with less than 10 seconds delay
private const int _maxIssueMessageLength = 4096; // Don't send issue with huge message since we can't forward them from actions to check annotation.
private const int _maxIssueCountInTelemetry = 3; // Only send the first 3 issues to telemetry
private const int _maxIssueMessageLengthInTelemetry = 256; // Only send the first 256 characters of issue message to telemetry
private readonly TimelineRecord _record = new TimelineRecord(); private readonly TimelineRecord _record = new TimelineRecord();
private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new Dictionary<Guid, TimelineRecord>(); private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new Dictionary<Guid, TimelineRecord>();
@@ -143,7 +139,6 @@ namespace GitHub.Runner.Worker
// only job level ExecutionContext will track throttling delay. // only job level ExecutionContext will track throttling delay.
private long _totalThrottlingDelayInMilliseconds = 0; private long _totalThrottlingDelayInMilliseconds = 0;
private bool _stepTelemetryPublished = false;
public Guid Id => _record.Id; public Guid Id => _record.Id;
public Guid EmbeddedId { get; private set; } public Guid EmbeddedId { get; private set; }
@@ -157,7 +152,8 @@ namespace GitHub.Runner.Worker
public Dictionary<string, VariableValue> JobOutputs { get; private set; } public Dictionary<string, VariableValue> JobOutputs { get; private set; }
public ActionsEnvironmentReference ActionsEnvironment { get; private set; } public ActionsEnvironmentReference ActionsEnvironment { get; private set; }
public ActionsStepTelemetry StepTelemetry { get; } = new ActionsStepTelemetry(); public List<ActionsStepTelemetry> ActionsStepsTelemetry { get; private set; }
public List<JobTelemetry> JobTelemetry { get; private set; }
public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData(); public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
public IList<IFunctionInfo> ExpressionFunctions { get; } = new List<IFunctionInfo>(); public IList<IFunctionInfo> ExpressionFunctions { get; } = new List<IFunctionInfo>();
@@ -277,9 +273,9 @@ namespace GitHub.Runner.Worker
{ {
Trace.Info($"'post' of '{actionRunner.DisplayName}' already push to child post step stack."); Trace.Info($"'post' of '{actionRunner.DisplayName}' already push to child post step stack.");
} }
else else
{ {
Root.EmbeddedStepsWithPostRegistered[actionRunner.Action.Id] = actionRunner.Condition; Root.EmbeddedStepsWithPostRegistered[actionRunner.Action.Id] = actionRunner.Condition;
} }
return; return;
} }
@@ -298,20 +294,7 @@ namespace GitHub.Runner.Worker
Root.PostJobSteps.Push(step); Root.PostJobSteps.Push(step);
} }
public IExecutionContext CreateChild( public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, ActionRunStage stage, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null)
Guid recordId,
string displayName,
string refName,
string scopeName,
string contextName,
ActionRunStage stage,
Dictionary<string, string> intraActionState = null,
int? recordOrder = null,
IPagingLogger logger = null,
bool isEmbedded = false,
CancellationTokenSource cancellationTokenSource = null,
Guid embeddedId = default(Guid),
string siblingScopeName = null)
{ {
Trace.Entering(); Trace.Entering();
@@ -323,6 +306,7 @@ namespace GitHub.Runner.Worker
child.Stage = stage; child.Stage = stage;
child.EmbeddedId = embeddedId; child.EmbeddedId = embeddedId;
child.SiblingScopeName = siblingScopeName; child.SiblingScopeName = siblingScopeName;
child.JobTelemetry = JobTelemetry;
if (intraActionState == null) if (intraActionState == null)
{ {
child.IntraActionState = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase); child.IntraActionState = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -362,9 +346,6 @@ namespace GitHub.Runner.Worker
} }
child.IsEmbedded = isEmbedded; child.IsEmbedded = isEmbedded;
child.StepTelemetry.StepId = recordId;
child.StepTelemetry.Stage = stage.ToString();
child.StepTelemetry.IsEmbedded = isEmbedded;
return child; return child;
} }
@@ -373,13 +354,7 @@ namespace GitHub.Runner.Worker
/// An embedded execution context shares the same record ID, record name, logger, /// An embedded execution context shares the same record ID, record name, logger,
/// and a linked cancellation token. /// and a linked cancellation token.
/// </summary> /// </summary>
public IExecutionContext CreateEmbeddedChild( public IExecutionContext CreateEmbeddedChild(string scopeName, string contextName, Guid embeddedId, ActionRunStage stage, Dictionary<string, string> intraActionState = null, string siblingScopeName = null)
string scopeName,
string contextName,
Guid embeddedId,
ActionRunStage stage,
Dictionary<string, string> intraActionState = null,
string siblingScopeName = null)
{ {
return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token), intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName); return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token), intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName);
} }
@@ -429,8 +404,6 @@ namespace GitHub.Runner.Worker
} }
} }
PublishStepTelemetry();
if (Root != this) if (Root != this)
{ {
// only dispose TokenSource for step level ExecutionContext // only dispose TokenSource for step level ExecutionContext
@@ -546,20 +519,11 @@ namespace GitHub.Runner.Worker
} }
issue.Message = HostContext.SecretMasker.MaskSecrets(issue.Message); issue.Message = HostContext.SecretMasker.MaskSecrets(issue.Message);
if (issue.Message.Length > _maxIssueMessageLength)
{
issue.Message = issue.Message[.._maxIssueMessageLength];
}
// Tracking the line number (logFileLineNumber) and step number (stepNumber) for each issue that gets created
// Actions UI from the run summary page use both values to easily link to an exact locations in logs where annotations originate from
if (_record.Order != null)
{
issue.Data["stepNumber"] = _record.Order.ToString();
}
if (issue.Type == IssueType.Error) if (issue.Type == IssueType.Error)
{ {
// tracking line number for each issue in log file
// log UI use this to navigate from issue to log
if (!string.IsNullOrEmpty(logMessage)) if (!string.IsNullOrEmpty(logMessage))
{ {
long logLineNumber = Write(WellKnownTags.Error, logMessage); long logLineNumber = Write(WellKnownTags.Error, logMessage);
@@ -575,6 +539,8 @@ namespace GitHub.Runner.Worker
} }
else if (issue.Type == IssueType.Warning) else if (issue.Type == IssueType.Warning)
{ {
// tracking line number for each issue in log file
// log UI use this to navigate from issue to log
if (!string.IsNullOrEmpty(logMessage)) if (!string.IsNullOrEmpty(logMessage))
{ {
long logLineNumber = Write(WellKnownTags.Warning, logMessage); long logLineNumber = Write(WellKnownTags.Warning, logMessage);
@@ -590,6 +556,9 @@ namespace GitHub.Runner.Worker
} }
else if (issue.Type == IssueType.Notice) else if (issue.Type == IssueType.Notice)
{ {
// tracking line number for each issue in log file
// log UI use this to navigate from issue to log
if (!string.IsNullOrEmpty(logMessage)) if (!string.IsNullOrEmpty(logMessage))
{ {
long logLineNumber = Write(WellKnownTags.Notice, logMessage); long logLineNumber = Write(WellKnownTags.Notice, logMessage);
@@ -679,29 +648,22 @@ namespace GitHub.Runner.Worker
// Variables // Variables
Global.Variables = new Variables(HostContext, message.Variables); Global.Variables = new Variables(HostContext, message.Variables);
if (Global.Variables.GetBoolean("DistributedTask.ForceInternalNodeVersionOnRunnerTo12") ?? false)
{
Environment.SetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion, "node12");
}
// Environment variables shared across all actions // Environment variables shared across all actions
Global.EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer); Global.EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
// Job defaults shared across all actions // Job defaults shared across all actions
Global.JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase); Global.JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase);
// Job Telemetry
Global.JobTelemetry = new List<JobTelemetry>();
// ActionsStepTelemetry for entire job
Global.StepsTelemetry = new List<ActionsStepTelemetry>();
// Job Outputs // Job Outputs
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase); JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
// Actions environment // Actions environment
ActionsEnvironment = message.ActionsEnvironment; ActionsEnvironment = message.ActionsEnvironment;
// ActionsStepTelemetry
ActionsStepsTelemetry = new List<ActionsStepTelemetry>();
JobTelemetry = new List<JobTelemetry>();
// Service container info // Service container info
Global.ServiceContainers = new List<ContainerInfo>(); Global.ServiceContainers = new List<ContainerInfo>();
@@ -933,83 +895,6 @@ namespace GitHub.Runner.Worker
return Root._matchers ?? Array.Empty<IssueMatcherConfig>(); return Root._matchers ?? Array.Empty<IssueMatcherConfig>();
} }
public void PublishStepTelemetry()
{
if (!_stepTelemetryPublished)
{
// Add to the global steps telemetry only if we have something to log.
if (!string.IsNullOrEmpty(StepTelemetry?.Type))
{
if (!IsEmbedded)
{
StepTelemetry.Result = _record.Result;
}
if (!IsEmbedded &&
_record.FinishTime != null &&
_record.StartTime != null)
{
StepTelemetry.ExecutionTimeInSeconds = (int)Math.Ceiling((_record.FinishTime - _record.StartTime)?.TotalSeconds ?? 0);
}
if (!IsEmbedded &&
_record.Issues.Count > 0)
{
foreach (var issue in _record.Issues)
{
if ((issue.Type == IssueType.Error || issue.Type == IssueType.Warning) &&
!string.IsNullOrEmpty(issue.Message))
{
string issueTelemetry;
if (issue.Message.Length > _maxIssueMessageLengthInTelemetry)
{
issueTelemetry = $"{issue.Message[.._maxIssueMessageLengthInTelemetry]}";
}
else
{
issueTelemetry = issue.Message;
}
StepTelemetry.ErrorMessages.Add(issueTelemetry);
// Only send over the first 3 issues to avoid sending too much data.
if (StepTelemetry.ErrorMessages.Count >= _maxIssueCountInTelemetry)
{
break;
}
}
}
}
Trace.Info($"Publish step telemetry for current step {StringUtil.ConvertToJson(StepTelemetry)}.");
Global.StepsTelemetry.Add(StepTelemetry);
_stepTelemetryPublished = true;
}
}
else
{
Trace.Info($"Step telemetry has already been published.");
}
}
public void WriteWebhookPayload()
{
// Makes directory for event_path data
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
Directory.CreateDirectory(workflowDirectory);
var gitHubEvent = GetGitHubContext("event");
// adds the GitHub event path/file if the event exists
if (gitHubEvent != null)
{
var workflowFile = Path.Combine(workflowDirectory, "event.json");
Trace.Info($"Write event payload to {workflowFile}");
File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
SetGitHubContext("event_path", workflowFile);
}
}
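For orientation, WriteWebhookPayload above is the producer side of the event-payload contract: it writes event.json and stores its path in the github context. A consumer-side sketch (illustrative only; GITHUB_EVENT_PATH is the environment-variable form of that event_path value):

using System;
using System.IO;

static class EventPayloadSketch
{
    public static string ReadWebhookPayload()
    {
        // Path produced by WriteWebhookPayload and exposed to steps.
        var eventPath = Environment.GetEnvironmentVariable("GITHUB_EVENT_PATH");
        if (string.IsNullOrEmpty(eventPath) || !File.Exists(eventPath))
        {
            return string.Empty;
        }
        return File.ReadAllText(eventPath); // contents of event.json
    }
}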
private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order) private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order)
{ {
_mainTimelineId = timelineId; _mainTimelineId = timelineId;


@@ -7,7 +7,6 @@ using GitHub.Runner.Sdk;
using System.Reflection; using System.Reflection;
using System.Threading; using System.Threading;
using System.Collections.Generic; using System.Collections.Generic;
using GitHub.Runner.Common.Util;
namespace GitHub.Runner.Worker.Expressions namespace GitHub.Runner.Worker.Expressions
{ {
@@ -63,7 +62,7 @@ namespace GitHub.Runner.Worker.Expressions
string binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location); string binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
string runnerRoot = new DirectoryInfo(binDir).Parent.FullName; string runnerRoot = new DirectoryInfo(binDir).Parent.FullName;
string node = Path.Combine(runnerRoot, "externals", NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}"); string node = Path.Combine(runnerRoot, "externals", "node12", "bin", $"node{IOUtil.ExeExtension}");
string hashFilesScript = Path.Combine(binDir, "hashFiles"); string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty; var hashResult = string.Empty;
var p = new ProcessInvoker(new HashFilesTrace(context.Trace)); var p = new ProcessInvoker(new HashFilesTrace(context.Trace));


@@ -181,10 +181,6 @@ namespace GitHub.Runner.Worker
{ {
throw new Exception($"Invalid environment variable value. Matching delimiter not found '{delimiter}'"); throw new Exception($"Invalid environment variable value. Matching delimiter not found '{delimiter}'");
} }
if (newline == null)
{
throw new Exception($"Invalid environment variable value. EOF marker missing new line.");
}
endIndex = index - newline.Length; endIndex = index - newline.Length;
tempLine = ReadLine(text, ref index, out newline); tempLine = ReadLine(text, ref index, out newline);
} }
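The delimiter handling above parses the heredoc-style syntax that steps may write into a file command (for example the environment file); a hypothetical input that exercises it, with EOF as the chosen delimiter:

// Hypothetical file-command content: NOTES is assigned two lines of text,
// terminated by the matching "EOF" delimiter that the parser above searches for.
var text = string.Join("\n",
    "NOTES<<EOF",
    "first line",
    "second line",
    "EOF",
    "");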
@@ -263,72 +259,4 @@ namespace GitHub.Runner.Worker
return text.Substring(originalIndex, lfIndex - originalIndex); return text.Substring(originalIndex, lfIndex - originalIndex);
} }
} }
public sealed class CreateStepSummaryCommand : RunnerService, IFileCommandExtension
{
private const int _attachmentSizeLimit = 128 * 1024;
public string ContextName => "step_summary";
public string FilePrefix => "step_summary_";
public Type ExtensionType => typeof(IFileCommandExtension);
public void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container)
{
if (!context.Global.Variables.GetBoolean("DistributedTask.UploadStepSummary") ?? true)
{
Trace.Info("Step Summary is disabled; skipping attachment upload");
return;
}
if (String.IsNullOrEmpty(filePath) || !File.Exists(filePath))
{
Trace.Info($"Step Summary file ({filePath}) does not exist; skipping attachment upload");
return;
}
try
{
var fileSize = new FileInfo(filePath).Length;
if (fileSize == 0)
{
Trace.Info($"Step Summary file ({filePath}) is empty; skipping attachment upload");
return;
}
if (fileSize > _attachmentSizeLimit)
{
context.Error(String.Format(Constants.Runner.UnsupportedSummarySize, _attachmentSizeLimit / 1024, fileSize / 1024));
Trace.Info($"Step Summary file ({filePath}) is too large ({fileSize} bytes); skipping attachment upload");
return;
}
Trace.Verbose($"Step Summary file exists: {filePath} and has a file size of {fileSize} bytes");
var scrubbedFilePath = filePath + "-scrubbed";
using (var streamReader = new StreamReader(filePath))
using (var streamWriter = new StreamWriter(scrubbedFilePath))
{
string line;
while ((line = streamReader.ReadLine()) != null)
{
var maskedLine = HostContext.SecretMasker.MaskSecrets(line);
streamWriter.WriteLine(maskedLine);
}
}
var attachmentName = context.Id.ToString();
Trace.Info($"Queueing file ({filePath}) for attachment upload ({attachmentName})");
// Attachments must be added to the parent context (job), not the current context (step)
context.Root.QueueAttachFile(ChecksAttachmentType.StepSummary, attachmentName, scrubbedFilePath);
}
catch (Exception e)
{
Trace.Error($"Error while processing file ({filePath}): {e}");
context.Error($"Failed to create step summary using 'GITHUB_STEP_SUMMARY': {e.Message}");
}
}
}
} }
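CreateStepSummaryCommand consumes whatever a step appends to its summary file; a minimal producer-side sketch (illustrative, not runner code), which should stay under the 128 KiB limit enforced above:

using System;
using System.IO;

static class StepSummarySketch
{
    public static void AppendSummary()
    {
        // GITHUB_STEP_SUMMARY points at the per-step markdown file that the command above uploads.
        var summaryFile = Environment.GetEnvironmentVariable("GITHUB_STEP_SUMMARY");
        if (string.IsNullOrEmpty(summaryFile))
        {
            return; // summary file not provided on this runner
        }
        File.AppendAllText(summaryFile, "### Test results\n\n- 42 passed, 0 failed\n");
    }
}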


@@ -8,10 +8,10 @@ namespace GitHub.Runner.Worker
{ {
private readonly HashSet<string> _contextEnvAllowlist = new HashSet<string>(StringComparer.OrdinalIgnoreCase) private readonly HashSet<string> _contextEnvAllowlist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{ {
"action",
"action_path", "action_path",
"action_ref", "action_ref",
"action_repository", "action_repository",
"action",
"actor", "actor",
"api_url", "api_url",
"base_ref", "base_ref",
@@ -22,20 +22,18 @@ namespace GitHub.Runner.Worker
"head_ref", "head_ref",
"job", "job",
"path", "path",
"ref",
"ref_name", "ref_name",
"ref_protected", "ref_protected",
"ref_type", "ref_type",
"ref",
"repository_owner",
"repository", "repository",
"repository_owner",
"retention_days", "retention_days",
"run_attempt", "run_attempt",
"run_id", "run_id",
"run_number", "run_number",
"server_url", "server_url",
"sha", "sha",
"step_summary",
"triggering_actor",
"workflow", "workflow",
"workspace", "workspace",
}; };
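Keys in this allowlist are the github context values exposed to steps as GITHUB_-prefixed environment variables; a sketch of the assumed name translation (the actual mapping lives elsewhere in the worker):

// Assumed convention: context key -> environment variable name,
// e.g. "ref" -> "GITHUB_REF", "step_summary" -> "GITHUB_STEP_SUMMARY".
static string ToEnvironmentVariableName(string contextKey)
{
    return "GITHUB_" + contextKey.ToUpperInvariant();
}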


@@ -14,8 +14,6 @@ namespace GitHub.Runner.Worker
public PlanFeatures Features { get; set; } public PlanFeatures Features { get; set; }
public IList<String> FileTable { get; set; } public IList<String> FileTable { get; set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; set; } public IDictionary<String, IDictionary<String, String>> JobDefaults { get; set; }
public List<ActionsStepTelemetry> StepsTelemetry { get; set; }
public List<JobTelemetry> JobTelemetry { get; set; }
public TaskOrchestrationPlanReference Plan { get; set; } public TaskOrchestrationPlanReference Plan { get; set; }
public List<string> PrependPath { get; set; } public List<string> PrependPath { get; set; }
public List<ContainerInfo> ServiceContainers { get; set; } public List<ContainerInfo> ServiceContainers { get; set; }


@@ -42,7 +42,7 @@ namespace GitHub.Runner.Worker.Handlers
{ {
ArgUtil.NotNull(Data.PreSteps, nameof(Data.PreSteps)); ArgUtil.NotNull(Data.PreSteps, nameof(Data.PreSteps));
steps = Data.PreSteps; steps = Data.PreSteps;
} }
else if (stage == ActionRunStage.Post) else if (stage == ActionRunStage.Post)
{ {
ArgUtil.NotNull(Data.PostSteps, nameof(Data.PostSteps)); ArgUtil.NotNull(Data.PostSteps, nameof(Data.PostSteps));
@@ -60,14 +60,14 @@ namespace GitHub.Runner.Worker.Handlers
Trace.Info($"Skipping executing post step id: {step.Id}, name: ${step.DisplayName}"); Trace.Info($"Skipping executing post step id: {step.Id}, name: ${step.DisplayName}");
} }
} }
} }
else else
{ {
ArgUtil.NotNull(Data.Steps, nameof(Data.Steps)); ArgUtil.NotNull(Data.Steps, nameof(Data.Steps));
steps = Data.Steps; steps = Data.Steps;
} }
// Set extra telemetry base on the current context. // Add Telemetry to JobContext to send with JobCompleteMessage
if (stage == ActionRunStage.Main) if (stage == ActionRunStage.Main)
{ {
var hasRunsStep = false; var hasRunsStep = false;
@@ -83,16 +83,20 @@ namespace GitHub.Runner.Worker.Handlers
hasUsesStep = true; hasUsesStep = true;
} }
} }
var pathReference = Action as Pipelines.RepositoryPathReference;
ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre; var telemetry = new ActionsStepTelemetry {
ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost; Ref = GetActionRef(),
HasPreStep = Data.HasPre,
ExecutionContext.StepTelemetry.HasRunsStep = hasRunsStep; HasPostStep = Data.HasPost,
ExecutionContext.StepTelemetry.HasUsesStep = hasUsesStep; IsEmbedded = ExecutionContext.IsEmbedded,
ExecutionContext.StepTelemetry.StepCount = steps.Count; Type = "composite",
HasRunsStep = hasRunsStep,
HasUsesStep = hasUsesStep,
StepCount = steps.Count
};
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
} }
ExecutionContext.StepTelemetry.Type = "composite";
try try
{ {
// Inputs of the composite step // Inputs of the composite step
@@ -113,7 +117,7 @@ namespace GitHub.Runner.Worker.Handlers
// Create embedded steps // Create embedded steps
var embeddedSteps = new List<IStep>(); var embeddedSteps = new List<IStep>();
// If we need to setup containers beforehand, do it // If we need to setup containers beforehand, do it
// only relevant for local composite actions that need to JIT download/setup containers // only relevant for local composite actions that need to JIT download/setup containers
if (LocalActionContainerSetupSteps != null && LocalActionContainerSetupSteps.Count > 0) if (LocalActionContainerSetupSteps != null && LocalActionContainerSetupSteps.Count > 0)
{ {
@@ -148,7 +152,7 @@ namespace GitHub.Runner.Worker.Handlers
} }
else else
{ {
step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(childScopeName); step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(childScopeName);
} }
// Shallow copy github context // Shallow copy github context
@@ -299,13 +303,13 @@ namespace GitHub.Runner.Worker.Handlers
// Register job cancellation call back only if job cancellation token not been fire before each step run // Register job cancellation call back only if job cancellation token not been fire before each step run
if (!ExecutionContext.Root.CancellationToken.IsCancellationRequested) if (!ExecutionContext.Root.CancellationToken.IsCancellationRequested)
{ {
// Test the condition again. The job was cancelled after the condition was originally evaluated. // Test the condition again. The job was canceled after the condition was originally evaluated.
jobCancelRegister = ExecutionContext.Root.CancellationToken.Register(() => jobCancelRegister = ExecutionContext.Root.CancellationToken.Register(() =>
{ {
// Mark job as cancelled // Mark job as cancelled
ExecutionContext.Root.Result = TaskResult.Canceled; ExecutionContext.Root.Result = TaskResult.Canceled;
ExecutionContext.Root.JobContext.Status = ExecutionContext.Root.Result?.ToActionResult(); ExecutionContext.Root.JobContext.Status = ExecutionContext.Root.Result?.ToActionResult();
step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'."); step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
var conditionReTestTraceWriter = new ConditionTraceWriter(Trace, null); // host tracing only var conditionReTestTraceWriter = new ConditionTraceWriter(Trace, null); // host tracing only
var conditionReTestResult = false; var conditionReTestResult = false;
@@ -389,7 +393,7 @@ namespace GitHub.Runner.Worker.Handlers
{ {
await RunStepAsync(step); await RunStepAsync(step);
} }
} }
finally finally
{ {
@@ -399,7 +403,7 @@ namespace GitHub.Runner.Worker.Handlers
jobCancelRegister = null; jobCancelRegister = null;
} }
} }
// Check failed or cancelled // Check failed or canceled
if (step.ExecutionContext.Result == TaskResult.Failed || step.ExecutionContext.Result == TaskResult.Canceled) if (step.ExecutionContext.Result == TaskResult.Failed || step.ExecutionContext.Result == TaskResult.Canceled)
{ {
Trace.Info($"Update job result with current composite step result '{step.ExecutionContext.Result}'."); Trace.Info($"Update job result with current composite step result '{step.ExecutionContext.Result}'.");
@@ -454,7 +458,6 @@ namespace GitHub.Runner.Worker.Handlers
Trace.Info($"Step result: {step.ExecutionContext.Result}"); Trace.Info($"Step result: {step.ExecutionContext.Result}");
step.ExecutionContext.Debug($"Finished: {step.DisplayName}"); step.ExecutionContext.Debug($"Finished: {step.DisplayName}");
step.ExecutionContext.PublishStepTelemetry();
} }
private void SetStepConclusion(IStep step, TaskResult result) private void SetStepConclusion(IStep step, TaskResult result)


@@ -1,14 +1,14 @@
using System; using System.Collections.Generic;
using System.Collections.Generic;
using System.IO; using System.IO;
using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines.ContextData; using System;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container; using GitHub.Runner.Worker.Container;
using Pipelines = GitHub.DistributedTask.Pipelines; using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.DistributedTask.WebApi;
using GitHub.DistributedTask.Pipelines.ContextData;
using System.Linq;
namespace GitHub.Runner.Worker.Handlers namespace GitHub.Runner.Worker.Handlers
{ {
@@ -70,13 +70,18 @@ namespace GitHub.Runner.Worker.Handlers
} }
string type = Action.Type == Pipelines.ActionSourceType.Repository ? "Dockerfile" : "DockerHub"; string type = Action.Type == Pipelines.ActionSourceType.Repository ? "Dockerfile" : "DockerHub";
// Set extra telemetry base on the current context. // Add Telemetry to JobContext to send with JobCompleteMessage
if (stage == ActionRunStage.Main) if (stage == ActionRunStage.Main)
{ {
ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre; var telemetry = new ActionsStepTelemetry {
ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost; Ref = GetActionRef(),
HasPreStep = Data.HasPre,
HasPostStep = Data.HasPost,
IsEmbedded = ExecutionContext.IsEmbedded,
Type = type
};
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
} }
ExecutionContext.StepTelemetry.Type = type;
// run container // run container
var container = new ContainerInfo(HostContext) var container = new ContainerInfo(HostContext)


@@ -1,13 +1,13 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi; using System.Linq;
using GitHub.Runner.Common; using System.IO;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines; using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Worker.Handlers namespace GitHub.Runner.Worker.Handlers
{ {
@@ -22,7 +22,7 @@ namespace GitHub.Runner.Worker.Handlers
string ActionDirectory { get; set; } string ActionDirectory { get; set; }
List<JobExtensionRunner> LocalActionContainerSetupSteps { get; set; } List<JobExtensionRunner> LocalActionContainerSetupSteps { get; set; }
Task RunAsync(ActionRunStage stage); Task RunAsync(ActionRunStage stage);
void PrepareExecution(ActionRunStage stage); void PrintActionDetails(ActionRunStage stage);
} }
public abstract class Handler : RunnerService public abstract class Handler : RunnerService
@@ -36,7 +36,6 @@ namespace GitHub.Runner.Worker.Handlers
protected IActionCommandManager ActionCommandManager { get; private set; } protected IActionCommandManager ActionCommandManager { get; private set; }
public Pipelines.ActionStepDefinitionReference Action { get; set; } public Pipelines.ActionStepDefinitionReference Action { get; set; }
public bool IsActionStep => Action != null;
public Dictionary<string, string> Environment { get; set; } public Dictionary<string, string> Environment { get; set; }
public Variables RuntimeVariables { get; set; } public Variables RuntimeVariables { get; set; }
public IExecutionContext ExecutionContext { get; set; } public IExecutionContext ExecutionContext { get; set; }
@@ -45,50 +44,29 @@ namespace GitHub.Runner.Worker.Handlers
public string ActionDirectory { get; set; } public string ActionDirectory { get; set; }
public List<JobExtensionRunner> LocalActionContainerSetupSteps { get; set; } public List<JobExtensionRunner> LocalActionContainerSetupSteps { get; set; }
public void PrepareExecution(ActionRunStage stage) public virtual string GetActionRef()
{ {
// Print out action details if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
PrintActionDetails(stage);
// Get telemetry for the action
PopulateActionTelemetry(stage);
}
protected void PopulateActionTelemetry(ActionRunStage stage)
{
if (!IsActionStep)
{ {
ExecutionContext.StepTelemetry.Type = "runner";
ExecutionContext.StepTelemetry.Action = $"{stage} Job Hook";
}
else if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
{
ExecutionContext.StepTelemetry.Type = "docker";
var registryAction = Action as Pipelines.ContainerRegistryReference; var registryAction = Action as Pipelines.ContainerRegistryReference;
ExecutionContext.StepTelemetry.Action = registryAction.Image; return registryAction.Image;
}
else if (Action.Type == Pipelines.ActionSourceType.Script)
{
ExecutionContext.StepTelemetry.Type = "run";
} }
else if (Action.Type == Pipelines.ActionSourceType.Repository) else if (Action.Type == Pipelines.ActionSourceType.Repository)
{ {
ExecutionContext.StepTelemetry.Type = "repository";
var repoAction = Action as Pipelines.RepositoryPathReference; var repoAction = Action as Pipelines.RepositoryPathReference;
if (string.Equals(repoAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase)) if (string.Equals(repoAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{ {
ExecutionContext.StepTelemetry.Action = repoAction.Path; return repoAction.Path;
} }
else else
{ {
ExecutionContext.StepTelemetry.Ref = repoAction.Ref;
if (string.IsNullOrEmpty(repoAction.Path)) if (string.IsNullOrEmpty(repoAction.Path))
{ {
ExecutionContext.StepTelemetry.Action = repoAction.Name; return $"{repoAction.Name}@{repoAction.Ref}";
} }
else else
{ {
ExecutionContext.StepTelemetry.Action = $"{repoAction.Name}/{repoAction.Path}"; return $"{repoAction.Name}/{repoAction.Path}@{repoAction.Ref}";
} }
} }
} }
@@ -97,11 +75,11 @@ namespace GitHub.Runner.Worker.Handlers
// this should never happen // this should never happen
Trace.Error($"Can't generate ref for {Action.Type.ToString()}"); Trace.Error($"Can't generate ref for {Action.Type.ToString()}");
} }
return "";
} }
public virtual void PrintActionDetails(ActionRunStage stage)
protected virtual void PrintActionDetails(ActionRunStage stage)
{ {
if (stage == ActionRunStage.Post) if (stage == ActionRunStage.Post)
{ {
ExecutionContext.Output($"Post job cleanup."); ExecutionContext.Output($"Post job cleanup.");


@@ -55,23 +55,7 @@ namespace GitHub.Runner.Worker.Handlers
else if (data.ExecutionType == ActionExecutionType.NodeJS) else if (data.ExecutionType == ActionExecutionType.NodeJS)
{ {
handler = HostContext.CreateService<INodeScriptActionHandler>(); handler = HostContext.CreateService<INodeScriptActionHandler>();
var nodeData = data as NodeJSActionExecutionData; (handler as INodeScriptActionHandler).Data = data as NodeJSActionExecutionData;
// With node12 EoL in 04/2022, we want to be able to uniformly upgrade all JS actions to node16 from the server
if (string.Equals(nodeData.NodeVersion, "node12", StringComparison.InvariantCultureIgnoreCase) &&
(executionContext.Global.Variables.GetBoolean("DistributedTask.ForceGithubJavascriptActionsToNode16") ?? false))
{
// The user can opt out of this behaviour by setting this variable to true, either setting 'env' in their workflow or as an environment variable on their machine
executionContext.Global.EnvironmentVariables.TryGetValue(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, out var workflowOptOut);
var isWorkflowOptOutSet = !string.IsNullOrEmpty(workflowOptOut);
var isLocalOptOut = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion));
bool isOptOut = isWorkflowOptOutSet ? StringUtil.ConvertToBoolean(workflowOptOut) : isLocalOptOut;
if (!isOptOut)
{
nodeData.NodeVersion = "node16";
}
}
(handler as INodeScriptActionHandler).Data = nodeData;
} }
else if (data.ExecutionType == ActionExecutionType.Script) else if (data.ExecutionType == ActionExecutionType.Script)
{ {


@@ -1,11 +1,12 @@
using System; using System.IO;
using System.IO;
using System.Linq;
using System.Text; using System.Text;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using System;
using System.Linq;
namespace GitHub.Runner.Worker.Handlers namespace GitHub.Runner.Worker.Handlers
{ {
@@ -73,13 +74,19 @@ namespace GitHub.Runner.Worker.Handlers
target = Data.Post; target = Data.Post;
} }
// Set extra telemetry base on the current context. // Add Telemetry to JobContext to send with JobCompleteMessage
if (stage == ActionRunStage.Main) if (stage == ActionRunStage.Main)
{ {
ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre; var telemetry = new ActionsStepTelemetry
ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost; {
Ref = GetActionRef(),
HasPreStep = Data.HasPre,
HasPostStep = Data.HasPost,
IsEmbedded = ExecutionContext.IsEmbedded,
Type = Data.NodeVersion
};
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
} }
ExecutionContext.StepTelemetry.Type = Data.NodeVersion;
ArgUtil.NotNullOrEmpty(target, nameof(target)); ArgUtil.NotNullOrEmpty(target, nameof(target));
target = Path.Combine(ActionDirectory, target); target = Path.Combine(ActionDirectory, target);


@@ -1,7 +1,7 @@
using System; using System.Threading.Tasks;
using System.Threading.Tasks; using System;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Runner.Common;
using Pipelines = GitHub.DistributedTask.Pipelines; using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker.Handlers namespace GitHub.Runner.Worker.Handlers
@@ -35,8 +35,6 @@ namespace GitHub.Runner.Worker.Handlers
} }
ArgUtil.NotNullOrEmpty(plugin, nameof(plugin)); ArgUtil.NotNullOrEmpty(plugin, nameof(plugin));
// Set extra telemetry base on the current context.
ExecutionContext.StepTelemetry.Type = plugin;
// Update the env dictionary. // Update the env dictionary.
AddPrependPathToEnvironment(); AddPrependPathToEnvironment();


@@ -1,13 +1,12 @@
using System; using System;
using System.IO; using System.IO;
using System.Linq;
using System.Text; using System.Text;
using System.Threading.Tasks; using System.Threading.Tasks;
using System.Linq;
using GitHub.DistributedTask.Pipelines.ContextData; using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines; using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker.Handlers namespace GitHub.Runner.Worker.Handlers
@@ -22,24 +21,18 @@ namespace GitHub.Runner.Worker.Handlers
{ {
public ScriptActionExecutionData Data { get; set; } public ScriptActionExecutionData Data { get; set; }
protected override void PrintActionDetails(ActionRunStage stage) public override void PrintActionDetails(ActionRunStage stage)
{ {
// if we're executing a Job Extension, we won't have an 'Action'
if (!IsActionStep) if (stage == ActionRunStage.Post)
{ {
if (Inputs.TryGetValue("path", out var path)) throw new NotSupportedException("Script action should not have 'Post' job action.");
{
ExecutionContext.Output($"##[group]Run '{path}'");
}
else
{
throw new InvalidOperationException("Inputs 'path' must be set for job extensions");
}
} }
else if (Action.Type == Pipelines.ActionSourceType.Script)
Inputs.TryGetValue("script", out string contents);
contents = contents ?? string.Empty;
if (Action.Type == Pipelines.ActionSourceType.Script)
{ {
Inputs.TryGetValue("script", out string contents);
contents = contents ?? string.Empty;
var firstLine = contents.TrimStart(' ', '\t', '\r', '\n'); var firstLine = contents.TrimStart(' ', '\t', '\r', '\n');
var firstNewLine = firstLine.IndexOfAny(new[] { '\r', '\n' }); var firstNewLine = firstLine.IndexOfAny(new[] { '\r', '\n' });
if (firstNewLine >= 0) if (firstNewLine >= 0)
@@ -48,16 +41,17 @@ namespace GitHub.Runner.Worker.Handlers
} }
ExecutionContext.Output($"##[group]Run {firstLine}"); ExecutionContext.Output($"##[group]Run {firstLine}");
var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)
{
// Bright Cyan color
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
}
} }
else else
{ {
throw new InvalidOperationException($"Invalid action type {Action?.Type} for {nameof(ScriptHandler)}"); throw new InvalidOperationException($"Invalid action type {Action.Type} for {nameof(ScriptHandler)}");
}
var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)
{
// Bright Cyan color
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
} }
string argFormat; string argFormat;
@@ -137,6 +131,11 @@ namespace GitHub.Runner.Worker.Handlers
public async Task RunAsync(ActionRunStage stage) public async Task RunAsync(ActionRunStage stage)
{ {
if (stage == ActionRunStage.Post)
{
throw new NotSupportedException("Script action should not have 'Post' job action.");
}
// Validate args // Validate args
Trace.Entering(); Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext)); ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
@@ -145,6 +144,17 @@ namespace GitHub.Runner.Worker.Handlers
var githubContext = ExecutionContext.ExpressionValues["github"] as GitHubContext; var githubContext = ExecutionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(githubContext, nameof(githubContext)); ArgUtil.NotNull(githubContext, nameof(githubContext));
// Add Telemetry to JobContext to send with JobCompleteMessage
if (stage == ActionRunStage.Main)
{
var telemetry = new ActionsStepTelemetry
{
IsEmbedded = ExecutionContext.IsEmbedded,
Type = "run",
};
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
}
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp); var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
Inputs.TryGetValue("script", out var contents); Inputs.TryGetValue("script", out var contents);
@@ -212,35 +222,15 @@ namespace GitHub.Runner.Worker.Handlers
} }
} }
// Don't override runner telemetry here
if (!string.IsNullOrEmpty(shellCommand) && IsActionStep)
{
ExecutionContext.StepTelemetry.Action = shellCommand;
}
// No arg format was given, shell must be a built-in // No arg format was given, shell must be a built-in
if (string.IsNullOrEmpty(argFormat) || !argFormat.Contains("{0}")) if (string.IsNullOrEmpty(argFormat) || !argFormat.Contains("{0}"))
{ {
throw new ArgumentException("Invalid shell option. Shell must be a valid built-in (bash, sh, cmd, powershell, pwsh) or a format string containing '{0}'"); throw new ArgumentException("Invalid shell option. Shell must be a valid built-in (bash, sh, cmd, powershell, pwsh) or a format string containing '{0}'");
} }
string scriptFilePath, resolvedScriptPath;
if (IsActionStep) // We do not not the full path until we know what shell is being used, so that we can determine the file extension
{ var scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}");
// We do not not the full path until we know what shell is being used, so that we can determine the file extension var resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}";
scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}");
resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}";
}
else
{
// JobExtensionRunners run a script file, we load that from the inputs here
if (!Inputs.ContainsKey("path"))
{
throw new ArgumentException("Expected 'path' input to be set");
}
scriptFilePath = Inputs["path"];
ArgUtil.NotNullOrEmpty(scriptFilePath, "path");
resolvedScriptPath = Inputs["path"].Replace("\"", "\\\"");
}
// Format arg string with script path // Format arg string with script path
var arguments = string.Format(argFormat, resolvedScriptPath); var arguments = string.Format(argFormat, resolvedScriptPath);
@@ -256,12 +246,9 @@ namespace GitHub.Runner.Worker.Handlers
#else #else
// Don't add a BOM. It causes the script to fail on some operating systems (e.g. on Ubuntu 14). // Don't add a BOM. It causes the script to fail on some operating systems (e.g. on Ubuntu 14).
var encoding = new UTF8Encoding(false); var encoding = new UTF8Encoding(false);
#endif #endif
if (IsActionStep) // Script is written to local path (ie host) but executed relative to the StepHost, which may be a container
{ File.WriteAllText(scriptFilePath, contents, encoding);
// Script is written to local path (ie host) but executed relative to the StepHost, which may be a container
File.WriteAllText(scriptFilePath, contents, encoding);
}
// Prepend PATH // Prepend PATH
AddPrependPathToEnvironment(); AddPrependPathToEnvironment();
@@ -284,10 +271,10 @@ namespace GitHub.Runner.Worker.Handlers
if (Environment.ContainsKey("DYLD_INSERT_LIBRARIES")) // We don't check `isContainerStepHost` because we don't support container on macOS if (Environment.ContainsKey("DYLD_INSERT_LIBRARIES")) // We don't check `isContainerStepHost` because we don't support container on macOS
{ {
// launch `node macOSRunInvoker.js shell args` instead of `shell args` to avoid macOS SIP remove `DYLD_INSERT_LIBRARIES` when launch process // launch `node macOSRunInvoker.js shell args` instead of `shell args` to avoid macOS SIP remove `DYLD_INSERT_LIBRARIES` when launch process
string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}"); string node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
string macOSRunInvoker = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "macos-run-invoker.js"); string macOSRunInvoker = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "macos-run-invoker.js");
arguments = $"\"{macOSRunInvoker.Replace("\"", "\\\"")}\" \"{fileName.Replace("\"", "\\\"")}\" {arguments}"; arguments = $"\"{macOSRunInvoker.Replace("\"", "\\\"")}\" \"{fileName.Replace("\"", "\\\"")}\" {arguments}";
fileName = node; fileName = node12;
} }
#endif #endif
var systemConnection = ExecutionContext.Global.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); var systemConnection = ExecutionContext.Global.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));


@@ -1,8 +1,6 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Worker.Handlers namespace GitHub.Runner.Worker.Handlers
{ {
@@ -81,22 +79,5 @@ namespace GitHub.Runner.Worker.Handlers
throw new ArgumentException($"Failed to parse COMMAND [..ARGS] from {shellOption}"); throw new ArgumentException($"Failed to parse COMMAND [..ARGS] from {shellOption}");
} }
} }
internal static string GetDefaultShellForScript(string path, Common.Tracing trace, string prependPath)
{
var format = "{0} {1}";
switch (Path.GetExtension(path))
{
case ".sh":
// use 'sh' args but prefer bash
var pathToShell = WhichUtil.Which("bash", false, trace, prependPath) ?? WhichUtil.Which("sh", true, trace, prependPath);
return string.Format(format, pathToShell, _defaultArguments["sh"]);
case ".ps1":
var pathToPowershell = WhichUtil.Which("pwsh", false, trace, prependPath) ?? WhichUtil.Which("powershell", true, trace, prependPath);
return string.Format(format, pathToPowershell, _defaultArguments["powershell"]);
default:
throw new ArgumentException($"{path} is not a valid path to a script. Make sure it ends in '.sh' or '.ps1'.");
}
}
} }
} }


@@ -15,7 +15,6 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Common.Util; using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using Pipelines = GitHub.DistributedTask.Pipelines; using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker namespace GitHub.Runner.Worker
@@ -57,8 +56,6 @@ namespace GitHub.Runner.Worker
// Create a new timeline record for 'Set up job' // Create a new timeline record for 'Set up job'
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Set up job", $"{nameof(JobExtension)}_Init", null, null, ActionRunStage.Pre); IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Set up job", $"{nameof(JobExtension)}_Init", null, null, ActionRunStage.Pre);
context.StepTelemetry.Type = "runner";
context.StepTelemetry.Action = "setup_job";
List<IStep> preJobSteps = new List<IStep>(); List<IStep> preJobSteps = new List<IStep>();
List<IStep> jobSteps = new List<IStep>(); List<IStep> jobSteps = new List<IStep>();
@@ -249,19 +246,6 @@ namespace GitHub.Runner.Worker
Trace.Info("Downloading actions"); Trace.Info("Downloading actions");
var actionManager = HostContext.GetService<IActionManager>(); var actionManager = HostContext.GetService<IActionManager>();
var prepareResult = await actionManager.PrepareActionsAsync(context, message.Steps); var prepareResult = await actionManager.PrepareActionsAsync(context, message.Steps);
// add hook to preJobSteps
var startedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED");
if (!string.IsNullOrEmpty(startedHookPath))
{
var hookProvider = HostContext.GetService<IJobHookProvider>();
var jobHookData = new JobHookData(ActionRunStage.Pre, startedHookPath);
preJobSteps.Add(new JobExtensionRunner(runAsync: hookProvider.RunHook,
condition: $"{PipelineTemplateConstants.Always}()",
displayName: Constants.Hooks.JobStartedStepName,
data: (object)jobHookData));
}
preJobSteps.AddRange(prepareResult.ContainerSetupSteps); preJobSteps.AddRange(prepareResult.ContainerSetupSteps);
// Add start-container steps, record and stop-container steps // Add start-container steps, record and stop-container steps
@@ -329,8 +313,6 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNull(extensionStep, extensionStep.DisplayName); ArgUtil.NotNull(extensionStep, extensionStep.DisplayName);
Guid stepId = Guid.NewGuid(); Guid stepId = Guid.NewGuid();
extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, stepId.ToString("N"), null, stepId.ToString("N"), ActionRunStage.Pre); extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, stepId.ToString("N"), null, stepId.ToString("N"), ActionRunStage.Pre);
extensionStep.ExecutionContext.StepTelemetry.Type = "runner";
extensionStep.ExecutionContext.StepTelemetry.Action = extensionStep.DisplayName.ToLowerInvariant().Replace(' ', '_');
} }
else if (step is IActionRunner actionStep) else if (step is IActionRunner actionStep)
{ {
@@ -351,18 +333,6 @@ namespace GitHub.Runner.Worker
} }
} }
// Register Job Completed hook if the variable is set
var completedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED");
if (!string.IsNullOrEmpty(completedHookPath))
{
var hookProvider = HostContext.GetService<IJobHookProvider>();
var jobHookData = new JobHookData(ActionRunStage.Post, completedHookPath);
jobContext.RegisterPostJobStep(new JobExtensionRunner(runAsync: hookProvider.RunHook,
condition: $"{PipelineTemplateConstants.Always}()",
displayName: Constants.Hooks.JobCompletedStepName,
data: (object)jobHookData));
}
List<IStep> steps = new List<IStep>(); List<IStep> steps = new List<IStep>();
steps.AddRange(preJobSteps); steps.AddRange(preJobSteps);
steps.AddRange(jobSteps); steps.AddRange(jobSteps);
@@ -431,8 +401,6 @@ namespace GitHub.Runner.Worker
// create a new timeline record node for 'Finalize job' // create a new timeline record node for 'Finalize job'
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Complete job", $"{nameof(JobExtension)}_Final", null, null, ActionRunStage.Post); IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Complete job", $"{nameof(JobExtension)}_Final", null, null, ActionRunStage.Post);
context.StepTelemetry.Type = "runner";
context.StepTelemetry.Action = "complete_job";
using (var register = jobContext.CancellationToken.Register(() => { context.CancelToken(); })) using (var register = jobContext.CancellationToken.Register(() => { context.CancelToken(); }))
{ {
try try


@@ -1,95 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using System.Linq;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Handlers;
namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(JobHookProvider))]
public interface IJobHookProvider : IRunnerService
{
Task RunHook(IExecutionContext executionContext, object data);
}
public class JobHookData
{
public string Path {get; private set;}
public ActionRunStage Stage {get; private set;}
public JobHookData(ActionRunStage stage, string path)
{
Path = path;
Stage = stage;
}
}
public class JobHookProvider : RunnerService, IJobHookProvider
{
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
}
public async Task RunHook(IExecutionContext executionContext, object data)
{
// Get Inputs
var hookData = data as JobHookData;
ArgUtil.NotNull(hookData, nameof(JobHookData));
var displayName = hookData.Stage == ActionRunStage.Pre ? Constants.Hooks.JobStartedStepName : Constants.Hooks.JobCompletedStepName;
// Log to users so that they know how this step was injected
executionContext.Output($"A '{displayName}' has been configured by the self-hosted runner administrator");
// Validate script file.
if (!File.Exists(hookData.Path))
{
throw new FileNotFoundException("File doesn't exist");
}
executionContext.WriteWebhookPayload();
// Create the handler data.
var scriptDirectory = Path.GetDirectoryName(hookData.Path);
var stepHost = HostContext.CreateService<IDefaultStepHost>();
var prependPath = string.Join(Path.PathSeparator.ToString(), executionContext.Global.PrependPath.Reverse<string>());
Dictionary<string, string> inputs = new()
{
["path"] = hookData.Path,
["shell"] = ScriptHandlerHelpers.GetDefaultShellForScript(hookData.Path, Trace, prependPath)
};
// Create the handler
var handlerFactory = HostContext.GetService<IHandlerFactory>();
var handler = handlerFactory.Create(
executionContext,
action: null,
stepHost,
new ScriptActionExecutionData(),
inputs,
environment: new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
executionContext.Global.Variables,
actionDirectory: scriptDirectory,
localActionContainerSetupSteps: null);
handler.PrepareExecution(hookData.Stage);
// Setup file commands
var fileCommandManager = HostContext.CreateService<IFileCommandManager>();
fileCommandManager.InitializeFiles(executionContext, null);
// Run the step and process the file commands
try
{
await handler.RunAsync(hookData.Stage);
}
finally
{
fileCommandManager.ProcessFiles(executionContext, executionContext.Global.Container);
}
}
}
}


@@ -1,18 +1,18 @@
using System; using GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Common.Util;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Net.Http;
using System.Text; using System.Text;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi; using System.Net.Http;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker namespace GitHub.Runner.Worker
{ {
@@ -25,7 +25,6 @@ namespace GitHub.Runner.Worker
public sealed class JobRunner : RunnerService, IJobRunner public sealed class JobRunner : RunnerService, IJobRunner
{ {
private IJobServerQueue _jobServerQueue; private IJobServerQueue _jobServerQueue;
private RunnerSettings _runnerSettings;
private ITempDirectoryManager _tempDirectoryManager; private ITempDirectoryManager _tempDirectoryManager;
public async Task<TaskResult> RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken) public async Task<TaskResult> RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken)
@@ -109,8 +108,8 @@ namespace GitHub.Runner.Worker
            jobContext.SetRunnerContext("os", VarUtil.OS);
            jobContext.SetRunnerContext("arch", VarUtil.OSArchitecture);
-           _runnerSettings = HostContext.GetService<IConfigurationStore>().GetSettings();
-           jobContext.SetRunnerContext("name", _runnerSettings.AgentName);
+           var runnerSettings = HostContext.GetService<IConfigurationStore>().GetSettings();
+           jobContext.SetRunnerContext("name", runnerSettings.AgentName);
            string toolsDirectory = HostContext.GetDirectory(WellKnownDirectory.Tools);
            Directory.CreateDirectory(toolsDirectory);
@@ -131,9 +130,9 @@ namespace GitHub.Runner.Worker
            }
            catch (OperationCanceledException ex) when (jobContext.CancellationToken.IsCancellationRequested)
            {
-               // set the job to cancelled
+               // set the job to canceled
                // don't log error issue to job ExecutionContext, since server owns the job level issue
-               Trace.Error($"Job is cancelled during initialize.");
+               Trace.Error($"Job is canceled during initialize.");
                Trace.Error($"Caught exception: {ex}");
                return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Canceled);
            }
@@ -210,53 +209,6 @@ namespace GitHub.Runner.Worker
            jobContext.Debug($"Finishing: {message.JobDisplayName}");
            TaskResult result = jobContext.Complete(taskResult);
-           if (_runnerSettings.DisableUpdate == true)
-           {
-               try
-               {
-                   var currentVersion = new PackageVersion(BuildConstants.RunnerPackage.Version);
-                   ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
-                   VssCredentials serverCredential = VssUtil.GetVssCredential(systemConnection);
-                   var runnerServer = HostContext.GetService<IRunnerServer>();
-                   await runnerServer.ConnectAsync(systemConnection.Url, serverCredential);
-                   var serverPackages = await runnerServer.GetPackagesAsync("agent", BuildConstants.RunnerPackage.PackageName, 5, includeToken: false, cancellationToken: CancellationToken.None);
-                   if (serverPackages.Count > 0)
-                   {
-                       serverPackages = serverPackages.OrderByDescending(x => x.Version).ToList();
-                       Trace.Info($"Newer packages {StringUtil.ConvertToJson(serverPackages.Select(x => x.Version.ToString()))}");
-                       var warnOnFailedJob = false; // any minor/patch version behind.
-                       var warnOnOldRunnerVersion = false; // >= 2 minor version behind
-                       if (serverPackages.Any(x => x.Version.CompareTo(currentVersion) > 0))
-                       {
-                           Trace.Info($"Current runner version {currentVersion} is behind the latest runner version {serverPackages[0].Version}.");
-                           warnOnFailedJob = true;
-                       }
-                       if (serverPackages.Where(x => x.Version.Major == currentVersion.Major && x.Version.Minor > currentVersion.Minor).Count() > 1)
-                       {
-                           Trace.Info($"Current runner version {currentVersion} is way behind the latest runner version {serverPackages[0].Version}.");
-                           warnOnOldRunnerVersion = true;
-                       }
-                       if (result == TaskResult.Failed && warnOnFailedJob)
-                       {
-                           jobContext.Warning($"This job failure may be caused by using an out of date self-hosted runner. You are currently using runner version {currentVersion}. Please update to the latest version {serverPackages[0].Version}");
-                       }
-                       else if (warnOnOldRunnerVersion)
-                       {
-                           jobContext.Warning($"This self-hosted runner is currently using runner version {currentVersion}. This version is out of date. Please update to the latest version {serverPackages[0].Version}");
-                       }
-                   }
-               }
-               catch (Exception ex)
-               {
-                   // Ignore any error since suggest runner update is best effort.
-                   Trace.Error($"Caught exception during runner version check: {ex}");
-               }
-           }
            try
            {
                await ShutdownQueue(throwOnFailure: true);
@@ -279,13 +231,14 @@
                }
            }
            // Load any upgrade telemetry
-           LoadFromTelemetryFile(jobContext.Global.JobTelemetry);
+           LoadFromTelemetryFile(jobContext.JobTelemetry);
            // Make sure we don't submit secrets as telemetry
-           MaskTelemetrySecrets(jobContext.Global.JobTelemetry);
+           MaskTelemetrySecrets(jobContext.JobTelemetry);
-           Trace.Info($"Raising job completed event");
-           var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result, jobContext.JobOutputs, jobContext.ActionsEnvironment, jobContext.Global.StepsTelemetry, jobContext.Global.JobTelemetry);
+           Trace.Info("Raising job completed event.");
+           var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result, jobContext.JobOutputs, jobContext.ActionsEnvironment, jobContext.ActionsStepsTelemetry, jobContext.JobTelemetry);
            var completeJobRetryLimit = 5;
            var exceptions = new List<Exception>();

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
+using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
@@ -103,6 +104,10 @@ namespace GitHub.Runner.Worker
            // Set GITHUB_ACTION
            step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
+           var stepSummaryFilePath = CreateStepSummaryFile(step);
+           step.ExecutionContext.SetGitHubContext("step_summary", stepSummaryFilePath);
+           envContext["GITHUB_STEP_SUMMARY"] = new StringContextData(stepSummaryFilePath);
            try
            {
                // Evaluate and merge step env
@@ -130,7 +135,7 @@ namespace GitHub.Runner.Worker
                // Register job cancellation call back only if job cancellation token not been fire before each step run
                if (!jobContext.CancellationToken.IsCancellationRequested)
                {
-                   // Test the condition again. The job was cancelled after the condition was originally evaluated.
+                   // Test the condition again. The job was canceled after the condition was originally evaluated.
                    jobCancelRegister = jobContext.CancellationToken.Register(() =>
                    {
                        // Mark job as cancelled
@@ -352,7 +357,52 @@ namespace GitHub.Runner.Worker
        {
            var executionContext = step.ExecutionContext;
+           CreateSummaryAttachment(step);
            executionContext.Complete(result, resultCode: resultCode);
        }
+       private string CreateStepSummaryFile(IStep step)
+       {
+           var stepSummaryDirectory = HostContext.GetDirectory(WellKnownDirectory.StepSummary);
+           if (!Directory.Exists(stepSummaryDirectory))
+           {
+               Trace.Info($"Creating step summary directory: {stepSummaryDirectory}");
+               Directory.CreateDirectory(stepSummaryDirectory);
+           }
+           var stepID = step.ExecutionContext.Id;
+           var stepSummaryFilePath = Path.Combine(stepSummaryDirectory, $"{stepID}.md");
+           Trace.Info($"Creating step summary file: {stepSummaryFilePath}");
+           File.Create(stepSummaryFilePath).Close();
+           return stepSummaryFilePath;
+       }
+       private void CreateSummaryAttachment(IStep step)
+       {
+           var executionContext = step.ExecutionContext;
+           var parentContext = executionContext.Root;
+           var stepSummaryFilePath = executionContext.GetGitHubContext("step_summary");
+           var stepID = executionContext.Id;
+           Trace.Info($"Reading step summary data from {stepSummaryFilePath}");
+           var summaryExists = File.Exists(stepSummaryFilePath);
+           if (summaryExists)
+           {
+               Trace.Info($"File exists: {stepSummaryFilePath}");
+               var summaryFileIsEmpty = new FileInfo(stepSummaryFilePath).Length == 0;
+               if (summaryFileIsEmpty)
+               {
+                   Trace.Info($"Summary file ({stepSummaryFilePath}) is empty, skipping attachment upload");
+               }
+               else
+               {
+                   Trace.Info($"Queueing file ({stepSummaryFilePath}) for attachment upload ({stepID})");
+                   parentContext.QueueAttachFile(ChecksAttachmentType.StepSummary, stepID.ToString(), stepSummaryFilePath);
+               }
+           }
+       }
    }
}
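Taken together, CreateStepSummaryFile and CreateSummaryAttachment give each step an empty "<execution context id>.md" file, expose its path through the step_summary GitHub context and the GITHUB_STEP_SUMMARY environment variable, and queue any non-empty file as a step_summary attachment when the step completes. A minimal consumer-side sketch follows; it is an illustration, not part of this diff, and any step or tool that can read an environment variable and append to a file works the same way.

using System;
using System.IO;

class StepSummaryExample
{
    static void Main()
    {
        // Path of the per-step Markdown file created by CreateStepSummaryFile above.
        var summaryPath = Environment.GetEnvironmentVariable("GITHUB_STEP_SUMMARY");
        if (string.IsNullOrEmpty(summaryPath))
        {
            return; // not running under a runner that sets the variable
        }

        // Append rather than overwrite so multiple writes within one step accumulate.
        File.AppendAllText(summaryPath, "## Test results\n\n- 42 passed, 0 failed\n");
    }
}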

View File

@@ -1,4 +1,4 @@
/*
 * ---------------------------------------------------------
 * Copyright(C) Microsoft Corporation. All rights reserved.
 * ---------------------------------------------------------
@@ -324,7 +324,6 @@ namespace GitHub.DistributedTask.WebApi
        /// <param name="scopeIdentifier">The project GUID to scope the request</param>
        /// <param name="hubName">The name of the server hub: "build" for the Build server or "rm" for the Release Management server</param>
        /// <param name="planId"></param>
-       /// <param name="jobId"></param>
        /// <param name="actionReferenceList"></param>
        /// <param name="userState"></param>
        /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
@@ -332,7 +331,6 @@ namespace GitHub.DistributedTask.WebApi
            Guid scopeIdentifier,
            string hubName,
            Guid planId,
-           Guid jobId,
            ActionReferenceList actionReferenceList,
            object userState = null,
            CancellationToken cancellationToken = default)
@@ -341,15 +339,12 @@ namespace GitHub.DistributedTask.WebApi
            Guid locationId = new Guid("27d7f831-88c1-4719-8ca1-6a061dad90eb");
            object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId };
            HttpContent content = new ObjectContent<ActionReferenceList>(actionReferenceList, new VssJsonMediaTypeFormatter(true));
-           List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
-           queryParams.Add("jobId", jobId);
            return SendAsync<ActionDownloadInfoCollection>(
                httpMethod,
                locationId,
                routeValues: routeValues,
                version: new ApiResourceVersion(6.0, 1),
-               queryParameters: queryParams,
                userState: userState,
                cancellationToken: cancellationToken,
                content: content);

View File

@@ -1,6 +1,4 @@
-using System;
-using System.Collections.Generic;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
@@ -10,13 +8,6 @@ namespace GitHub.DistributedTask.WebApi
    [DataContract]
    public class ActionsStepTelemetry
    {
-       public ActionsStepTelemetry()
-       {
-           this.ErrorMessages = new List<string>();
-       }
-       [DataMember(EmitDefaultValue = false)]
-       public string Action { get; set; }
        [DataMember(EmitDefaultValue = false)]
        public string Ref { get; set; }
@@ -24,15 +15,9 @@ namespace GitHub.DistributedTask.WebApi
        [DataMember(EmitDefaultValue = false)]
        public string Type { get; set; }
-       [DataMember(EmitDefaultValue = false)]
-       public string Stage { get; set; }
-       [DataMember(EmitDefaultValue = false)]
-       public Guid StepId { get; set; }
        [DataMember(EmitDefaultValue = false)]
        public bool? HasRunsStep { get; set; }
        [DataMember(EmitDefaultValue = false)]
        public bool? HasUsesStep { get; set; }
@@ -47,14 +32,5 @@ namespace GitHub.DistributedTask.WebApi
        [DataMember(EmitDefaultValue = false)]
        public int? StepCount { get; set; }
-       [DataMember(EmitDefaultValue = false)]
-       public TaskResult? Result { get; set; }
-       [DataMember(EmitDefaultValue = false)]
-       public List<string> ErrorMessages { get; set; }
-       [DataMember(EmitDefaultValue = false)]
-       public int? ExecutionTimeInSeconds { get; set; }
    }
}

View File

@@ -25,7 +25,6 @@ namespace GitHub.DistributedTask.WebApi
            this.ProvisioningState = referenceToBeCloned.ProvisioningState;
            this.AccessPoint = referenceToBeCloned.AccessPoint;
            this.Ephemeral = referenceToBeCloned.Ephemeral;
-           this.DisableUpdate = referenceToBeCloned.DisableUpdate;
            if (referenceToBeCloned.m_links != null)
            {
@@ -93,16 +92,6 @@ namespace GitHub.DistributedTask.WebApi
            set;
        }
-       /// <summary>
-       /// Whether or not this agent should auto-update to latest version.
-       /// </summary>
-       [DataMember(EmitDefaultValue = false)]
-       public bool? DisableUpdate
-       {
-           get;
-           set;
-       }
        /// <summary>
        /// Whether or not the agent is online.
        /// </summary>

View File

@@ -66,7 +66,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
            _serviceControlManager = new Mock<ILinuxServiceControlManager>();
#endif
-           var expectedAgent = new TaskAgent(_expectedAgentName) { Id = 1, Ephemeral = true, DisableUpdate = true };
+           var expectedAgent = new TaskAgent(_expectedAgentName) { Id = 1 };
            expectedAgent.Authorization = new TaskAgentAuthorization
            {
                ClientId = Guid.NewGuid(),
@@ -154,7 +154,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
                tc,
                new[]
                {
                    "configure",
                    "--url", _expectedServerUrl,
                    "--name", _expectedAgentName,
                    "--runnergroup", _secondRunnerGroupName,
@@ -163,8 +163,6 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
"--token", _expectedToken, "--token", _expectedToken,
"--labels", userLabels, "--labels", userLabels,
"--ephemeral", "--ephemeral",
"--disableupdate",
"--unattended",
                });
            trace.Info("Constructed.");
            _store.Setup(x => x.IsConfigured()).Returns(false);
@@ -187,7 +185,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
            // validate GetAgentPoolsAsync gets called twice with automation pool type
            _runnerServer.Verify(x => x.GetAgentPoolsAsync(It.IsAny<string>(), It.Is<TaskAgentPoolType>(p => p == TaskAgentPoolType.Automation)), Times.Exactly(2));
-           var expectedLabels = new List<string>() { "self-hosted", VarUtil.OS, VarUtil.OSArchitecture };
+           var expectedLabels = new List<string>() { "self-hosted", VarUtil.OS, VarUtil.OSArchitecture};
            expectedLabels.AddRange(userLabels.Split(",").ToList());
            _runnerServer.Verify(x => x.AddAgentAsync(It.IsAny<int>(), It.Is<TaskAgent>(a => a.Labels.Select(x => x.Name).ToHashSet().SetEquals(expectedLabels))), Times.Once);

View File

@@ -1,18 +1,18 @@
using System; using GitHub.DistributedTask.WebApi;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Listener;
using GitHub.Runner.Listener.Configuration;
using GitHub.Services.Common; using GitHub.Services.Common;
using GitHub.Services.WebApi; using GitHub.Services.WebApi;
using GitHub.Runner.Listener;
using GitHub.Runner.Listener.Configuration;
using Moq; using Moq;
using System;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
using Xunit; using Xunit;
using System.Threading;
using System.Reflection;
using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography;
namespace GitHub.Runner.Common.Tests.Listener namespace GitHub.Runner.Common.Tests.Listener
{ {
@@ -256,67 +256,5 @@ namespace GitHub.Runner.Common.Tests.Listener
tokenSource.Token), Times.Once()); tokenSource.Token), Times.Once());
} }
} }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void SkipDeleteSession_WhenGetNextMessageGetTaskAgentAccessTokenExpiredException()
{
using (TestHostContext tc = CreateTestContext())
using (var tokenSource = new CancellationTokenSource())
{
Tracing trace = tc.GetTrace();
// Arrange.
var expectedSession = new TaskAgentSession();
PropertyInfo sessionIdProperty = expectedSession.GetType().GetProperty("SessionId", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
Assert.NotNull(sessionIdProperty);
sessionIdProperty.SetValue(expectedSession, Guid.NewGuid());
_runnerServer
.Setup(x => x.CreateAgentSessionAsync(
_settings.PoolId,
It.Is<TaskAgentSession>(y => y != null),
tokenSource.Token))
.Returns(Task.FromResult(expectedSession));
_credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials());
_store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken });
_store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData));
// Act.
MessageListener listener = new MessageListener();
listener.Initialize(tc);
bool result = await listener.CreateSessionAsync(tokenSource.Token);
Assert.True(result);
_runnerServer
.Setup(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), tokenSource.Token))
.Throws(new TaskAgentAccessTokenExpiredException("test"));
try
{
await listener.GetNextMessageAsync(tokenSource.Token);
}
catch (TaskAgentAccessTokenExpiredException)
{
//expected
}
finally
{
await listener.DeleteSessionAsync();
}
//Assert
_runnerServer
.Verify(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), tokenSource.Token), Times.Once);
_runnerServer
.Verify(x => x.DeleteAgentSessionAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<CancellationToken>()), Times.Never);
}
}
} }
} }

View File

@@ -774,14 +774,7 @@ namespace GitHub.Runner.Common.Tests.Listener
                    var traceFile = Path.GetTempFileName();
                    File.Copy(hc.TraceFileName, traceFile, true);
-                   if (File.ReadAllText(traceFile).Contains("Use trimmed (runtime+externals) package"))
-                   {
-                       Assert.Contains("Something wrong with the trimmed runner package, failback to use the full package for runner updates", File.ReadAllText(traceFile));
-                   }
-                   else
-                   {
-                       hc.GetTrace().Warning("Skipping the 'TestSelfUpdateAsync_FallbackToFullPackage' test, as the `externals` or `runtime` hashes have been updated");
-                   }
+                   Assert.Contains("Something wrong with the trimmed runner package, failback to use the full package for runner updates", File.ReadAllText(traceFile));
                }
            }
            finally

View File

@@ -2,13 +2,13 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics; using System.Diagnostics;
using System.IO; using System.IO;
using System.Linq;
using System.Threading; using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using Xunit; using Xunit;
using GitHub.Runner.Common.Util;
using System.Threading.Channels;
using GitHub.Runner.Sdk;
using System.Linq;
namespace GitHub.Runner.Common.Tests namespace GitHub.Runner.Common.Tests
{ {
@@ -150,128 +150,5 @@ namespace GitHub.Runner.Common.Tests
} }
} }
} }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_CheckDotnetRuntimeHash()
{
using (TestHostContext hc = new TestHostContext(this))
{
Tracing trace = hc.GetTrace();
var dotnetRuntimeHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}");
trace.Info($"Current hash: {File.ReadAllText(dotnetRuntimeHashFile)}");
string layoutTrimsRuntimeAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/runtime");
string binDir = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");
#if OS_WINDOWS
string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node12\bin\node");
#else
string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node12/bin/node");
#endif
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;
var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
p1.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
{
hashResult = data.Data.Substring(10, data.Data.Length - 20);
trace.Info($"Hash result: '{hashResult}'");
}
else
{
trace.Info(data.Data);
}
};
p1.OutputDataReceived += (_, data) =>
{
trace.Info(data.Data);
};
var env = new Dictionary<string, string>
{
["patterns"] = "**"
};
int exitCode = await p1.ExecuteAsync(workingDirectory: layoutTrimsRuntimeAssets,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: CancellationToken.None);
Assert.True(string.Equals(hashResult, File.ReadAllText(dotnetRuntimeHashFile).Trim()), $"Hash mismatch for dotnet runtime. You might need to update `Misc/contentHash/dotnetRuntime/{BuildConstants.RunnerPackage.PackageName}` or check if `hashFiles.ts` ever changed recently.");
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task RunnerLayoutParts_CheckExternalsHash()
{
using (TestHostContext hc = new TestHostContext(this))
{
Tracing trace = hc.GetTrace();
var externalsHashFile = Path.Combine(TestUtil.GetSrcPath(), $"Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}");
trace.Info($"Current hash: {File.ReadAllText(externalsHashFile)}");
string layoutTrimsExternalsAssets = Path.Combine(TestUtil.GetSrcPath(), @"../_layout_trims/externals");
string binDir = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/bin");
#if OS_WINDOWS
string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node12\bin\node");
#else
string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node12/bin/node");
#endif
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;
var p1 = new ProcessInvokerWrapper();
p1.Initialize(hc);
p1.ErrorDataReceived += (_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
{
hashResult = data.Data.Substring(10, data.Data.Length - 20);
trace.Info($"Hash result: '{hashResult}'");
}
else
{
trace.Info(data.Data);
}
};
p1.OutputDataReceived += (_, data) =>
{
trace.Info(data.Data);
};
var env = new Dictionary<string, string>
{
["patterns"] = "**"
};
int exitCode = await p1.ExecuteAsync(workingDirectory: layoutTrimsExternalsAssets,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
cancellationToken: CancellationToken.None);
Assert.True(string.Equals(hashResult, File.ReadAllText(externalsHashFile).Trim()), $"Hash mismatch for externals. You might need to update `Misc/contentHash/externals/{BuildConstants.RunnerPackage.PackageName}` or check if `hashFiles.ts` ever changed recently.");
}
}
} }
} }

View File

@@ -1,6 +1,5 @@
using System;
using System.Runtime.CompilerServices;
-using System.Text.RegularExpressions;
using GitHub.Runner.Common;
using GitHub.Runner.Listener.Configuration;
using Xunit;
@@ -16,7 +15,7 @@ namespace GitHub.Runner.Common.Tests
        {
            RunnerSettings settings = new RunnerSettings();
-           settings.AgentName = "thisiskindofalongrunnerabcde";
+           settings.AgentName = "thisiskindofalongrunnername1";
            settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
            settings.GitHubUrl = "https://github.com/myorganizationexample/myrepoexample";
@@ -44,7 +43,7 @@ namespace GitHub.Runner.Common.Tests
                Assert.Equal("actions", serviceNameParts[0]);
                Assert.Equal("runner", serviceNameParts[1]);
                Assert.Equal("myorganizationexample-myrepoexample", serviceNameParts[2]); // '/' has been replaced with '-'
-               Assert.Equal("thisiskindofalongrunnerabcde", serviceNameParts[3]);
+               Assert.Equal("thisiskindofalongrunnername1", serviceNameParts[3]);
            }
        }
@@ -55,7 +54,7 @@ namespace GitHub.Runner.Common.Tests
        {
            RunnerSettings settings = new RunnerSettings();
-           settings.AgentName = "thisiskindofalongrunnernabcde";
+           settings.AgentName = "thisiskindofalongrunnername12";
            settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
            settings.GitHubUrl = "https://github.com/myorganizationexample/myrepoexample";
@@ -83,129 +82,88 @@ namespace GitHub.Runner.Common.Tests
                Assert.Equal("actions", serviceNameParts[0]);
                Assert.Equal("runner", serviceNameParts[1]);
                Assert.Equal("myorganizationexample-myrepoexample", serviceNameParts[2]); // '/' has been replaced with '-'
-               Assert.Equal("thisiskindofalongrunnernabcde", serviceNameParts[3]); // should not have random numbers unless we exceed 80
+               Assert.Equal("thisiskindofalongrunnername12", serviceNameParts[3]);
            }
        }
#if OS_WINDOWS [Fact]
[Fact] [Trait("Level", "L0")]
[Trait("Level", "L0")] [Trait("Category", "Service")]
[Trait("Category", "Service")] public void CalculateServiceNameLimitsServiceNameTo80Chars()
public void CalculateServiceNameLimitsServiceNameTo80Chars() {
RunnerSettings settings = new RunnerSettings();
settings.AgentName = "thisisareallyreallylongbutstillvalidagentname";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/myreallylongorganizationexample/myreallylongrepoexample";
string serviceNamePattern = "actions.runner.{0}.{1}";
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";
using (TestHostContext hc = CreateTestContext())
{ {
RunnerSettings settings = new RunnerSettings(); ServiceControlManager scm = new ServiceControlManager();
settings.AgentName = "thisisareallyreallylongbutstillvalidagentname"; scm.Initialize(hc);
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890"; scm.CalculateServiceName(
settings.GitHubUrl = "https://github.com/myreallylongorganizationexample/myreallylongrepoexample"; settings,
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);
string serviceNamePattern = "actions.runner.{0}.{1}"; // Verify name has been shortened to 80 characters
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})"; Assert.Equal(80, serviceName.Length);
using (TestHostContext hc = CreateTestContext()) var serviceNameParts = serviceName.Split('.');
{
ServiceControlManager scm = new ServiceControlManager();
scm.Initialize(hc); // Verify that each component has been shortened to a sensible length
scm.CalculateServiceName( Assert.Equal("actions", serviceNameParts[0]); // Never shortened
settings, Assert.Equal("runner", serviceNameParts[1]); // Never shortened
serviceNamePattern, Assert.Equal("myreallylongorganizationexample-myreallylongr", serviceNameParts[2]); // First 45 chars, '/' has been replaced with '-'
serviceDisplayNamePattern, Assert.Equal("thisisareallyreally", serviceNameParts[3]); // Remainder of unused chars
out string serviceName,
out string serviceDisplayName);
// Verify name has been shortened to 80 characters
Assert.Equal(80, serviceName.Length);
var serviceNameParts = serviceName.Split('.');
// Verify that each component has been shortened to a sensible length
Assert.Equal("actions", serviceNameParts[0]); // Never shortened
Assert.Equal("runner", serviceNameParts[1]); // Never shortened
Assert.Equal("myreallylongorganizationexample-myreallylongr", serviceNameParts[2]); // First 45 chars, '/' has been replaced with '-'
Assert.Matches(@"^(thisisareallyr-[0-9]{4})$", serviceNameParts[3]); // Remainder of unused chars, 4 random numbers added at the end
}
} }
}
// Special 'defensive' test that verifies we can gracefully handle creating service names // Special 'defensive' test that verifies we can gracefully handle creating service names
// in case GitHub.com changes its org/repo naming convention in the future, // in case GitHub.com changes its org/repo naming convention in the future,
// and some of these characters may be invalid for service names // and some of these characters may be invalid for service names
// Not meant to test character set exhaustively -- it's just here to exercise the sanitizing logic // Not meant to test character set exhaustively -- it's just here to exercise the sanitizing logic
[Fact] [Fact]
[Trait("Level", "L0")] [Trait("Level", "L0")]
[Trait("Category", "Service")] [Trait("Category", "Service")]
public void CalculateServiceNameSanitizeOutOfRangeChars() public void CalculateServiceNameSanitizeOutOfRangeChars()
{
RunnerSettings settings = new RunnerSettings();
settings.AgentName = "name";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/org!@$*+[]()/repo!@$*+[]()";
string serviceNamePattern = "actions.runner.{0}.{1}";
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";
using (TestHostContext hc = CreateTestContext())
{ {
RunnerSettings settings = new RunnerSettings(); ServiceControlManager scm = new ServiceControlManager();
settings.AgentName = "name"; scm.Initialize(hc);
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890"; scm.CalculateServiceName(
settings.GitHubUrl = "https://github.com/org!@$*+[]()/repo!@$*+[]()"; settings,
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);
string serviceNamePattern = "actions.runner.{0}.{1}"; var serviceNameParts = serviceName.Split('.');
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";
using (TestHostContext hc = CreateTestContext()) // Verify service name parts are sanitized correctly
{ Assert.Equal("actions", serviceNameParts[0]);
ServiceControlManager scm = new ServiceControlManager(); Assert.Equal("runner", serviceNameParts[1]);
Assert.Equal("org----------repo---------", serviceNameParts[2]); // Chars replaced with '-'
scm.Initialize(hc); Assert.Equal("name", serviceNameParts[3]);
scm.CalculateServiceName(
settings,
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);
var serviceNameParts = serviceName.Split('.');
// Verify service name parts are sanitized correctly
Assert.Equal("actions", serviceNameParts[0]);
Assert.Equal("runner", serviceNameParts[1]);
Assert.Equal("org----------repo---------", serviceNameParts[2]); // Chars replaced with '-'
Assert.Equal("name", serviceNameParts[3]);
}
} }
#else }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Service")]
public void CalculateServiceNameLimitsServiceNameTo150Chars()
{
RunnerSettings settings = new RunnerSettings();
settings.AgentName = "thisisareallyreallylongbutstillvalidagentnameiamusingforthisexampletotestverylongnamelimits";
settings.ServerUrl = "https://example.githubusercontent.com/12345678901234567890123456789012345678901234567890";
settings.GitHubUrl = "https://github.com/myreallylongorganizationexampleonlinux/myreallylongrepoexampleonlinux1234";
string serviceNamePattern = "actions.runner.{0}.{1}";
string serviceDisplayNamePattern = "GitHub Actions Runner ({0}.{1})";
using (TestHostContext hc = CreateTestContext())
{
ServiceControlManager scm = new ServiceControlManager();
scm.Initialize(hc);
scm.CalculateServiceName(
settings,
serviceNamePattern,
serviceDisplayNamePattern,
out string serviceName,
out string serviceDisplayName);
// Verify name has been shortened to 150
Assert.Equal(150, serviceName.Length);
var serviceNameParts = serviceName.Split('.');
// Verify that each component has been shortened to a sensible length
Assert.Equal("actions", serviceNameParts[0]); // Never shortened
Assert.Equal("runner", serviceNameParts[1]); // Never shortened
Assert.Equal("myreallylongorganizationexampleonlinux-myreallylongrepoexampleonlinux1", serviceNameParts[2]); // First 70 chars, '/' has been replaced with '-'
Assert.Matches(@"^(thisisareallyreallylongbutstillvalidagentnameiamusingforthi-[0-9]{4})$", serviceNameParts[3]);
}
}
#endif
private TestHostContext CreateTestContext([CallerMemberName] string testName = "") private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
{ {

View File

@@ -121,7 +121,6 @@ namespace GitHub.Runner.Common.Tests.Worker
            using (TestHostContext hc = CreateTestContext())
            {
                _ec.Object.Global.EnvironmentVariables = new Dictionary<string, string>();
-               _ec.Object.Global.JobTelemetry = new List<JobTelemetry>();
                var expressionValues = new DictionaryContextData
                {
                    ["env"] =
@@ -132,6 +131,7 @@ namespace GitHub.Runner.Common.Tests.Worker
#endif
                };
                _ec.Setup(x => x.ExpressionValues).Returns(expressionValues);
+               _ec.Setup(x => x.JobTelemetry).Returns(new List<JobTelemetry>());
                Assert.True(_commandManager.TryProcessCommand(_ec.Object, $"::stop-commands::{invalidToken}", null));
            }
@@ -148,8 +148,8 @@ namespace GitHub.Runner.Common.Tests.Worker
            using (TestHostContext hc = CreateTestContext())
            {
                _ec.Object.Global.EnvironmentVariables = new Dictionary<string, string>();
-               _ec.Object.Global.JobTelemetry = new List<JobTelemetry>();
                _ec.Setup(x => x.ExpressionValues).Returns(GetExpressionValues());
+               _ec.Setup(x => x.JobTelemetry).Returns(new List<JobTelemetry>());
                Assert.Throws<Exception>(() => _commandManager.TryProcessCommand(_ec.Object, $"::stop-commands::{invalidToken}", null));
            }
        }

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
@@ -2135,7 +2135,6 @@ runs:
            _ec = new Mock<IExecutionContext>();
            _ec.Setup(x => x.Global).Returns(new GlobalContext());
            _ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token);
-           _ec.Setup(x => x.Root).Returns(new GitHub.Runner.Worker.ExecutionContext());
            var variables = new Dictionary<string, VariableValue>();
            if (enableComposite)
            {
@@ -2157,8 +2156,8 @@ runs:
            _dockerManager.Setup(x => x.DockerBuild(_ec.Object, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(0));
            _jobServer = new Mock<IJobServer>();
-           _jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
-               .Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
+           _jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
+               .Returns((Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken) =>
                {
                    var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
                    foreach (var action in actions.Actions)

View File

@@ -118,7 +118,7 @@ namespace GitHub.Runner.Common.Tests.Worker
            await _actionRunner.RunAsync();
            //Assert
-           _ec.Verify(x => x.WriteWebhookPayload(), Times.Once);
+           _ec.Verify(x => x.SetGitHubContext("event_path", Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "_github_workflow", "event.json")), Times.Once);
        }
[Fact] [Fact]

View File

@@ -1,289 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Runtime.CompilerServices;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using Moq;
using Xunit;
using DTWebApi = GitHub.DistributedTask.WebApi;
using GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Common.Tests.Worker
{
public sealed class CreateStepSummaryCommandL0
{
private Mock<IExecutionContext> _executionContext;
private Mock<IJobServerQueue> _jobServerQueue;
private ExecutionContext _jobExecutionContext;
private List<Tuple<DTWebApi.Issue, string>> _issues;
private Variables _variables;
private string _rootDirectory;
private CreateStepSummaryCommand _createStepCommand;
private ITraceWriter _trace;
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_FeatureDisabled()
{
using (var hostContext = Setup(featureFlagState: "false"))
{
var stepSummaryFile = Path.Combine(_rootDirectory, "feature-off");
_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();
_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Never());
Assert.Equal(0, _issues.Count);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_FileNull()
{
using (var hostContext = Setup())
{
_createStepCommand.ProcessCommand(_executionContext.Object, null, null);
_jobExecutionContext.Complete();
_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Never());
Assert.Equal(0, _issues.Count);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_DirectoryNotFound()
{
using (var hostContext = Setup())
{
var stepSummaryFile = Path.Combine(_rootDirectory, "directory-not-found", "env");
_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();
_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Never());
Assert.Equal(0, _issues.Count);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_FileNotFound()
{
using (var hostContext = Setup())
{
var stepSummaryFile = Path.Combine(_rootDirectory, "file-not-found");
_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();
_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Never());
Assert.Equal(0, _issues.Count);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_EmptyFile()
{
using (var hostContext = Setup())
{
var stepSummaryFile = Path.Combine(_rootDirectory, "empty-file");
File.Create(stepSummaryFile).Dispose();
_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();
_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Never());
Assert.Equal(0, _issues.Count);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_LargeFile()
{
using (var hostContext = Setup())
{
var stepSummaryFile = Path.Combine(_rootDirectory, "empty-file");
File.WriteAllBytes(stepSummaryFile, new byte[128 * 1024 + 1]);
_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();
_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>()), Times.Never());
Assert.Equal(1, _issues.Count);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_Simple()
{
using (var hostContext = Setup())
{
var stepSummaryFile = Path.Combine(_rootDirectory, "simple");
var content = new List<string>
{
"# This is some markdown content",
"",
"## This is more markdown content",
};
WriteContent(stepSummaryFile, content);
_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();
_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), ChecksAttachmentType.StepSummary, _executionContext.Object.Id.ToString(), stepSummaryFile + "-scrubbed", It.IsAny<bool>()), Times.Once());
Assert.Equal(0, _issues.Count);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void CreateStepSummaryCommand_ScrubSecrets()
{
using (var hostContext = Setup())
{
// configure secretmasker to actually mask secrets
hostContext.SecretMasker.AddRegex("Password=.*");
hostContext.SecretMasker.AddRegex("ghs_.*");
var stepSummaryFile = Path.Combine(_rootDirectory, "simple");
var scrubbedFile = stepSummaryFile + "-scrubbed";
var content = new List<string>
{
"# Password=ThisIsMySecretPassword!",
"",
"# GITHUB_TOKEN ghs_verysecuretoken",
};
WriteContent(stepSummaryFile, content);
_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
var scrubbedFileContents = File.ReadAllText(scrubbedFile);
Assert.DoesNotContain("ThisIsMySecretPassword!", scrubbedFileContents);
Assert.DoesNotContain("ghs_verysecuretoken", scrubbedFileContents);
_jobExecutionContext.Complete();
_jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny<Guid>(), It.IsAny<Guid>(), ChecksAttachmentType.StepSummary, _executionContext.Object.Id.ToString(), scrubbedFile, It.IsAny<bool>()), Times.Once());
Assert.Equal(0, _issues.Count);
}
}
private void WriteContent(
string path,
List<string> content,
string newline = null)
{
if (string.IsNullOrEmpty(newline))
{
newline = Environment.NewLine;
}
var encoding = new UTF8Encoding(true); // Emit BOM
var contentStr = string.Join(newline, content);
File.WriteAllText(path, contentStr, encoding);
}
private TestHostContext Setup([CallerMemberName] string name = "", string featureFlagState = "true")
{
var hostContext = new TestHostContext(this, name);
_issues = new List<Tuple<DTWebApi.Issue, string>>();
// Setup a job request
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "Summary Job";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
jobRequest.Variables["ACTIONS_STEP_DEBUG"] = "true";
// Server queue for job
_jobServerQueue = new Mock<IJobServerQueue>();
_jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
hostContext.SetSingleton(_jobServerQueue.Object);
// Configuration store (required singleton)
var configurationStore = new Mock<IConfigurationStore>();
configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings());
hostContext.SetSingleton(configurationStore.Object);
// Paging Logger (required singleton)
var pagingLogger = new Mock<IPagingLogger>();
hostContext.EnqueueInstance(pagingLogger.Object);
// Trace
_trace = hostContext.GetTrace();
// Variables to test for secret scrubbing & FF options
_variables = new Variables(hostContext, new Dictionary<string, VariableValue>
{
{ "MySecretName", new VariableValue("My secret value", true) },
{ "DistributedTask.UploadStepSummary", featureFlagState },
});
// Directory for test data
var workDirectory = hostContext.GetDirectory(WellKnownDirectory.Work);
ArgUtil.NotNullOrEmpty(workDirectory, nameof(workDirectory));
Directory.CreateDirectory(workDirectory);
_rootDirectory = Path.Combine(workDirectory, nameof(CreateStepSummaryCommandL0));
Directory.CreateDirectory(_rootDirectory);
// Job execution context
_jobExecutionContext = new ExecutionContext();
_jobExecutionContext.Initialize(hostContext);
_jobExecutionContext.InitializeJob(jobRequest, System.Threading.CancellationToken.None);
// Step execution context
_executionContext = new Mock<IExecutionContext>();
_executionContext.Setup(x => x.Global)
.Returns(new GlobalContext
{
EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
WriteDebug = true,
Variables = _variables,
});
_executionContext.Setup(x => x.AddIssue(It.IsAny<DTWebApi.Issue>(), It.IsAny<string>()))
.Callback((DTWebApi.Issue issue, string logMessage) =>
{
_issues.Add(new Tuple<DTWebApi.Issue, string>(issue, logMessage));
var message = !string.IsNullOrEmpty(logMessage) ? logMessage : issue.Message;
_trace.Info($"Issue '{issue.Type}': {message}");
});
_executionContext.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>()))
.Callback((string tag, string message) =>
{
_trace.Info($"{tag}{message}");
});
_executionContext.SetupGet(x => x.Root).Returns(_jobExecutionContext);
//CreateStepSummaryCommand
_createStepCommand = new CreateStepSummaryCommand();
_createStepCommand.Initialize(hostContext);
return hostContext;
}
}
}
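The ScrubSecrets test above expects the command to write a secret-masked sibling file (the original path plus "-scrubbed") and to queue that copy for upload instead of the raw summary. A rough sketch of that flow follows; the MaskSecrets(string) helper on the host context's secret masker is an assumption, and the QueueFileUpload call mirrors the shape the tests verify rather than a confirmed signature.

// Sketch only: mask secrets, write the "-scrubbed" sibling, queue the scrubbed copy for upload.
var rawContent = File.ReadAllText(stepSummaryFile);
var scrubbedContent = hostContext.SecretMasker.MaskSecrets(rawContent); // assumed masking helper
var scrubbedFile = stepSummaryFile + "-scrubbed";
File.WriteAllText(scrubbedFile, scrubbedContent);
jobServerQueue.QueueFileUpload(timelineId, timelineRecordId, ChecksAttachmentType.StepSummary, stepId.ToString(), scrubbedFile, false);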

View File

@@ -1,12 +1,12 @@
using System; using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker;
using Moq;
using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Runtime.CompilerServices; using System.Runtime.CompilerServices;
using System.Threading; using System.Threading;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker;
using Moq;
using Xunit; using Xunit;
using Pipelines = GitHub.DistributedTask.Pipelines; using Pipelines = GitHub.DistributedTask.Pipelines;
@@ -90,109 +90,6 @@ namespace GitHub.Runner.Common.Tests.Worker
            }
        }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void AddIssue_TrimMessageSize()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange: Create a job request message.
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
hc.EnqueueInstance(pagingLogger.Object);
hc.SetSingleton(jobServerQueue.Object);
var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);
// Act.
ec.InitializeJob(jobRequest, CancellationToken.None);
var bigMessage = "";
for (var i = 0; i < 5000; i++)
{
bigMessage += "a";
}
ec.AddIssue(new Issue() { Type = IssueType.Error, Message = bigMessage });
ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = bigMessage });
ec.AddIssue(new Issue() { Type = IssueType.Notice, Message = bigMessage });
ec.Complete();
// Assert.
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Type == IssueType.Error).Single().Message.Length <= 4096)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Type == IssueType.Warning).Single().Message.Length <= 4096)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Type == IssueType.Notice).Single().Message.Length <= 4096)), Times.AtLeastOnce);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void AddIssue_AddStepAndLineNumberInformation()
{
using (TestHostContext hc = CreateTestContext())
{
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var pagingLogger2 = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
hc.EnqueueInstance(pagingLogger.Object);
hc.EnqueueInstance(pagingLogger2.Object);
hc.SetSingleton(jobServerQueue.Object);
var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);
ec.InitializeJob(jobRequest, CancellationToken.None);
ec.Start();
var embeddedStep = ec.CreateChild(Guid.NewGuid(), "action_1_pre", "action_1_pre", null, null, ActionRunStage.Main, isEmbedded: true);
embeddedStep.Start();
embeddedStep.AddIssue(new Issue() { Type = IssueType.Error, Message = "error annotation that should have step and line number information" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning annotation that should have step and line number information" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice annotation that should have step and line number information" });
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Error).Count() == 1)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Warning).Count() == 1)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Notice).Count() == 1)), Times.AtLeastOnce);
}
}
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
@@ -219,7 +116,7 @@ namespace GitHub.Runner.Common.Tests.Worker
            var pagingLogger = new Mock<IPagingLogger>();
            var jobServerQueue = new Mock<IJobServerQueue>();
            jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
-           jobServerQueue.Setup(x => x.QueueWebConsoleLine(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<long>())).Callback((Guid id, string msg, long? lineNumber) => { hc.GetTrace().Info(msg); });
+           jobServerQueue.Setup(x => x.QueueWebConsoleLine(It.IsAny<Guid>(), It.IsAny<string>(),It.IsAny<long>())).Callback((Guid id, string msg, long? lineNumber) => { hc.GetTrace().Info(msg); });
            hc.EnqueueInstance(pagingLogger.Object);
            hc.SetSingleton(jobServerQueue.Object);
@@ -481,177 +378,6 @@ namespace GitHub.Runner.Common.Tests.Worker
            }
        }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void PublishStepTelemetry_RegularStep_NoOpt()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange: Create a job request message.
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
hc.EnqueueInstance(pagingLogger.Object);
hc.SetSingleton(jobServerQueue.Object);
var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);
// Act.
ec.InitializeJob(jobRequest, CancellationToken.None);
ec.Start();
ec.Complete();
// Assert.
Assert.Equal(0, ec.Global.StepsTelemetry.Count);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void PublishStepTelemetry_RegularStep()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange: Create a job request message.
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
hc.EnqueueInstance(pagingLogger.Object);
hc.SetSingleton(jobServerQueue.Object);
var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);
// Act.
ec.InitializeJob(jobRequest, CancellationToken.None);
ec.Start();
ec.StepTelemetry.Type = "node16";
ec.StepTelemetry.Action = "actions/checkout";
ec.StepTelemetry.Ref = "v2";
ec.StepTelemetry.IsEmbedded = false;
ec.StepTelemetry.StepId = Guid.NewGuid();
ec.StepTelemetry.Stage = "main";
ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
ec.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice" });
ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
ec.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice" });
ec.Complete();
// Assert.
Assert.Equal(1, ec.Global.StepsTelemetry.Count);
Assert.Equal("node16", ec.Global.StepsTelemetry.Single().Type);
Assert.Equal("actions/checkout", ec.Global.StepsTelemetry.Single().Action);
Assert.Equal("v2", ec.Global.StepsTelemetry.Single().Ref);
Assert.Equal(TaskResult.Succeeded, ec.Global.StepsTelemetry.Single().Result);
Assert.NotNull(ec.Global.StepsTelemetry.Single().ExecutionTimeInSeconds);
Assert.Equal(3, ec.Global.StepsTelemetry.Single().ErrorMessages.Count);
Assert.DoesNotContain(ec.Global.StepsTelemetry.Single().ErrorMessages, x => x.Contains("notice"));
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void PublishStepTelemetry_EmbeddedStep()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange: Create a job request message.
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var pagingLogger2 = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
hc.EnqueueInstance(pagingLogger.Object);
hc.EnqueueInstance(pagingLogger2.Object);
hc.SetSingleton(jobServerQueue.Object);
var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);
// Act.
ec.InitializeJob(jobRequest, CancellationToken.None);
ec.Start();
var embeddedStep = ec.CreateChild(Guid.NewGuid(), "action_1_pre", "action_1_pre", null, null, ActionRunStage.Main, isEmbedded: true);
embeddedStep.Start();
embeddedStep.StepTelemetry.Type = "node16";
embeddedStep.StepTelemetry.Action = "actions/checkout";
embeddedStep.StepTelemetry.Ref = "v2";
embeddedStep.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice" });
embeddedStep.PublishStepTelemetry();
// Assert.
Assert.Equal(1, ec.Global.StepsTelemetry.Count);
Assert.Equal("node16", ec.Global.StepsTelemetry.Single().Type);
Assert.Equal("actions/checkout", ec.Global.StepsTelemetry.Single().Action);
Assert.Equal("v2", ec.Global.StepsTelemetry.Single().Ref);
Assert.Equal(ActionRunStage.Main.ToString(), ec.Global.StepsTelemetry.Single().Stage);
Assert.True(ec.Global.StepsTelemetry.Single().IsEmbedded);
Assert.Null(ec.Global.StepsTelemetry.Single().Result);
Assert.Null(ec.Global.StepsTelemetry.Single().ExecutionTimeInSeconds);
Assert.Equal(0, ec.Global.StepsTelemetry.Single().ErrorMessages.Count);
}
}
private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
{
var hc = new TestHostContext(this, testName);

View File

@@ -1,100 +0,0 @@
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using Moq;
using Xunit;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Handlers;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Common.Tests.Worker
{
public sealed class HandlerFactoryL0
{
private Mock<IExecutionContext> _ec;
private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
{
var hostContext = new TestHostContext(this, testName);
_ec = new Mock<IExecutionContext>();
_ec.SetupAllProperties();
_ec.Object.Initialize(hostContext);
var handler = new Mock<INodeScriptActionHandler>();
handler.SetupAllProperties();
hostContext.EnqueueInstance(handler.Object);
//hostContext.EnqueueInstance(new ActionCommandManager() as IActionCommandManager);
return hostContext;
}
[Theory]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
[InlineData("node12", "", "", "", "node12")]
[InlineData("node12", "true", "", "", "node16")]
[InlineData("node12", "true", "", "true", "node12")]
[InlineData("node12", "true", "true", "", "node12")]
[InlineData("node12", "true", "true", "true", "node12")]
[InlineData("node12", "true", "false", "true", "node16")] // workflow overrides env
[InlineData("node16", "", "", "", "node16")]
[InlineData("node16", "true", "", "", "node16")]
[InlineData("node16", "true", "", "true", "node16")]
[InlineData("node16", "true", "true", "", "node16")]
[InlineData("node16", "true", "true", "true", "node16")]
[InlineData("node16", "true", "false", "true", "node16")]
public void IsNodeVersionUpgraded(string inputVersion, string serverFeatureFlag, string workflowOptOut, string machineOptOut, string expectedVersion)
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange.
var hf = new HandlerFactory();
hf.Initialize(hc);
// Server Feature Flag
var variables = new Dictionary<string, VariableValue>();
if (!string.IsNullOrEmpty(serverFeatureFlag))
{
variables["DistributedTask.ForceGithubJavascriptActionsToNode16"] = serverFeatureFlag;
}
Variables serverVariables = new Variables(hc, variables);
// Workflow opt-out
var workflowVariables = new Dictionary<string, string>();
if (!string.IsNullOrEmpty(workflowOptOut))
{
workflowVariables[Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion] = workflowOptOut;
}
// Machine opt-out
if (!string.IsNullOrEmpty(machineOptOut))
{
Environment.SetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, machineOptOut);
}
_ec.Setup(x => x.Global).Returns(new GlobalContext()
{
Variables = serverVariables,
EnvironmentVariables = workflowVariables
});
// Act.
var data = new NodeJSActionExecutionData();
data.NodeVersion = inputVersion;
var handler = hf.Create(
_ec.Object,
new ScriptReference(),
new Mock<IStepHost>().Object,
data,
new Dictionary<string, string>(),
new Dictionary<string, string>(),
new Variables(hc, new Dictionary<string, VariableValue>()), "", new List<JobExtensionRunner>()
) as INodeScriptActionHandler;
// Assert.
Assert.Equal(expectedVersion, handler.Data.NodeVersion);
Environment.SetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, null);
}
}
}
}

View File

@@ -1,88 +0,0 @@
using System;
using System.Runtime.CompilerServices;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Handlers;
using Moq;
using Xunit;
namespace GitHub.Runner.Common.Tests.Worker
{
public sealed class HandlerL0
{
private Mock<IExecutionContext> _ec;
private ActionsStepTelemetry _stepTelemetry;
private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
{
var hc = new TestHostContext(this, testName);
_stepTelemetry = new ActionsStepTelemetry();
_ec = new Mock<IExecutionContext>();
_ec.SetupAllProperties();
_ec.Setup(x => x.StepTelemetry).Returns(_stepTelemetry);
var trace = hc.GetTrace();
_ec.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>())).Callback((string tag, string message) => { trace.Info($"[{tag}]{message}"); });
hc.EnqueueInstance<IActionCommandManager>(new Mock<IActionCommandManager>().Object);
return hc;
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void PrepareExecution_PopulateTelemetry_RepoActions()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange.
var nodeHandler = new NodeScriptActionHandler();
nodeHandler.Initialize(hc);
nodeHandler.ExecutionContext = _ec.Object;
nodeHandler.Action = new RepositoryPathReference()
{
Name = "actions/checkout",
Ref = "v2"
};
// Act.
nodeHandler.PrepareExecution(ActionRunStage.Main);
hc.GetTrace().Info($"Telemetry: {StringUtil.ConvertToJson(_stepTelemetry)}");
// Assert.
Assert.Equal("repository", _stepTelemetry.Type);
Assert.Equal("actions/checkout", _stepTelemetry.Action);
Assert.Equal("v2", _stepTelemetry.Ref);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void PrepareExecution_PopulateTelemetry_DockerActions()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange.
var nodeHandler = new NodeScriptActionHandler();
nodeHandler.Initialize(hc);
nodeHandler.ExecutionContext = _ec.Object;
nodeHandler.Action = new ContainerRegistryReference()
{
Image = "ubuntu:20.04"
};
// Act.
nodeHandler.PrepareExecution(ActionRunStage.Main);
hc.GetTrace().Info($"Telemetry: {StringUtil.ConvertToJson(_stepTelemetry)}");
// Assert.
Assert.Equal("docker", _stepTelemetry.Type);
Assert.Equal("ubuntu:20.04", _stepTelemetry.Action);
}
}
}
}

View File

@@ -25,7 +25,6 @@ namespace GitHub.Runner.Common.Tests.Worker
private Mock<IPagingLogger> _logger;
private Mock<IContainerOperationProvider> _containerProvider;
private Mock<IDiagnosticLogManager> _diagnosticLogManager;
private Mock<IJobHookProvider> _jobHookProvider;
private CancellationTokenSource _tokenSource;
private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
@@ -41,7 +40,6 @@ namespace GitHub.Runner.Common.Tests.Worker
_directoryManager = new Mock<IPipelineDirectoryManager>();
_directoryManager.Setup(x => x.PrepareDirectory(It.IsAny<IExecutionContext>(), It.IsAny<Pipelines.WorkspaceOptions>()))
.Returns(new TrackingConfig() { PipelineDirectory = "runner", WorkspaceDirectory = "runner/runner" });
_jobHookProvider = new Mock<IJobHookProvider>();
IActionRunner step1 = new ActionRunner();
IActionRunner step2 = new ActionRunner();
@@ -113,9 +111,7 @@ namespace GitHub.Runner.Common.Tests.Worker
hc.SetSingleton(_containerProvider.Object);
hc.SetSingleton(_directoryManager.Object);
hc.SetSingleton(_diagnosticLogManager.Object);
hc.SetSingleton(_jobHookProvider.Object);
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // JobExecutionContext
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // job start hook
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // Initial Job
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step1
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step2
@@ -124,7 +120,6 @@ namespace GitHub.Runner.Common.Tests.Worker
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step5
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // prepare1
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // prepare2
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // job complete hook
hc.EnqueueInstance<IActionRunner>(step1);
hc.EnqueueInstance<IActionRunner>(step2);
@@ -353,62 +348,5 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(TaskResult.Succeeded, _jobEc.Result);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task EnsurePreAndPostHookStepsIfEnvExists()
{
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED", "/foo/bar");
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED", "/bar/foo");
using (TestHostContext hc = CreateTestContext())
{
var jobExtension = new JobExtension();
jobExtension.Initialize(hc);
_actionManager.Setup(x => x.PrepareActionsAsync(It.IsAny<IExecutionContext>(), It.IsAny<IEnumerable<Pipelines.JobStep>>(), It.IsAny<Guid>()))
.Returns(Task.FromResult(new PrepareResult(new List<JobExtensionRunner>(), new Dictionary<Guid, IActionRunner>())));
List<IStep> result = await jobExtension.InitializeJob(_jobEc, _message);
var trace = hc.GetTrace();
var hookStart = result.First() as JobExtensionRunner;
jobExtension.FinalizeJob(_jobEc, _message, DateTime.UtcNow);
Assert.Equal(Constants.Hooks.JobStartedStepName, hookStart.DisplayName);
Assert.Equal(Constants.Hooks.JobCompletedStepName, (_jobEc.PostJobSteps.Last() as JobExtensionRunner).DisplayName);
}
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED", null);
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED", null);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void EnsureNoPreAndPostHookSteps()
{
using (TestHostContext hc = CreateTestContext())
{
var jobExtension = new JobExtension();
jobExtension.Initialize(hc);
_message.ActionsEnvironment = null;
_jobEc = new Runner.Worker.ExecutionContext {Result = TaskResult.Succeeded};
_jobEc.Initialize(hc);
_jobEc.InitializeJob(_message, _tokenSource.Token);
var x = _jobEc.JobSteps;
jobExtension.FinalizeJob(_jobEc, _message, DateTime.UtcNow);
Assert.Equal(TaskResult.Succeeded, _jobEc.Result);
Assert.Equal(0, _jobEc.PostJobSteps.Count);
}
}
}
}

View File

@@ -302,50 +302,6 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Heredoc_MissingNewLine()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "heredoc");
var content = new List<string>
{
"MY_ENV<<EOF",
"line one",
"line two",
"line three",
"EOF",
};
WriteContent(envFile, content, " ");
var ex = Assert.Throws<Exception>(() => _setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null));
Assert.Contains("Matching delimiter not found", ex.Message);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Heredoc_MissingNewLineMultipleLinesEnv()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "heredoc");
var content = new List<string>
{
"MY_ENV<<EOF",
@"line one
line two
line three",
"EOF",
};
WriteContent(envFile, content, " ");
var ex = Assert.Throws<Exception>(() => _setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null));
Assert.Contains("EOF marker missing new line", ex.Message);
}
}
#if OS_WINDOWS
[Fact]
[Trait("Level", "L0")]

View File

@@ -1 +0,0 @@
{ "2.283.2": {"targetVersion":"2.284.1"}, "2.284.1": {"targetVersion":"2.285.0"}}

View File

@@ -128,7 +128,6 @@ function layout ()
chmod +x "${LAYOUT_DIR}/bin/Runner.Worker"
chmod +x "${LAYOUT_DIR}/bin/Runner.PluginHost"
chmod +x "${LAYOUT_DIR}/bin/installdependencies.sh"
chmod +x "${LAYOUT_DIR}/safe_sleep.sh"
fi
heading "Setup externals folder for $RUNTIME_ID runner's layout"

View File

@@ -1 +1 @@
2.289.0
2.286.2