Compare commits


2 Commits

Author         SHA1        Message                         Date
Thomas Boop    e95a2b840b  Release version 288.1           2022-02-28 14:07:05 -05:00
Tingluo Huang  d6af5b2955  Create 2.288.0 runner release.  2022-02-25 16:43:45 -05:00
29 changed files with 4174 additions and 2912 deletions

View File

@@ -260,17 +260,6 @@ jobs:
console.log(releaseNote)
core.setOutput('version', runnerVersion);
core.setOutput('note', releaseNote);
- name: Validate Packages HASH
working-directory: _package
run: |
ls -l
echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
echo "${{needs.build.outputs.osx-x64-sha}} actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-x64-sha}} actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-arm-sha}} actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-arm64-sha}} actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
# Create GitHub release
- uses: actions/create-release@master
id: createRelease
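
The "Validate Packages HASH" step in this hunk pipes each expected digest and package name into `shasum -a 256 -c`, which fails the job if any archive does not match the checksum captured by the build job. The same verification can be done by hand against a downloaded release asset; a minimal sketch, with a placeholder digest and file name rather than real release values:

```sh
# Placeholder values for illustration only; use the SHA-256 published with the release
# and the archive you actually downloaded.
EXPECTED_SHA="0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
PACKAGE="actions-runner-linux-x64-2.288.1.tar.gz"

# shasum -c reads "<digest>  <filename>" lines and exits non-zero on a mismatch.
echo "${EXPECTED_SHA}  ${PACKAGE}" | shasum -a 256 -c
```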

View File

@@ -1,12 +1,32 @@
## Features
- Make run.sh|cmd handle update without quitting so containers using them as entrypoints don't exit on update (#1646, #1633, #1708)
- Add support for Step Summary (#1642, #1667, #1712); a usage sketch follows these notes
- Pass jobId to the actionsDownloadInfo controller (#1639)
- Updated systemd svc.sh to accept a custom service file (#1612)
- Add ability to specify runner group when creating service (#1675)
- Prefer node16 over node12 when running internal scripts (#1621)
- Send telemetry about actions usage (#1688)
- Bump node12 version to latest (#1651)
- Add internal to node version function and use better env var name (#1715)
- Force JS Actions Node version to 16 if the feature flag is on, unless the user opted out (#1716)
## Bugs
- Fixed a crash on runner startup (#1770)
- Fix windows console runner update crash (#1670)
- Add retry policy for the GetTenantCredential and GetJITRunnerTokenAsync methods (#1691)
- Skip DeleteAgentSession when the access token has been revoked (#1692)
- Repaired hashFiles call so that a thrown error is returned to the process invoker (#1678)
- Fixed a null reference exception thrown by the runner when the trailing newline at EOF is missing (#1687)
- Allow up to 150 characters for service names on Linux/macOS (#1710)
## Misc
- Clarified the type of step that runs for job started or completed hooks (#1769)
- Added examples and aligned language within docs/checks/actions.md (#1664)
- Addressed a problem with debugging on macOS M1 (#1625)
- Fix typo in hashFiles.ts. (#1672)
- Allow mocked updates for E2E testing (#1654)
- Move JobTelemetry and StepsTelemetry into GlobalContext. (#1680)
- Fix inconsistency of outputs (both "canceled" and "cancelled" were used) (#1624)
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
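
For the Step Summary support listed under Features above, the runner exposes a per-step file whose path is available to steps as `GITHUB_STEP_SUMMARY`; Markdown appended to that file is rendered on the run's summary page. A minimal usage sketch for a shell step (the summary text is illustrative):

```sh
# Inside a workflow step: append Markdown to the file the runner points to via GITHUB_STEP_SUMMARY.
echo "### Test results" >> "$GITHUB_STEP_SUMMARY"
echo "- 42 passed, 0 failed" >> "$GITHUB_STEP_SUMMARY"
```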

View File

@@ -1 +1 @@
2.289.1
2.288.1

View File

@@ -1,6 +1,6 @@
{
"plugins": ["@typescript-eslint"],
"extends": ["plugin:github/recommended"],
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/es6"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
@@ -17,16 +17,13 @@
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/naming-convention": [
"error",
{
"selector": "default",
"format": ["camelCase"]
}
],
"@typescript-eslint/ban-ts-ignore": "error",
"camelcase": "off",
"@typescript-eslint/camelcase": "error",
"@typescript-eslint/class-name-casing": "error",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
@@ -36,6 +33,7 @@
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/no-object-literal-type-assertion": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
@@ -43,19 +41,19 @@
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-interface": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"semi": "off",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error",
"filenames/match-regex" : "off",
"github/no-then" : 1, // warning
"semi": "off"
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true
"es6": true,
"jest/globals": true
}
}

File diff suppressed because it is too large

View File

@@ -25,10 +25,10 @@
},
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^5.15.0",
"@typescript-eslint/parser": "^2.8.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^8.11.0",
"eslint-plugin-github": "^4.3.5",
"eslint": "^6.8.0",
"eslint-plugin-github": "^2.0.0",
"prettier": "^1.19.1",
"typescript": "^3.6.4"
}

View File

@@ -1,9 +1,9 @@
import * as glob from '@actions/glob'
import * as crypto from 'crypto'
import * as fs from 'fs'
import * as glob from '@actions/glob'
import * as path from 'path'
import * as stream from 'stream'
import * as util from 'util'
import * as path from 'path'
async function run(): Promise<void> {
// arg0 -> node

View File

@@ -3,135 +3,94 @@
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
var childProcess = require("child_process");
var path = require("path");
var path = require("path")
var supported = ["linux", "darwin"];
var supported = ['linux', 'darwin']
if (supported.indexOf(process.platform) == -1) {
console.log("Unsupported platform: " + process.platform);
console.log("Supported platforms are: " + supported.toString());
process.exit(1);
console.log('Unsupported platform: ' + process.platform);
console.log('Supported platforms are: ' + supported.toString());
process.exit(1);
}
var stopping = false;
var listener = null;
var exitServiceAfterNFailures = Number(
process.env.GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES
);
var runService = function () {
var listenerExePath = path.join(__dirname, '../bin/Runner.Listener');
var interactive = process.argv[2] === "interactive";
if (exitServiceAfterNFailures <= 0) {
exitServiceAfterNFailures = NaN;
if (!stopping) {
try {
if (interactive) {
console.log('Starting Runner listener interactively');
listener = childProcess.spawn(listenerExePath, ['run'], { env: process.env });
} else {
console.log('Starting Runner listener with startup type: service');
listener = childProcess.spawn(listenerExePath, ['run', '--startuptype', 'service'], { env: process.env });
}
console.log(`Started listener process, pid: ${listener.pid}`);
listener.stdout.on('data', (data) => {
process.stdout.write(data.toString('utf8'));
});
listener.stderr.on('data', (data) => {
process.stdout.write(data.toString('utf8'));
});
listener.on("error", (err) => {
console.log(`Runner listener fail to start with error ${err.message}`);
});
listener.on('close', (code) => {
console.log(`Runner listener exited with error code ${code}`);
if (code === 0) {
console.log('Runner listener exit with 0 return code, stop the service, no retry needed.');
stopping = true;
} else if (code === 1) {
console.log('Runner listener exit with terminated error, stop the service, no retry needed.');
stopping = true;
} else if (code === 2) {
console.log('Runner listener exit with retryable error, re-launch runner in 5 seconds.');
} else if (code === 3) {
console.log('Runner listener exit because of updating, re-launch runner in 5 seconds.');
} else {
console.log('Runner listener exit with undefined return code, re-launch runner in 5 seconds.');
}
if (!stopping) {
setTimeout(runService, 5000);
}
});
} catch (ex) {
console.log(ex);
}
}
}
var consecutiveFailureCount = 0;
var gracefulShutdown = function () {
console.log("Shutting down runner listener");
stopping = true;
if (listener) {
console.log("Sending SIGINT to runner listener to stop");
listener.kill("SIGINT");
console.log("Sending SIGKILL to runner listener");
setTimeout(() => listener.kill("SIGKILL"), 30000).unref();
}
};
var runService = function () {
var listenerExePath = path.join(__dirname, "../bin/Runner.Listener");
var interactive = process.argv[2] === "interactive";
if (!stopping) {
try {
if (interactive) {
console.log("Starting Runner listener interactively");
listener = childProcess.spawn(listenerExePath, ["run"], {
env: process.env,
});
} else {
console.log("Starting Runner listener with startup type: service");
listener = childProcess.spawn(
listenerExePath,
["run", "--startuptype", "service"],
{ env: process.env }
);
}
console.log(`Started listener process, pid: ${listener.pid}`);
listener.stdout.on("data", (data) => {
if (data.toString("utf8").includes("Listening for Jobs")) {
consecutiveFailureCount = 0;
}
process.stdout.write(data.toString("utf8"));
});
listener.stderr.on("data", (data) => {
process.stdout.write(data.toString("utf8"));
});
listener.on("error", (err) => {
console.log(`Runner listener fail to start with error ${err.message}`);
});
listener.on("close", (code) => {
console.log(`Runner listener exited with error code ${code}`);
if (code === 0) {
console.log(
"Runner listener exit with 0 return code, stop the service, no retry needed."
);
stopping = true;
} else if (code === 1) {
console.log(
"Runner listener exit with terminated error, stop the service, no retry needed."
);
stopping = true;
} else if (code === 2) {
console.log(
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
} else if (code === 3 || code === 4) {
console.log(
"Runner listener exit because of updating, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
} else {
var messagePrefix = "Runner listener exit with undefined return code";
consecutiveFailureCount++;
if (
!isNaN(exitServiceAfterNFailures) &&
consecutiveFailureCount >= exitServiceAfterNFailures
) {
console.error(
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
);
gracefulShutdown();
return;
} else {
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
}
}
if (!stopping) {
setTimeout(runService, 5000);
}
});
} catch (ex) {
console.log(ex);
}
}
};
runService();
console.log("Started running service");
console.log('Started running service');
process.on("SIGINT", () => {
gracefulShutdown();
var gracefulShutdown = function (code) {
console.log('Shutting down runner listener');
stopping = true;
if (listener) {
console.log('Sending SIGINT to runner listener to stop');
listener.kill('SIGINT');
console.log('Sending SIGKILL to runner listener');
setTimeout(() => listener.kill('SIGKILL'), 30000).unref();
}
}
process.on('SIGINT', () => {
gracefulShutdown(0);
});
process.on("SIGTERM", () => {
gracefulShutdown();
process.on('SIGTERM', () => {
gracefulShutdown(0);
});
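
One side of this runsvc.js diff reads `GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES`: when the variable holds a positive number, the wrapper calls gracefulShutdown after that many consecutive listener exits with an undefined return code instead of retrying forever. A hedged sketch of setting it for a systemd-managed runner (the unit name is a placeholder for the actual service created by svc.sh):

```sh
# Assumes the runner was installed as a systemd service via ./svc.sh install; the unit name is illustrative.
sudo systemctl edit actions.runner.example-org.example-runner.service
# In the drop-in override that opens, add:
#   [Service]
#   Environment=GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES=5
sudo systemctl restart actions.runner.example-org.example-runner.service
```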

View File

@@ -63,7 +63,7 @@ function install()
mkdir -p "${log_path}" || failed "failed to create ${log_path}"
echo Creating ${PLIST_PATH}
sed "s/{{User}}/${USER:-$SUDO_USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
sed "s/{{User}}/${SUDO_USER:-$USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
mv "${TEMP_PATH}" "${PLIST_PATH}" || failed "failed to copy plist"
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
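
The two sed lines in this hunk differ only in the fallback order used for the {{User}} placeholder: `${USER:-$SUDO_USER}` resolves to root when the installer runs under sudo (where sudo typically sets USER to root), while `${SUDO_USER:-$USER}` prefers the invoking login account. A small illustrative comparison, assuming that common sudo behaviour:

```sh
# Run under sudo on a machine with default sudo env handling; the account name in the output is illustrative.
sudo sh -c 'echo "USER=$USER SUDO_USER=$SUDO_USER";
            echo "USER first     : ${USER:-$SUDO_USER}";
            echo "SUDO_USER first: ${SUDO_USER:-$USER}"'
# Typical output:
#   USER=root SUDO_USER=alice
#   USER first     : root
#   SUDO_USER first: alice
```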

View File

@@ -1557,12 +1557,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const glob = __importStar(__webpack_require__(281));
const crypto = __importStar(__webpack_require__(417));
const fs = __importStar(__webpack_require__(747));
const glob = __importStar(__webpack_require__(281));
const path = __importStar(__webpack_require__(622));
const stream = __importStar(__webpack_require__(413));
const util = __importStar(__webpack_require__(669));
const path = __importStar(__webpack_require__(622));
function run() {
var e_1, _a;
return __awaiter(this, void 0, void 0, function* () {

View File

@@ -30,13 +30,13 @@ date "+[%F %T-%4N] Waiting for $runnerprocessname ($runnerpid) to complete" >> "
while [ -e /proc/$runnerpid ]
do
date "+[%F %T-%4N] Process $runnerpid still running" >> "$logfile" 2>&1
"$rootfolder"/safe_sleep.sh 2
sleep 2
done
date "+[%F %T-%4N] Process $runnerpid finished running" >> "$logfile" 2>&1
# start re-organize folders
date "+[%F %T-%4N] Sleep 1 more second to make sure process exited" >> "$logfile" 2>&1
"$rootfolder"/safe_sleep.sh 1
sleep 1
# the folder structure under runner root will be
# ./bin -> bin.2.100.0 (junction folder)

View File

@@ -10,6 +10,22 @@ fi
# Run
shopt -s nocasematch
safe_sleep() {
if [ ! -x "$(command -v sleep)" ]; then
if [ ! -x "$(command -v ping)" ]; then
COUNT="0"
while [[ $COUNT != 5000 ]]; do
echo "SLEEP" > /dev/null
COUNT=$[$COUNT+1]
done
else
ping -c 5 127.0.0.1 > /dev/null
fi
else
sleep 5
fi
}
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
@@ -28,17 +44,17 @@ elif [[ $returnCode == 1 ]]; then
exit 0
elif [[ $returnCode == 2 ]]; then
echo "Runner listener exit with retryable error, re-launch runner in 5 seconds."
"$DIR"/safe_sleep.sh 5
safe_sleep
exit 2
elif [[ $returnCode == 3 ]]; then
# Sleep 5 seconds to wait for the runner update process finish
echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
"$DIR"/safe_sleep.sh 5
safe_sleep
exit 2
elif [[ $returnCode == 4 ]]; then
# Sleep 5 seconds to wait for the ephemeral runner update process finish
echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
"$DIR"/safe_sleep.sh 5
safe_sleep
exit 2
else
echo "Exiting with unknown error code: ${returnCode}"

View File

@@ -1,6 +0,0 @@
#!/bin/bash
SECONDS=0
while [[ $SECONDS != $1 ]]; do
:
done

View File

@@ -189,12 +189,6 @@ namespace GitHub.Runner.Common
public static readonly string Success = "success";
}
public static class Hooks
{
public static readonly string JobStartedStepName = "Set up runner";
public static readonly string JobCompletedStepName = "Complete runner";
}
public static class Path
{
public static readonly string ActionsDirectory = "_actions";

View File

@@ -3,27 +3,23 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Net.WebSockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using Newtonsoft.Json;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(JobServer))]
public interface IJobServer : IRunnerService, IAsyncDisposable
public interface IJobServer : IRunnerService
{
Task ConnectAsync(VssConnection jobConnection);
void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
// logging and console
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken);
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
@@ -38,20 +34,6 @@ namespace GitHub.Runner.Common
private bool _hasConnection;
private VssConnection _connection;
private TaskHttpClient _taskClient;
private ClientWebSocket _websocketClient;
private ServiceEndpoint _serviceEndpoint;
private int totalBatchedLinesAttemptedByWebsocket = 0;
private int failedAttemptsToPostBatchedLinesByWebsocket = 0;
private static readonly TimeSpan _minDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(100);
private static readonly TimeSpan _maxDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(500);
private static readonly int _minWebsocketFailurePercentageAllowed = 50;
private static readonly int _minWebsocketBatchedLinesCountToConsider = 5;
private Task _websocketConnectTask;
public async Task ConnectAsync(VssConnection jobConnection)
{
@@ -135,19 +117,6 @@ namespace GitHub.Runner.Common
}
}
public void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint)
{
this._serviceEndpoint = serviceEndpoint;
InitializeWebsocketClient(TimeSpan.Zero);
}
public ValueTask DisposeAsync()
{
_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, "Shutdown", CancellationToken.None);
GC.SuppressFinalize(this);
return ValueTask.CompletedTask;
}
private void CheckConnection()
{
if (!_hasConnection)
@@ -156,48 +125,6 @@ namespace GitHub.Runner.Common
}
}
private void InitializeWebsocketClient(TimeSpan delay)
{
if (_serviceEndpoint.Authorization != null &&
_serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out var accessToken) &&
!string.IsNullOrEmpty(accessToken))
{
if (_serviceEndpoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl) && !string.IsNullOrEmpty(feedStreamUrl))
{
// let's ensure we use the right scheme
feedStreamUrl = feedStreamUrl.Replace("https://", "wss://").Replace("http://", "ws://");
Trace.Info($"Creating websocket client ..." + feedStreamUrl);
this._websocketClient = new ClientWebSocket();
this._websocketClient.Options.SetRequestHeader("Authorization", $"Bearer {accessToken}");
this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay);
}
else
{
Trace.Info($"No FeedStreamUrl found, so we will use Rest API calls for sending feed data");
}
}
else
{
Trace.Info($"No access token from the service endpoint");
}
}
private async Task ConnectWebSocketClient(string feedStreamUrl, TimeSpan delay)
{
try
{
Trace.Info($"Attempting to start websocket client with delay {delay}.");
await Task.Delay(delay);
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), default(CancellationToken));
Trace.Info($"Successfully started websocket client.");
}
catch(Exception ex)
{
Trace.Info("Exception caught during websocket client connect, fallback of HTTP would be used now instead of websocket.");
Trace.Error(ex);
}
}
//-----------------------------------------------------------------
// Feedback: WebConsole, TimelineRecords and Logs
//-----------------------------------------------------------------
@@ -208,72 +135,16 @@ namespace GitHub.Runner.Common
return _taskClient.AppendLogContentAsync(scopeIdentifier, hubName, planId, logId, uploadStream, cancellationToken: cancellationToken);
}
public async Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken)
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken)
{
CheckConnection();
var pushedLinesViaWebsocket = false;
if (_websocketConnectTask != null)
{
await _websocketConnectTask;
}
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
}
// "_websocketClient != null" implies either: We have a successful connection OR we have to attempt sending again and then reconnect
// ...in other words, if websocket client is null, we will skip sending to websocket and just use rest api calls to send data
if (_websocketClient != null)
{
var linesWrapper = startLine.HasValue? new TimelineRecordFeedLinesWrapper(stepId, lines, startLine.Value): new TimelineRecordFeedLinesWrapper(stepId, lines);
var jsonData = StringUtil.ConvertToJson(linesWrapper);
try
{
totalBatchedLinesAttemptedByWebsocket++;
var jsonDataBytes = Encoding.UTF8.GetBytes(jsonData);
// break the message into chunks of 1024 bytes
for (var i = 0; i < jsonDataBytes.Length; i += 1 * 1024)
{
var lastChunk = i + (1 * 1024) >= jsonDataBytes.Length;
var chunk = new ArraySegment<byte>(jsonDataBytes, i, Math.Min(1 * 1024, jsonDataBytes.Length - i));
await _websocketClient.SendAsync(chunk, WebSocketMessageType.Text, endOfMessage:lastChunk, cancellationToken);
}
pushedLinesViaWebsocket = true;
}
catch (Exception ex)
{
failedAttemptsToPostBatchedLinesByWebsocket++;
Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
Trace.Error(ex);
if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
{
// let's consider failure percentage
if (failedAttemptsToPostBatchedLinesByWebsocket * 100 / totalBatchedLinesAttemptedByWebsocket > _minWebsocketFailurePercentageAllowed)
{
Trace.Info($"Exhausted websocket allowed retries, we will not attempt websocket connection for this job to post lines again.");
_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.InternalServerError, "Shutdown due to failures", cancellationToken);
// By setting it to null, we will ensure that we never try websocket path again for this job
_websocketClient = null;
}
}
if (_websocketClient != null)
{
var delay = BackoffTimerHelper.GetRandomBackoff(_minDelayForWebsocketReconnect, _maxDelayForWebsocketReconnect);
Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}).");
InitializeWebsocketClient(delay);
}
}
}
if (!pushedLinesViaWebsocket)
{
if (startLine.HasValue)
{
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine.Value, cancellationToken: cancellationToken);
}
else
{
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
}
}
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken)
{
CheckConnection();
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine, cancellationToken: cancellationToken);
}
public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken)

View File

@@ -71,7 +71,7 @@ namespace GitHub.Runner.Common
// Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
// Then the dequeue will happen every 500ms.
// In this way, customer still can get instance live console output on job start,
// In this way, customer still can get instance live console output on job start,
// at the same time we can cut the load to server after the build run for more than 60s
private int _webConsoleLineAggressiveDequeueCount = 0;
private const int _webConsoleLineAggressiveDequeueLimit = 4 * 60;
@@ -89,10 +89,6 @@ namespace GitHub.Runner.Common
{
Trace.Entering();
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
_jobServer.InitializeWebsocketClient(serviceEndPoint);
if (_queueInProcess)
{
Trace.Info("No-opt, all queue process tasks are running.");
@@ -160,9 +156,6 @@ namespace GitHub.Runner.Common
await ProcessTimelinesUpdateQueueAsync(runOnce: true);
Trace.Info("Timeline update queue drained.");
Trace.Info($"Disposing job server ...");
await _jobServer.DisposeAsync();
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
}
@@ -299,7 +292,15 @@ namespace GitHub.Runner.Common
{
try
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, default(CancellationToken));
// we will not requeue failed batch, since the web console lines are time sensitive.
if (batch[0].LineNumber.HasValue)
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber.Value, default(CancellationToken));
}
else
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), default(CancellationToken));
}
if (_firstConsoleOutputs)
{
@@ -488,8 +489,8 @@ namespace GitHub.Runner.Common
if (runOnce)
{
// continue process timeline records update,
// we might have more records need update,
// continue process timeline records update,
// we might have more records need update,
// since we just create a new sub-timeline
if (pendingSubtimelineUpdate)
{

View File

@@ -1,5 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
@@ -9,7 +11,6 @@ using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Handlers;
using Pipelines = GitHub.DistributedTask.Pipelines;
@@ -140,7 +141,21 @@ namespace GitHub.Runner.Worker
IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
ExecutionContext.WriteWebhookPayload();
// Makes directory for event_path data
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
Directory.CreateDirectory(workflowDirectory);
var gitHubEvent = ExecutionContext.GetGitHubContext("event");
// adds the GitHub event path/file if the event exists
if (gitHubEvent != null)
{
var workflowFile = Path.Combine(workflowDirectory, "event.json");
Trace.Info($"Write event payload to {workflowFile}");
File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
ExecutionContext.SetGitHubContext("event_path", workflowFile);
}
// Set GITHUB_ACTION_REPOSITORY if this Action is from a repository
if (Action.Reference is Pipelines.RepositoryPathReference repoPathReferenceAction &&

View File

@@ -109,7 +109,6 @@ namespace GitHub.Runner.Worker
void ForceTaskComplete();
void RegisterPostJobStep(IStep step);
void PublishStepTelemetry();
void WriteWebhookPayload();
}
public sealed class ExecutionContext : RunnerService, IExecutionContext
@@ -551,15 +550,10 @@ namespace GitHub.Runner.Worker
issue.Message = issue.Message[.._maxIssueMessageLength];
}
// Tracking the line number (logFileLineNumber) and step number (stepNumber) for each issue that gets created
// Actions UI from the run summary page use both values to easily link to an exact locations in logs where annotations originate from
if (_record.Order != null)
{
issue.Data["stepNumber"] = _record.Order.ToString();
}
if (issue.Type == IssueType.Error)
{
// tracking line number for each issue in log file
// log UI use this to navigate from issue to log
if (!string.IsNullOrEmpty(logMessage))
{
long logLineNumber = Write(WellKnownTags.Error, logMessage);
@@ -575,6 +569,8 @@ namespace GitHub.Runner.Worker
}
else if (issue.Type == IssueType.Warning)
{
// tracking line number for each issue in log file
// log UI use this to navigate from issue to log
if (!string.IsNullOrEmpty(logMessage))
{
long logLineNumber = Write(WellKnownTags.Warning, logMessage);
@@ -590,6 +586,9 @@ namespace GitHub.Runner.Worker
}
else if (issue.Type == IssueType.Notice)
{
// tracking line number for each issue in log file
// log UI use this to navigate from issue to log
if (!string.IsNullOrEmpty(logMessage))
{
long logLineNumber = Write(WellKnownTags.Notice, logMessage);
@@ -992,24 +991,6 @@ namespace GitHub.Runner.Worker
}
}
public void WriteWebhookPayload()
{
// Makes directory for event_path data
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
Directory.CreateDirectory(workflowDirectory);
var gitHubEvent = GetGitHubContext("event");
// adds the GitHub event path/file if the event exists
if (gitHubEvent != null)
{
var workflowFile = Path.Combine(workflowDirectory, "event.json");
Trace.Info($"Write event payload to {workflowFile}");
File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
SetGitHubContext("event_path", workflowFile);
}
}
private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order)
{
_mainTimelineId = timelineId;

View File

@@ -8,10 +8,10 @@ namespace GitHub.Runner.Worker
{
private readonly HashSet<string> _contextEnvAllowlist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"action",
"action_path",
"action_ref",
"action_repository",
"action",
"actor",
"api_url",
"base_ref",
@@ -22,12 +22,12 @@ namespace GitHub.Runner.Worker
"head_ref",
"job",
"path",
"ref",
"ref_name",
"ref_protected",
"ref_type",
"ref",
"repository_owner",
"repository",
"repository_owner",
"retention_days",
"run_attempt",
"run_id",
@@ -35,7 +35,6 @@ namespace GitHub.Runner.Worker
"server_url",
"sha",
"step_summary",
"triggering_actor",
"workflow",
"workspace",
};

View File

@@ -36,7 +36,6 @@ namespace GitHub.Runner.Worker.Handlers
protected IActionCommandManager ActionCommandManager { get; private set; }
public Pipelines.ActionStepDefinitionReference Action { get; set; }
public bool IsActionStep => Action != null;
public Dictionary<string, string> Environment { get; set; }
public Variables RuntimeVariables { get; set; }
public IExecutionContext ExecutionContext { get; set; }
@@ -50,18 +49,13 @@ namespace GitHub.Runner.Worker.Handlers
// Print out action details
PrintActionDetails(stage);
// Get telemetry for the action
PopulateActionTelemetry(stage);
// Get telemetry for the action.
PopulateActionTelemetry();
}
protected void PopulateActionTelemetry(ActionRunStage stage)
protected void PopulateActionTelemetry()
{
if (!IsActionStep)
{
ExecutionContext.StepTelemetry.Type = "runner";
ExecutionContext.StepTelemetry.Action = $"{stage} Job Hook";
}
else if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
{
ExecutionContext.StepTelemetry.Type = "docker";
var registryAction = Action as Pipelines.ContainerRegistryReference;

View File

@@ -24,22 +24,16 @@ namespace GitHub.Runner.Worker.Handlers
protected override void PrintActionDetails(ActionRunStage stage)
{
// if we're executing a Job Extension, we won't have an 'Action'
if (!IsActionStep)
if (stage == ActionRunStage.Post)
{
if (Inputs.TryGetValue("path", out var path))
{
ExecutionContext.Output($"##[group]Run '{path}'");
}
else
{
throw new InvalidOperationException("Inputs 'path' must be set for job extensions");
}
throw new NotSupportedException("Script action should not have 'Post' job action.");
}
else if (Action.Type == Pipelines.ActionSourceType.Script)
Inputs.TryGetValue("script", out string contents);
contents = contents ?? string.Empty;
if (Action.Type == Pipelines.ActionSourceType.Script)
{
Inputs.TryGetValue("script", out string contents);
contents = contents ?? string.Empty;
var firstLine = contents.TrimStart(' ', '\t', '\r', '\n');
var firstNewLine = firstLine.IndexOfAny(new[] { '\r', '\n' });
if (firstNewLine >= 0)
@@ -48,16 +42,17 @@ namespace GitHub.Runner.Worker.Handlers
}
ExecutionContext.Output($"##[group]Run {firstLine}");
var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)
{
// Bright Cyan color
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
}
}
else
{
throw new InvalidOperationException($"Invalid action type {Action?.Type} for {nameof(ScriptHandler)}");
throw new InvalidOperationException($"Invalid action type {Action.Type} for {nameof(ScriptHandler)}");
}
var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)
{
// Bright Cyan color
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
}
string argFormat;
@@ -137,6 +132,11 @@ namespace GitHub.Runner.Worker.Handlers
public async Task RunAsync(ActionRunStage stage)
{
if (stage == ActionRunStage.Post)
{
throw new NotSupportedException("Script action should not have 'Post' job action.");
}
// Validate args
Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
@@ -212,8 +212,7 @@ namespace GitHub.Runner.Worker.Handlers
}
}
// Don't override runner telemetry here
if (!string.IsNullOrEmpty(shellCommand) && IsActionStep)
if (!string.IsNullOrEmpty(shellCommand))
{
ExecutionContext.StepTelemetry.Action = shellCommand;
}
@@ -223,24 +222,10 @@ namespace GitHub.Runner.Worker.Handlers
{
throw new ArgumentException("Invalid shell option. Shell must be a valid built-in (bash, sh, cmd, powershell, pwsh) or a format string containing '{0}'");
}
string scriptFilePath, resolvedScriptPath;
if (IsActionStep)
{
// We do not not the full path until we know what shell is being used, so that we can determine the file extension
scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}");
resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}";
}
else
{
// JobExtensionRunners run a script file, we load that from the inputs here
if (!Inputs.ContainsKey("path"))
{
throw new ArgumentException("Expected 'path' input to be set");
}
scriptFilePath = Inputs["path"];
ArgUtil.NotNullOrEmpty(scriptFilePath, "path");
resolvedScriptPath = Inputs["path"].Replace("\"", "\\\"");
}
// We do not not the full path until we know what shell is being used, so that we can determine the file extension
var scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}");
var resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}";
// Format arg string with script path
var arguments = string.Format(argFormat, resolvedScriptPath);
@@ -256,12 +241,9 @@ namespace GitHub.Runner.Worker.Handlers
#else
// Don't add a BOM. It causes the script to fail on some operating systems (e.g. on Ubuntu 14).
var encoding = new UTF8Encoding(false);
#endif
if (IsActionStep)
{
// Script is written to local path (ie host) but executed relative to the StepHost, which may be a container
File.WriteAllText(scriptFilePath, contents, encoding);
}
#endif
// Script is written to local path (ie host) but executed relative to the StepHost, which may be a container
File.WriteAllText(scriptFilePath, contents, encoding);
// Prepend PATH
AddPrependPathToEnvironment();

View File

@@ -1,8 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Worker.Handlers
{
@@ -81,22 +79,5 @@ namespace GitHub.Runner.Worker.Handlers
throw new ArgumentException($"Failed to parse COMMAND [..ARGS] from {shellOption}");
}
}
internal static string GetDefaultShellForScript(string path, Common.Tracing trace, string prependPath)
{
var format = "{0} {1}";
switch (Path.GetExtension(path))
{
case ".sh":
// use 'sh' args but prefer bash
var pathToShell = WhichUtil.Which("bash", false, trace, prependPath) ?? WhichUtil.Which("sh", true, trace, prependPath);
return string.Format(format, pathToShell, _defaultArguments["sh"]);
case ".ps1":
var pathToPowershell = WhichUtil.Which("pwsh", false, trace, prependPath) ?? WhichUtil.Which("powershell", true, trace, prependPath);
return string.Format(format, pathToPowershell, _defaultArguments["powershell"]);
default:
throw new ArgumentException($"{path} is not a valid path to a script. Make sure it ends in '.sh' or '.ps1'.");
}
}
}
}

View File

@@ -15,7 +15,6 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
@@ -249,19 +248,6 @@ namespace GitHub.Runner.Worker
Trace.Info("Downloading actions");
var actionManager = HostContext.GetService<IActionManager>();
var prepareResult = await actionManager.PrepareActionsAsync(context, message.Steps);
// add hook to preJobSteps
var startedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED");
if (!string.IsNullOrEmpty(startedHookPath))
{
var hookProvider = HostContext.GetService<IJobHookProvider>();
var jobHookData = new JobHookData(ActionRunStage.Pre, startedHookPath);
preJobSteps.Add(new JobExtensionRunner(runAsync: hookProvider.RunHook,
condition: $"{PipelineTemplateConstants.Always}()",
displayName: Constants.Hooks.JobStartedStepName,
data: (object)jobHookData));
}
preJobSteps.AddRange(prepareResult.ContainerSetupSteps);
// Add start-container steps, record and stop-container steps
@@ -351,18 +337,6 @@ namespace GitHub.Runner.Worker
}
}
// Register Job Completed hook if the variable is set
var completedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED");
if (!string.IsNullOrEmpty(completedHookPath))
{
var hookProvider = HostContext.GetService<IJobHookProvider>();
var jobHookData = new JobHookData(ActionRunStage.Post, completedHookPath);
jobContext.RegisterPostJobStep(new JobExtensionRunner(runAsync: hookProvider.RunHook,
condition: $"{PipelineTemplateConstants.Always}()",
displayName: Constants.Hooks.JobCompletedStepName,
data: (object)jobHookData));
}
List<IStep> steps = new List<IStep>();
steps.AddRange(preJobSteps);
steps.AddRange(jobSteps);
@@ -432,7 +406,7 @@ namespace GitHub.Runner.Worker
// create a new timeline record node for 'Finalize job'
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Complete job", $"{nameof(JobExtension)}_Final", null, null, ActionRunStage.Post);
context.StepTelemetry.Type = "runner";
context.StepTelemetry.Action = "complete_job";
context.StepTelemetry.Action = "complete_joh";
using (var register = jobContext.CancellationToken.Register(() => { context.CancelToken(); }))
{
try
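
This JobExtension hunk covers the job started/completed hook wiring: when `ACTIONS_RUNNER_HOOK_JOB_STARTED` or `ACTIONS_RUNNER_HOOK_JOB_COMPLETED` points at a script on the runner host, an extra step (named per `Constants.Hooks`, i.e. "Set up runner" / "Complete runner") is injected before and after the job's own steps. A hedged configuration sketch for a self-hosted runner; the install path, hook directory, and use of the runner's .env file are assumptions for illustration:

```sh
# On the runner host (paths are illustrative).
mkdir -p /opt/runner-hooks

cat > /opt/runner-hooks/job-started.sh <<'EOF'
#!/bin/bash
# Ordinary script; the runner executes it as the injected "Set up runner" step.
echo "Job starting on $(hostname) at $(date -u)"
EOF
chmod +x /opt/runner-hooks/job-started.sh

# Point the runner at the hook, e.g. via the runner's .env file in its install directory.
cat >> /opt/actions-runner/.env <<'EOF'
ACTIONS_RUNNER_HOOK_JOB_STARTED=/opt/runner-hooks/job-started.sh
EOF
```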

View File

@@ -1,95 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using System.Linq;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Handlers;
namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(JobHookProvider))]
public interface IJobHookProvider : IRunnerService
{
Task RunHook(IExecutionContext executionContext, object data);
}
public class JobHookData
{
public string Path {get; private set;}
public ActionRunStage Stage {get; private set;}
public JobHookData(ActionRunStage stage, string path)
{
Path = path;
Stage = stage;
}
}
public class JobHookProvider : RunnerService, IJobHookProvider
{
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
}
public async Task RunHook(IExecutionContext executionContext, object data)
{
// Get Inputs
var hookData = data as JobHookData;
ArgUtil.NotNull(hookData, nameof(JobHookData));
var displayName = hookData.Stage == ActionRunStage.Pre ? "job started hook" : "job completed hook";
// Log to users so that they know how this step was injected
executionContext.Output($"A {displayName} has been configured by the self-hosted runner administrator");
// Validate script file.
if (!File.Exists(hookData.Path))
{
throw new FileNotFoundException("File doesn't exist");
}
executionContext.WriteWebhookPayload();
// Create the handler data.
var scriptDirectory = Path.GetDirectoryName(hookData.Path);
var stepHost = HostContext.CreateService<IDefaultStepHost>();
var prependPath = string.Join(Path.PathSeparator.ToString(), executionContext.Global.PrependPath.Reverse<string>());
Dictionary<string, string> inputs = new()
{
["path"] = hookData.Path,
["shell"] = ScriptHandlerHelpers.GetDefaultShellForScript(hookData.Path, Trace, prependPath)
};
// Create the handler
var handlerFactory = HostContext.GetService<IHandlerFactory>();
var handler = handlerFactory.Create(
executionContext,
action: null,
stepHost,
new ScriptActionExecutionData(),
inputs,
environment: new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
executionContext.Global.Variables,
actionDirectory: scriptDirectory,
localActionContainerSetupSteps: null);
handler.PrepareExecution(hookData.Stage);
// Setup file commands
var fileCommandManager = HostContext.CreateService<IFileCommandManager>();
fileCommandManager.InitializeFiles(executionContext, null);
// Run the step and process the file commands
try
{
await handler.RunAsync(hookData.Stage);
}
finally
{
fileCommandManager.ProcessFiles(executionContext, executionContext.Global.Container);
}
}
}
}

View File

@@ -118,7 +118,7 @@ namespace GitHub.Runner.Common.Tests.Worker
await _actionRunner.RunAsync();
//Assert
_ec.Verify(x => x.WriteWebhookPayload(), Times.Once);
_ec.Verify(x => x.SetGitHubContext("event_path", Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "_github_workflow", "event.json")), Times.Once);
}
[Fact]

View File

@@ -144,55 +144,6 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void AddIssue_AddStepAndLineNumberInformation()
{
using (TestHostContext hc = CreateTestContext())
{
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var pagingLogger2 = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
hc.EnqueueInstance(pagingLogger.Object);
hc.EnqueueInstance(pagingLogger2.Object);
hc.SetSingleton(jobServerQueue.Object);
var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);
ec.InitializeJob(jobRequest, CancellationToken.None);
ec.Start();
var embeddedStep = ec.CreateChild(Guid.NewGuid(), "action_1_pre", "action_1_pre", null, null, ActionRunStage.Main, isEmbedded: true);
embeddedStep.Start();
embeddedStep.AddIssue(new Issue() { Type = IssueType.Error, Message = "error annotation that should have step and line number information" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning annotation that should have step and line number information" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice annotation that should have step and line number information" });
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Error).Count() == 1)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Warning).Count() == 1)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Notice).Count() == 1)), Times.AtLeastOnce);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]

View File

@@ -25,7 +25,6 @@ namespace GitHub.Runner.Common.Tests.Worker
private Mock<IPagingLogger> _logger;
private Mock<IContainerOperationProvider> _containerProvider;
private Mock<IDiagnosticLogManager> _diagnosticLogManager;
private Mock<IJobHookProvider> _jobHookProvider;
private CancellationTokenSource _tokenSource;
private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
@@ -41,7 +40,6 @@ namespace GitHub.Runner.Common.Tests.Worker
_directoryManager = new Mock<IPipelineDirectoryManager>();
_directoryManager.Setup(x => x.PrepareDirectory(It.IsAny<IExecutionContext>(), It.IsAny<Pipelines.WorkspaceOptions>()))
.Returns(new TrackingConfig() { PipelineDirectory = "runner", WorkspaceDirectory = "runner/runner" });
_jobHookProvider = new Mock<IJobHookProvider>();
IActionRunner step1 = new ActionRunner();
IActionRunner step2 = new ActionRunner();
@@ -113,9 +111,7 @@ namespace GitHub.Runner.Common.Tests.Worker
hc.SetSingleton(_containerProvider.Object);
hc.SetSingleton(_directoryManager.Object);
hc.SetSingleton(_diagnosticLogManager.Object);
hc.SetSingleton(_jobHookProvider.Object);
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // JobExecutionContext
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // job start hook
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // Initial Job
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step1
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step2
@@ -124,7 +120,6 @@ namespace GitHub.Runner.Common.Tests.Worker
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step5
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // prepare1
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // prepare2
hc.EnqueueInstance<IPagingLogger>(_logger.Object); // job complete hook
hc.EnqueueInstance<IActionRunner>(step1);
hc.EnqueueInstance<IActionRunner>(step2);
@@ -353,62 +348,5 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(TaskResult.Succeeded, _jobEc.Result);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task EnsurePreAndPostHookStepsIfEnvExists()
{
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED", "/foo/bar");
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED", "/bar/foo");
using (TestHostContext hc = CreateTestContext())
{
var jobExtension = new JobExtension();
jobExtension.Initialize(hc);
_actionManager.Setup(x => x.PrepareActionsAsync(It.IsAny<IExecutionContext>(), It.IsAny<IEnumerable<Pipelines.JobStep>>(), It.IsAny<Guid>()))
.Returns(Task.FromResult(new PrepareResult(new List<JobExtensionRunner>(), new Dictionary<Guid, IActionRunner>())));
List<IStep> result = await jobExtension.InitializeJob(_jobEc, _message);
var trace = hc.GetTrace();
var hookStart = result.First() as JobExtensionRunner;
jobExtension.FinalizeJob(_jobEc, _message, DateTime.UtcNow);
Assert.Equal(Constants.Hooks.JobStartedStepName, hookStart.DisplayName);
Assert.Equal(Constants.Hooks.JobCompletedStepName, (_jobEc.PostJobSteps.Last() as JobExtensionRunner).DisplayName);
}
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED", null);
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED", null);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void EnsureNoPreAndPostHookSteps()
{
using (TestHostContext hc = CreateTestContext())
{
var jobExtension = new JobExtension();
jobExtension.Initialize(hc);
_message.ActionsEnvironment = null;
_jobEc = new Runner.Worker.ExecutionContext {Result = TaskResult.Succeeded};
_jobEc.Initialize(hc);
_jobEc.InitializeJob(_message, _tokenSource.Token);
var x = _jobEc.JobSteps;
jobExtension.FinalizeJob(_jobEc, _message, DateTime.UtcNow);
Assert.Equal(TaskResult.Succeeded, _jobEc.Result);
Assert.Equal(0, _jobEc.PostJobSteps.Count);
}
}
}
}

View File

@@ -128,7 +128,6 @@ function layout ()
chmod +x "${LAYOUT_DIR}/bin/Runner.Worker"
chmod +x "${LAYOUT_DIR}/bin/Runner.PluginHost"
chmod +x "${LAYOUT_DIR}/bin/installdependencies.sh"
chmod +x "${LAYOUT_DIR}/safe_sleep.sh"
fi
heading "Setup externals folder for $RUNTIME_ID runner's layout"

View File

@@ -1 +1 @@
2.289.1
2.288.1