Compare commits


14 Commits

Author SHA1 Message Date
Ferenc Hammerl
bc0c1263f0 Release version 289.0 2022-03-18 16:33:39 +01:00
Thomas Boop
82a4ca9a6b 2.289.0 release notes 2022-03-17 21:55:53 -04:00
Yashwanth Anantharaju
d081289ed5 postlines: refactor per feedback (#1755)
* refactor per feedback

* feedback

* nit

* commentify

* feedback

* feedback
2022-03-17 21:35:20 -04:00
Ferenc Hammerl
7d5e9cd70f Runner Job Started/Completed Hooks (#1737)
* Prototype for pre job hook

* Remove debug log

* Enable hooks again

* Initialize with hostContext

* Add event_path, fix no-path bug

* Allow script post steps

* Call script handler with correct pre post stage

* Add job completed hook

* Make filecommand work and hardcode shell

* Conditionally print step details and no telemetry for hooks

* Figure out which script to use

* Only check path for managed scripts

* Restore win dependency

* Nits

* Remove unused, add named params

* Telemetry + refactoring

* add message to job

* rename hooks remove stale comment

* cleanup

* Use .CreateService to create step

* Add L0s

* pr feedback

* update tests

* add disclaimer, clean up code

* spacing fix

* little more cleanup

* pr fix

* pr feedback

* Refactor to use JobExtension

* fix tests

* fix typo

* cleanup code

* more cleanup

* little more cleanup

* last bit of cleanup

* fix tests

* nit fix

* Update src/Runner.Worker/JobHookProvider.cs

Co-authored-by: Edward Thomson <ethomson@github.com>

* don't override runner telemetry

* pr feedback

* pr feedback

* pr feedback

Co-authored-by: Thomas Boop <thboop@github.com>
Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>
Co-authored-by: Edward Thomson <ethomson@github.com>
2022-03-17 21:35:04 -04:00
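The hook mechanism added here is opt-in: per the JobExtension.cs and ScriptHandlerHelpers.cs changes later in this compare, the runner only injects the "Set up runner" / "Complete runner" steps when the two new environment variables point at an existing .sh or .ps1 script. A minimal sketch for a Linux self-hosted runner; the /opt/hooks paths are placeholders for scripts the administrator provides:

    # Illustrative only: hook scripts are your own; the variable names come from JobExtension.cs below.
    export ACTIONS_RUNNER_HOOK_JOB_STARTED=/opt/hooks/job-started.sh
    export ACTIONS_RUNNER_HOOK_JOB_COMPLETED=/opt/hooks/job-completed.sh
    ./run.sh    # or restart the runner service so it inherits the variables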
ruvceskistefan
98aa9c1152 Added repository name and workflow file name to console output (#1761)
* Adding repo name and workflow file to console output

* Add guard for empty workflow file name
2022-03-17 13:25:28 -04:00
Yashwanth Anantharaju
ddc700e9eb Send postlines via websocket if we can (#1730)
* feed via websocket

* feed via websocket

* feedback

* ensure right schema is used

* fix resiliency

* some fixes

* fix sending message

* chunk data

* let's abort, which will also dispose

* close gracefully
2022-03-15 14:01:18 -04:00
Konrad Pabjan
a0458aebfe Save record order for annotation links when creating issues (#1744)
* Save record order for annotation links when creating issues

* PR feedback

* Add tests for step and line numbers
2022-03-14 11:20:11 -04:00
Tingluo Huang
b2c6d093b2 Validate packages hash before uploading to github release in CD workflow. (#1745) 2022-03-14 09:21:13 -04:00
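The new workflow step (shown in the release workflow diff below) pipes each expected digest into shasum -a 256 -c. The same check can be run by hand against a downloaded package; a sketch, with the digest left as a placeholder:

    # Illustrative: replace <expected-sha256> with the digest published for the release.
    echo "<expected-sha256>  actions-runner-linux-x64-2.289.0.tar.gz" | shasum -a 256 -c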
Thomas Boop
292a2e0ab3 Fix spelling (#1747) 2022-03-11 09:41:54 -05:00
Nikola Jokic
29cee52276 Prefer user who initiated install before (#1714) 2022-03-10 14:08:19 +01:00
Antoine Grondin
ad0d0c4d0a worker: expose github.triggering_actor as an env-var (#1726)
* worker: expose `github.triggering_actor` as an env-var

* worker: sort the allow list
2022-03-02 16:49:26 -05:00
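With triggering_actor added to the allow list in GitHubContext.cs (later in this compare), workflows can see who triggered a run, which can differ from github.actor on re-runs. A hedged sketch of a step script reading it, assuming the runner's usual mapping of allow-listed github context keys to GITHUB_* environment variables:

    # Illustrative step script
    echo "Run triggered by: ${GITHUB_TRIGGERING_ACTOR} (original actor: ${GITHUB_ACTOR})"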
Thomas Boop
2c6064a655 Update to v2.288.1 (#1723)
We hotfixed the releases/m288 branch to update to v2.288.1. This PR brings us to parity on the main branch
2022-03-01 18:19:56 +00:00
ruvceskistefan
af6c8e6edd Issue 1698: Use safe_sleep executable in bash scripts (#1707)
* use safe_sleep executable in bash scripts

* new line at the end of safe_sleep bash script

* Replacing relative paths with absolute paths and changing location of safe_sleep

Co-authored-by: Ferenc Hammerl <31069338+fhammerl@users.noreply.github.com>
2022-03-01 13:08:52 +00:00
Nikola Jokic
c15d3f10b2 Enhancement: RunnerService.js added logic to fail on N attempts if env variable exported (#1693)
* RunnerService.js added logic to fail on N attempts

* removed gracefulShutdown code, removed unused import
2022-03-01 13:55:25 +01:00
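The wrapper change below reads GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES from the service environment; unset or non-positive values keep the old behavior of re-launching the listener indefinitely, while a positive value makes the service shut down after that many consecutive failures with unknown exit codes. A sketch of opting in (how the variable reaches the service, e.g. via a systemd drop-in, depends on how it was installed):

    # Illustrative: stop the service after 5 consecutive unexplained listener failures.
    export GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES=5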
25 changed files with 653 additions and 207 deletions

View File

@@ -260,6 +260,17 @@ jobs:
       console.log(releaseNote)
       core.setOutput('version', runnerVersion);
       core.setOutput('note', releaseNote);
+    - name: Validate Packages HASH
+      working-directory: _package
+      run: |
+        ls -l
+        echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
+        echo "${{needs.build.outputs.osx-x64-sha}} actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
+        echo "${{needs.build.outputs.linux-x64-sha}} actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
+        echo "${{needs.build.outputs.linux-arm-sha}} actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
+        echo "${{needs.build.outputs.linux-arm64-sha}} actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
     # Create GitHub release
     - uses: actions/create-release@master
       id: createRelease

View File

@@ -1,32 +1,18 @@
 ## Features
-- Make run.sh|cmd handle update without quitting so containers using them as entrypoints don't exit on update (#1646, #1633, #1708)
-- Add support for Step Summary (#1642, #1667, #1712)
-- Pass jobId to the actionsDownloadInfo controller (#1639)
-- updated systemd svc.sh to accept custom service file (#1612)
-- Add ability to specify runner group when creating service (#1675)
-- Prefer node16 over node12 when running internal scripts (#1621)
-- Sending telemetry about actions usage. (#1688)
-- Bump node12 version to latest (#1651)
-- Add internal to node version function and use better env var name (#1715)
-- Force JS Actions Node version to 16 if FF is on unless user opted out (#1716)
+- Added `github.triggering_actor` to the `github` context (#1726)
+- Save step information when creating annotations (#1744)
+- Improved performance of live log streaming (#1730, #1755)
+- Added Beta support for job started and completed hooks (#1737)
 ## Bugs
-- Fix windows console runner update crash (#1670)
-- Retry policy for methods GetTenantCredential and GetJITRunnerTokenAsync (#1691)
-- Skip DeleteAgentSession when the acess token has been revoked. (#1692)
-- Repaired hashFiles call so if error was thrown, it was returned to process invoker (#1678)
-- Runner throws null ref exception when new line after EOF is missing (#1687)
-- Lets allow up to 150 characters for services on linux/mac (#1710)
 ## Misc
-- Added examples and aligned language within docs/checks/actions.md (#1664)
-- Problem with debugging on macOS M1 (#1625)
-- Fix typo in hashFiles.ts. (#1672)
-- Allow mocked updates for E2E testing (#1654)
-- Move JobTelemetry and StepsTelemetry into GlobalContext. (#1680)
-- Fix inconsistency of outputs (both canceled and cancelled are used (#1624)
+- Made some minor job telemetry improvements (#1747)
+- Added repository name and workflow file name to `run.sh` output (#1761)
 ## Windows x64
 We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.

View File

@@ -1 +1 @@
-<Update to ./src/runnerversion when creating release>
+2.289.0

View File

@@ -3,94 +3,135 @@
// Licensed under the MIT license. See LICENSE file in the project root for full license information. // Licensed under the MIT license. See LICENSE file in the project root for full license information.
var childProcess = require("child_process"); var childProcess = require("child_process");
var path = require("path") var path = require("path");
var supported = ['linux', 'darwin'] var supported = ["linux", "darwin"];
if (supported.indexOf(process.platform) == -1) { if (supported.indexOf(process.platform) == -1) {
console.log('Unsupported platform: ' + process.platform); console.log("Unsupported platform: " + process.platform);
console.log('Supported platforms are: ' + supported.toString()); console.log("Supported platforms are: " + supported.toString());
process.exit(1); process.exit(1);
} }
var stopping = false; var stopping = false;
var listener = null; var listener = null;
var exitServiceAfterNFailures = Number(
process.env.GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES
);
if (exitServiceAfterNFailures <= 0) {
exitServiceAfterNFailures = NaN;
}
var consecutiveFailureCount = 0;
var gracefulShutdown = function () {
console.log("Shutting down runner listener");
stopping = true;
if (listener) {
console.log("Sending SIGINT to runner listener to stop");
listener.kill("SIGINT");
console.log("Sending SIGKILL to runner listener");
setTimeout(() => listener.kill("SIGKILL"), 30000).unref();
}
};
var runService = function () { var runService = function () {
var listenerExePath = path.join(__dirname, '../bin/Runner.Listener'); var listenerExePath = path.join(__dirname, "../bin/Runner.Listener");
var interactive = process.argv[2] === "interactive"; var interactive = process.argv[2] === "interactive";
if (!stopping) { if (!stopping) {
try { try {
if (interactive) { if (interactive) {
console.log('Starting Runner listener interactively'); console.log("Starting Runner listener interactively");
listener = childProcess.spawn(listenerExePath, ['run'], { env: process.env }); listener = childProcess.spawn(listenerExePath, ["run"], {
env: process.env,
});
} else { } else {
console.log('Starting Runner listener with startup type: service'); console.log("Starting Runner listener with startup type: service");
listener = childProcess.spawn(listenerExePath, ['run', '--startuptype', 'service'], { env: process.env }); listener = childProcess.spawn(
listenerExePath,
["run", "--startuptype", "service"],
{ env: process.env }
);
} }
console.log(`Started listener process, pid: ${listener.pid}`); console.log(`Started listener process, pid: ${listener.pid}`);
listener.stdout.on('data', (data) => { listener.stdout.on("data", (data) => {
process.stdout.write(data.toString('utf8')); if (data.toString("utf8").includes("Listening for Jobs")) {
consecutiveFailureCount = 0;
}
process.stdout.write(data.toString("utf8"));
}); });
listener.stderr.on('data', (data) => { listener.stderr.on("data", (data) => {
process.stdout.write(data.toString('utf8')); process.stdout.write(data.toString("utf8"));
}); });
listener.on("error", (err) => { listener.on("error", (err) => {
console.log(`Runner listener fail to start with error ${err.message}`); console.log(`Runner listener fail to start with error ${err.message}`);
}); });
listener.on('close', (code) => { listener.on("close", (code) => {
console.log(`Runner listener exited with error code ${code}`); console.log(`Runner listener exited with error code ${code}`);
if (code === 0) { if (code === 0) {
console.log('Runner listener exit with 0 return code, stop the service, no retry needed.'); console.log(
"Runner listener exit with 0 return code, stop the service, no retry needed."
);
stopping = true; stopping = true;
} else if (code === 1) { } else if (code === 1) {
console.log('Runner listener exit with terminated error, stop the service, no retry needed.'); console.log(
"Runner listener exit with terminated error, stop the service, no retry needed."
);
stopping = true; stopping = true;
} else if (code === 2) { } else if (code === 2) {
console.log('Runner listener exit with retryable error, re-launch runner in 5 seconds.'); console.log(
} else if (code === 3) { "Runner listener exit with retryable error, re-launch runner in 5 seconds."
console.log('Runner listener exit because of updating, re-launch runner in 5 seconds.'); );
consecutiveFailureCount = 0;
} else if (code === 3 || code === 4) {
console.log(
"Runner listener exit because of updating, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
} else { } else {
console.log('Runner listener exit with undefined return code, re-launch runner in 5 seconds.'); var messagePrefix = "Runner listener exit with undefined return code";
consecutiveFailureCount++;
if (
!isNaN(exitServiceAfterNFailures) &&
consecutiveFailureCount >= exitServiceAfterNFailures
) {
console.error(
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
);
gracefulShutdown();
return;
} else {
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
}
} }
if (!stopping) { if (!stopping) {
setTimeout(runService, 5000); setTimeout(runService, 5000);
} }
}); });
} catch (ex) { } catch (ex) {
console.log(ex); console.log(ex);
} }
} }
} };
runService(); runService();
console.log('Started running service'); console.log("Started running service");
var gracefulShutdown = function (code) { process.on("SIGINT", () => {
console.log('Shutting down runner listener'); gracefulShutdown();
stopping = true;
if (listener) {
console.log('Sending SIGINT to runner listener to stop');
listener.kill('SIGINT');
console.log('Sending SIGKILL to runner listener');
setTimeout(() => listener.kill('SIGKILL'), 30000).unref();
}
}
process.on('SIGINT', () => {
gracefulShutdown(0);
}); });
process.on('SIGTERM', () => { process.on("SIGTERM", () => {
gracefulShutdown(0); gracefulShutdown();
}); });

View File

@@ -63,7 +63,7 @@ function install()
 mkdir -p "${log_path}" || failed "failed to create ${log_path}"
 echo Creating ${PLIST_PATH}
-sed "s/{{User}}/${SUDO_USER:-$USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
+sed "s/{{User}}/${USER:-$SUDO_USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
 mv "${TEMP_PATH}" "${PLIST_PATH}" || failed "failed to copy plist"
 # Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.

View File

@@ -30,13 +30,13 @@ date "+[%F %T-%4N] Waiting for $runnerprocessname ($runnerpid) to complete" >> "
 while [ -e /proc/$runnerpid ]
 do
     date "+[%F %T-%4N] Process $runnerpid still running" >> "$logfile" 2>&1
-    sleep 2
+    "$rootfolder"/safe_sleep.sh 2
 done
 date "+[%F %T-%4N] Process $runnerpid finished running" >> "$logfile" 2>&1
 # start re-organize folders
 date "+[%F %T-%4N] Sleep 1 more second to make sure process exited" >> "$logfile" 2>&1
-sleep 1
+"$rootfolder"/safe_sleep.sh 1
 # the folder structure under runner root will be
 # ./bin -> bin.2.100.0 (junction folder)

View File

@@ -10,22 +10,6 @@ fi
 # Run
 shopt -s nocasematch
-safe_sleep() {
-    if [ ! -x "$(command -v sleep)" ]; then
-        if [ ! -x "$(command -v ping)" ]; then
-            COUNT="0"
-            while [[ $COUNT != 5000 ]]; do
-                echo "SLEEP" > /dev/null
-                COUNT=$[$COUNT+1]
-            done
-        else
-            ping -c 5 127.0.0.1 > /dev/null
-        fi
-    else
-        sleep 5
-    fi
-}
 SOURCE="${BASH_SOURCE[0]}"
 while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
   DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
@@ -44,17 +28,17 @@ elif [[ $returnCode == 1 ]]; then
     exit 0
 elif [[ $returnCode == 2 ]]; then
     echo "Runner listener exit with retryable error, re-launch runner in 5 seconds."
-    safe_sleep
+    "$DIR"/safe_sleep.sh 5
     exit 2
 elif [[ $returnCode == 3 ]]; then
     # Sleep 5 seconds to wait for the runner update process finish
     echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
-    safe_sleep
+    "$DIR"/safe_sleep.sh 5
     exit 2
 elif [[ $returnCode == 4 ]]; then
     # Sleep 5 seconds to wait for the ephemeral runner update process finish
     echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
-    safe_sleep
+    "$DIR"/safe_sleep.sh 5
     exit 2
 else
     echo "Exiting with unknown error code: ${returnCode}"

View File

@@ -0,0 +1,6 @@
#!/bin/bash
SECONDS=0
while [[ $SECONDS != $1 ]]; do
:
done

View File

@@ -189,6 +189,12 @@ namespace GitHub.Runner.Common
     public static readonly string Success = "success";
 }
+public static class Hooks
+{
+    public static readonly string JobStartedStepName = "Set up runner";
+    public static readonly string JobCompletedStepName = "Complete runner";
+}
 public static class Path
 {
     public static readonly string ActionsDirectory = "_actions";

View File

@@ -3,23 +3,27 @@ using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO; using System.IO;
using System.Net.Http; using System.Net.Http;
using System.Net.WebSockets;
using System.Text;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Services.Common; using GitHub.Services.Common;
using GitHub.Services.WebApi; using GitHub.Services.WebApi;
using Newtonsoft.Json;
namespace GitHub.Runner.Common namespace GitHub.Runner.Common
{ {
[ServiceLocator(Default = typeof(JobServer))] [ServiceLocator(Default = typeof(JobServer))]
public interface IJobServer : IRunnerService public interface IJobServer : IRunnerService, IAsyncDisposable
{ {
Task ConnectAsync(VssConnection jobConnection); Task ConnectAsync(VssConnection jobConnection);
void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
// logging and console // logging and console
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken); Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken); Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken);
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken); Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken); Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken); Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
@@ -34,6 +38,20 @@ namespace GitHub.Runner.Common
private bool _hasConnection; private bool _hasConnection;
private VssConnection _connection; private VssConnection _connection;
private TaskHttpClient _taskClient; private TaskHttpClient _taskClient;
private ClientWebSocket _websocketClient;
private ServiceEndpoint _serviceEndpoint;
private int totalBatchedLinesAttemptedByWebsocket = 0;
private int failedAttemptsToPostBatchedLinesByWebsocket = 0;
private static readonly TimeSpan _minDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(100);
private static readonly TimeSpan _maxDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(500);
private static readonly int _minWebsocketFailurePercentageAllowed = 50;
private static readonly int _minWebsocketBatchedLinesCountToConsider = 5;
private Task _websocketConnectTask;
public async Task ConnectAsync(VssConnection jobConnection) public async Task ConnectAsync(VssConnection jobConnection)
{ {
@@ -117,6 +135,19 @@ namespace GitHub.Runner.Common
} }
} }
public void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint)
{
this._serviceEndpoint = serviceEndpoint;
InitializeWebsocketClient(TimeSpan.Zero);
}
public ValueTask DisposeAsync()
{
_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, "Shutdown", CancellationToken.None);
GC.SuppressFinalize(this);
return ValueTask.CompletedTask;
}
private void CheckConnection() private void CheckConnection()
{ {
if (!_hasConnection) if (!_hasConnection)
@@ -125,6 +156,48 @@ namespace GitHub.Runner.Common
} }
} }
private void InitializeWebsocketClient(TimeSpan delay)
{
if (_serviceEndpoint.Authorization != null &&
_serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out var accessToken) &&
!string.IsNullOrEmpty(accessToken))
{
if (_serviceEndpoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl) && !string.IsNullOrEmpty(feedStreamUrl))
{
// let's ensure we use the right scheme
feedStreamUrl = feedStreamUrl.Replace("https://", "wss://").Replace("http://", "ws://");
Trace.Info($"Creating websocket client ..." + feedStreamUrl);
this._websocketClient = new ClientWebSocket();
this._websocketClient.Options.SetRequestHeader("Authorization", $"Bearer {accessToken}");
this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay);
}
else
{
Trace.Info($"No FeedStreamUrl found, so we will use Rest API calls for sending feed data");
}
}
else
{
Trace.Info($"No access token from the service endpoint");
}
}
private async Task ConnectWebSocketClient(string feedStreamUrl, TimeSpan delay)
{
try
{
Trace.Info($"Attempting to start websocket client with delay {delay}.");
await Task.Delay(delay);
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), default(CancellationToken));
Trace.Info($"Successfully started websocket client.");
}
catch(Exception ex)
{
Trace.Info("Exception caught during websocket client connect, fallback of HTTP would be used now instead of websocket.");
Trace.Error(ex);
}
}
//----------------------------------------------------------------- //-----------------------------------------------------------------
// Feedback: WebConsole, TimelineRecords and Logs // Feedback: WebConsole, TimelineRecords and Logs
//----------------------------------------------------------------- //-----------------------------------------------------------------
@@ -135,16 +208,72 @@ namespace GitHub.Runner.Common
return _taskClient.AppendLogContentAsync(scopeIdentifier, hubName, planId, logId, uploadStream, cancellationToken: cancellationToken); return _taskClient.AppendLogContentAsync(scopeIdentifier, hubName, planId, logId, uploadStream, cancellationToken: cancellationToken);
} }
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken) public async Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken)
{ {
CheckConnection(); CheckConnection();
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken); var pushedLinesViaWebsocket = false;
if (_websocketConnectTask != null)
{
await _websocketConnectTask;
} }
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken) // "_websocketClient != null" implies either: We have a successful connection OR we have to attempt sending again and then reconnect
// ...in other words, if websocket client is null, we will skip sending to websocket and just use rest api calls to send data
if (_websocketClient != null)
{ {
CheckConnection(); var linesWrapper = startLine.HasValue? new TimelineRecordFeedLinesWrapper(stepId, lines, startLine.Value): new TimelineRecordFeedLinesWrapper(stepId, lines);
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine, cancellationToken: cancellationToken); var jsonData = StringUtil.ConvertToJson(linesWrapper);
try
{
totalBatchedLinesAttemptedByWebsocket++;
var jsonDataBytes = Encoding.UTF8.GetBytes(jsonData);
// break the message into chunks of 1024 bytes
for (var i = 0; i < jsonDataBytes.Length; i += 1 * 1024)
{
var lastChunk = i + (1 * 1024) >= jsonDataBytes.Length;
var chunk = new ArraySegment<byte>(jsonDataBytes, i, Math.Min(1 * 1024, jsonDataBytes.Length - i));
await _websocketClient.SendAsync(chunk, WebSocketMessageType.Text, endOfMessage:lastChunk, cancellationToken);
}
pushedLinesViaWebsocket = true;
}
catch (Exception ex)
{
failedAttemptsToPostBatchedLinesByWebsocket++;
Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
Trace.Error(ex);
if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
{
// let's consider failure percentage
if (failedAttemptsToPostBatchedLinesByWebsocket * 100 / totalBatchedLinesAttemptedByWebsocket > _minWebsocketFailurePercentageAllowed)
{
Trace.Info($"Exhausted websocket allowed retries, we will not attempt websocket connection for this job to post lines again.");
_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.InternalServerError, "Shutdown due to failures", cancellationToken);
// By setting it to null, we will ensure that we never try websocket path again for this job
_websocketClient = null;
}
}
if (_websocketClient != null)
{
var delay = BackoffTimerHelper.GetRandomBackoff(_minDelayForWebsocketReconnect, _maxDelayForWebsocketReconnect);
Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}).");
InitializeWebsocketClient(delay);
}
}
}
if (!pushedLinesViaWebsocket)
{
if (startLine.HasValue)
{
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine.Value, cancellationToken: cancellationToken);
}
else
{
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
}
}
} }
public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken) public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken)

View File

@@ -89,6 +89,10 @@ namespace GitHub.Runner.Common
 {
     Trace.Entering();
+    var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
+    _jobServer.InitializeWebsocketClient(serviceEndPoint);
     if (_queueInProcess)
     {
         Trace.Info("No-opt, all queue process tasks are running.");
@@ -156,6 +160,9 @@ namespace GitHub.Runner.Common
     await ProcessTimelinesUpdateQueueAsync(runOnce: true);
     Trace.Info("Timeline update queue drained.");
+    Trace.Info($"Disposing job server ...");
+    await _jobServer.DisposeAsync();
     Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
 }
@@ -292,15 +299,7 @@ namespace GitHub.Runner.Common
 {
     try
     {
-        // we will not requeue failed batch, since the web console lines are time sensitive.
-        if (batch[0].LineNumber.HasValue)
-        {
-            await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber.Value, default(CancellationToken));
-        }
-        else
-        {
-            await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), default(CancellationToken));
-        }
+        await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, default(CancellationToken));
         if (_firstConsoleOutputs)
         {
View File

@@ -346,7 +346,10 @@ namespace GitHub.Runner.Listener
 }
 var term = HostContext.GetService<ITerminal>();
-term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
+string workflowName = message.Variables["system.workflowFilePath"].Value.Split('/').LastOrDefault();
+string additionalInfo = string.IsNullOrEmpty(workflowName) ? $"(in repository \"{_runnerSettings.RepoOrOrgName}\")" : $"(workflow \"{workflowName}\" in repository \"{_runnerSettings.RepoOrOrgName}\")";
+term.WriteLine($"{DateTime.UtcNow:u}: Running job: \"{message.JobDisplayName}\" {additionalInfo}");
 // first job request renew succeed.
 TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
@@ -531,7 +534,7 @@ namespace GitHub.Runner.Listener
 TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode);
 Trace.Info($"finish job request for job {message.JobId} with result: {result}");
-term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {result}");
+term.WriteLine($"{DateTime.UtcNow:u}: Job \"{message.JobDisplayName}\" {additionalInfo} completed with result: {result}");
 Trace.Info($"Stop renew job request for job {message.JobId}.");
 // stop renew lock
@@ -627,7 +630,7 @@ namespace GitHub.Runner.Listener
 }
 Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}");
-term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {resultOnAbandonOrCancel}");
+term.WriteLine($"{DateTime.UtcNow:u}: Job \"{message.JobDisplayName}\" {additionalInfo} completed with result: {resultOnAbandonOrCancel}");
 // complete job request with cancel result, stop renew lock, job has finished.
 Trace.Info($"Stop renew job request for job {message.JobId}.");

View File

@@ -1,7 +1,5 @@
 using System;
 using System.Collections.Generic;
-using System.IO;
-using System.Text;
 using System.Threading.Tasks;
 using GitHub.DistributedTask.ObjectTemplating;
 using GitHub.DistributedTask.ObjectTemplating.Tokens;
@@ -11,6 +9,7 @@ using GitHub.DistributedTask.Pipelines.ObjectTemplating;
 using GitHub.Runner.Common;
 using GitHub.Runner.Common.Util;
 using GitHub.Runner.Sdk;
+using GitHub.Runner.Worker;
 using GitHub.Runner.Worker.Handlers;
 using Pipelines = GitHub.DistributedTask.Pipelines;
@@ -141,21 +140,7 @@ namespace GitHub.Runner.Worker
 IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
-// Makes directory for event_path data
-var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
-var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
-Directory.CreateDirectory(workflowDirectory);
-var gitHubEvent = ExecutionContext.GetGitHubContext("event");
-// adds the GitHub event path/file if the event exists
-if (gitHubEvent != null)
-{
-    var workflowFile = Path.Combine(workflowDirectory, "event.json");
-    Trace.Info($"Write event payload to {workflowFile}");
-    File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
-    ExecutionContext.SetGitHubContext("event_path", workflowFile);
-}
+ExecutionContext.WriteWebhookPayload();
 // Set GITHUB_ACTION_REPOSITORY if this Action is from a repository
 if (Action.Reference is Pipelines.RepositoryPathReference repoPathReferenceAction &&

View File

@@ -109,6 +109,7 @@ namespace GitHub.Runner.Worker
 void ForceTaskComplete();
 void RegisterPostJobStep(IStep step);
 void PublishStepTelemetry();
+void WriteWebhookPayload();
 }
 public sealed class ExecutionContext : RunnerService, IExecutionContext
@@ -550,10 +551,15 @@ namespace GitHub.Runner.Worker
     issue.Message = issue.Message[.._maxIssueMessageLength];
 }
+// Tracking the line number (logFileLineNumber) and step number (stepNumber) for each issue that gets created
+// Actions UI from the run summary page use both values to easily link to an exact locations in logs where annotations originate from
+if (_record.Order != null)
+{
+    issue.Data["stepNumber"] = _record.Order.ToString();
+}
 if (issue.Type == IssueType.Error)
 {
-    // tracking line number for each issue in log file
-    // log UI use this to navigate from issue to log
     if (!string.IsNullOrEmpty(logMessage))
     {
         long logLineNumber = Write(WellKnownTags.Error, logMessage);
@@ -569,8 +575,6 @@ namespace GitHub.Runner.Worker
 }
 else if (issue.Type == IssueType.Warning)
 {
-    // tracking line number for each issue in log file
-    // log UI use this to navigate from issue to log
     if (!string.IsNullOrEmpty(logMessage))
     {
         long logLineNumber = Write(WellKnownTags.Warning, logMessage);
@@ -586,9 +590,6 @@ namespace GitHub.Runner.Worker
 }
 else if (issue.Type == IssueType.Notice)
 {
-    // tracking line number for each issue in log file
-    // log UI use this to navigate from issue to log
     if (!string.IsNullOrEmpty(logMessage))
     {
         long logLineNumber = Write(WellKnownTags.Notice, logMessage);
@@ -991,6 +992,24 @@ namespace GitHub.Runner.Worker
     }
 }
+public void WriteWebhookPayload()
+{
+    // Makes directory for event_path data
+    var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
+    var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
+    Directory.CreateDirectory(workflowDirectory);
+    var gitHubEvent = GetGitHubContext("event");
+    // adds the GitHub event path/file if the event exists
+    if (gitHubEvent != null)
+    {
+        var workflowFile = Path.Combine(workflowDirectory, "event.json");
+        Trace.Info($"Write event payload to {workflowFile}");
+        File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
+        SetGitHubContext("event_path", workflowFile);
+    }
+}
 private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order)
 {
     _mainTimelineId = timelineId;

View File

@@ -8,10 +8,10 @@ namespace GitHub.Runner.Worker
 {
     private readonly HashSet<string> _contextEnvAllowlist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
     {
-        "action",
         "action_path",
         "action_ref",
         "action_repository",
+        "action",
         "actor",
         "api_url",
         "base_ref",
@@ -22,12 +22,12 @@ namespace GitHub.Runner.Worker
         "head_ref",
         "job",
         "path",
-        "ref",
         "ref_name",
         "ref_protected",
         "ref_type",
-        "repository",
+        "ref",
         "repository_owner",
+        "repository",
         "retention_days",
         "run_attempt",
         "run_id",
@@ -35,6 +35,7 @@ namespace GitHub.Runner.Worker
         "server_url",
         "sha",
         "step_summary",
+        "triggering_actor",
         "workflow",
         "workspace",
     };

View File

@@ -36,6 +36,7 @@ namespace GitHub.Runner.Worker.Handlers
 protected IActionCommandManager ActionCommandManager { get; private set; }
 public Pipelines.ActionStepDefinitionReference Action { get; set; }
+public bool IsActionStep => Action != null;
 public Dictionary<string, string> Environment { get; set; }
 public Variables RuntimeVariables { get; set; }
 public IExecutionContext ExecutionContext { get; set; }
@@ -49,13 +50,18 @@ namespace GitHub.Runner.Worker.Handlers
 // Print out action details
 PrintActionDetails(stage);
-// Get telemetry for the action.
-PopulateActionTelemetry();
+// Get telemetry for the action
+PopulateActionTelemetry(stage);
 }
-protected void PopulateActionTelemetry()
+protected void PopulateActionTelemetry(ActionRunStage stage)
 {
-    if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
+    if (!IsActionStep)
+    {
+        ExecutionContext.StepTelemetry.Type = "runner";
+        ExecutionContext.StepTelemetry.Action = $"{stage} Job Hook";
+    }
+    else if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
     {
         ExecutionContext.StepTelemetry.Type = "docker";
         var registryAction = Action as Pipelines.ContainerRegistryReference;

View File

@@ -24,16 +24,22 @@ namespace GitHub.Runner.Worker.Handlers
protected override void PrintActionDetails(ActionRunStage stage) protected override void PrintActionDetails(ActionRunStage stage)
{ {
// if we're executing a Job Extension, we won't have an 'Action'
if (stage == ActionRunStage.Post) if (!IsActionStep)
{ {
throw new NotSupportedException("Script action should not have 'Post' job action."); if (Inputs.TryGetValue("path", out var path))
{
ExecutionContext.Output($"##[group]Run '{path}'");
} }
else
{
throw new InvalidOperationException("Inputs 'path' must be set for job extensions");
}
}
else if (Action.Type == Pipelines.ActionSourceType.Script)
{
Inputs.TryGetValue("script", out string contents); Inputs.TryGetValue("script", out string contents);
contents = contents ?? string.Empty; contents = contents ?? string.Empty;
if (Action.Type == Pipelines.ActionSourceType.Script)
{
var firstLine = contents.TrimStart(' ', '\t', '\r', '\n'); var firstLine = contents.TrimStart(' ', '\t', '\r', '\n');
var firstNewLine = firstLine.IndexOfAny(new[] { '\r', '\n' }); var firstNewLine = firstLine.IndexOfAny(new[] { '\r', '\n' });
if (firstNewLine >= 0) if (firstNewLine >= 0)
@@ -42,18 +48,17 @@ namespace GitHub.Runner.Worker.Handlers
} }
ExecutionContext.Output($"##[group]Run {firstLine}"); ExecutionContext.Output($"##[group]Run {firstLine}");
}
else
{
throw new InvalidOperationException($"Invalid action type {Action.Type} for {nameof(ScriptHandler)}");
}
var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n'); var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines) foreach (var line in multiLines)
{ {
// Bright Cyan color // Bright Cyan color
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m"); ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
} }
}
else
{
throw new InvalidOperationException($"Invalid action type {Action?.Type} for {nameof(ScriptHandler)}");
}
string argFormat; string argFormat;
string shellCommand; string shellCommand;
@@ -132,11 +137,6 @@ namespace GitHub.Runner.Worker.Handlers
public async Task RunAsync(ActionRunStage stage) public async Task RunAsync(ActionRunStage stage)
{ {
if (stage == ActionRunStage.Post)
{
throw new NotSupportedException("Script action should not have 'Post' job action.");
}
// Validate args // Validate args
Trace.Entering(); Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext)); ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
@@ -212,7 +212,8 @@ namespace GitHub.Runner.Worker.Handlers
} }
} }
if (!string.IsNullOrEmpty(shellCommand)) // Don't override runner telemetry here
if (!string.IsNullOrEmpty(shellCommand) && IsActionStep)
{ {
ExecutionContext.StepTelemetry.Action = shellCommand; ExecutionContext.StepTelemetry.Action = shellCommand;
} }
@@ -222,10 +223,24 @@ namespace GitHub.Runner.Worker.Handlers
{ {
throw new ArgumentException("Invalid shell option. Shell must be a valid built-in (bash, sh, cmd, powershell, pwsh) or a format string containing '{0}'"); throw new ArgumentException("Invalid shell option. Shell must be a valid built-in (bash, sh, cmd, powershell, pwsh) or a format string containing '{0}'");
} }
string scriptFilePath, resolvedScriptPath;
if (IsActionStep)
{
// We do not not the full path until we know what shell is being used, so that we can determine the file extension // We do not not the full path until we know what shell is being used, so that we can determine the file extension
var scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}"); scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}");
var resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}"; resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}";
}
else
{
// JobExtensionRunners run a script file, we load that from the inputs here
if (!Inputs.ContainsKey("path"))
{
throw new ArgumentException("Expected 'path' input to be set");
}
scriptFilePath = Inputs["path"];
ArgUtil.NotNullOrEmpty(scriptFilePath, "path");
resolvedScriptPath = Inputs["path"].Replace("\"", "\\\"");
}
// Format arg string with script path // Format arg string with script path
var arguments = string.Format(argFormat, resolvedScriptPath); var arguments = string.Format(argFormat, resolvedScriptPath);
@@ -242,8 +257,11 @@ namespace GitHub.Runner.Worker.Handlers
// Don't add a BOM. It causes the script to fail on some operating systems (e.g. on Ubuntu 14). // Don't add a BOM. It causes the script to fail on some operating systems (e.g. on Ubuntu 14).
var encoding = new UTF8Encoding(false); var encoding = new UTF8Encoding(false);
#endif #endif
if (IsActionStep)
{
// Script is written to local path (ie host) but executed relative to the StepHost, which may be a container // Script is written to local path (ie host) but executed relative to the StepHost, which may be a container
File.WriteAllText(scriptFilePath, contents, encoding); File.WriteAllText(scriptFilePath, contents, encoding);
}
// Prepend PATH // Prepend PATH
AddPrependPathToEnvironment(); AddPrependPathToEnvironment();

View File

@@ -1,6 +1,8 @@
 using System;
 using System.Collections.Generic;
+using System.IO;
+using GitHub.Runner.Sdk;
 namespace GitHub.Runner.Worker.Handlers
 {
@@ -79,5 +81,22 @@ namespace GitHub.Runner.Worker.Handlers
         throw new ArgumentException($"Failed to parse COMMAND [..ARGS] from {shellOption}");
     }
 }
+internal static string GetDefaultShellForScript(string path, Common.Tracing trace, string prependPath)
+{
+    var format = "{0} {1}";
+    switch (Path.GetExtension(path))
+    {
+        case ".sh":
+            // use 'sh' args but prefer bash
+            var pathToShell = WhichUtil.Which("bash", false, trace, prependPath) ?? WhichUtil.Which("sh", true, trace, prependPath);
+            return string.Format(format, pathToShell, _defaultArguments["sh"]);
+        case ".ps1":
+            var pathToPowershell = WhichUtil.Which("pwsh", false, trace, prependPath) ?? WhichUtil.Which("powershell", true, trace, prependPath);
+            return string.Format(format, pathToPowershell, _defaultArguments["powershell"]);
+        default:
+            throw new ArgumentException($"{path} is not a valid path to a script. Make sure it ends in '.sh' or '.ps1'.");
+    }
+}
 }
 }

View File

@@ -15,6 +15,7 @@ using GitHub.DistributedTask.WebApi;
 using GitHub.Runner.Common;
 using GitHub.Runner.Common.Util;
 using GitHub.Runner.Sdk;
+using GitHub.Runner.Worker;
 using Pipelines = GitHub.DistributedTask.Pipelines;
 namespace GitHub.Runner.Worker
@@ -248,6 +249,19 @@ namespace GitHub.Runner.Worker
 Trace.Info("Downloading actions");
 var actionManager = HostContext.GetService<IActionManager>();
 var prepareResult = await actionManager.PrepareActionsAsync(context, message.Steps);
+// add hook to preJobSteps
+var startedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED");
+if (!string.IsNullOrEmpty(startedHookPath))
+{
+    var hookProvider = HostContext.GetService<IJobHookProvider>();
+    var jobHookData = new JobHookData(ActionRunStage.Pre, startedHookPath);
+    preJobSteps.Add(new JobExtensionRunner(runAsync: hookProvider.RunHook,
+                                           condition: $"{PipelineTemplateConstants.Always}()",
+                                           displayName: Constants.Hooks.JobStartedStepName,
+                                           data: (object)jobHookData));
+}
 preJobSteps.AddRange(prepareResult.ContainerSetupSteps);
 // Add start-container steps, record and stop-container steps
@@ -337,6 +351,18 @@ namespace GitHub.Runner.Worker
     }
 }
+// Register Job Completed hook if the variable is set
+var completedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED");
+if (!string.IsNullOrEmpty(completedHookPath))
+{
+    var hookProvider = HostContext.GetService<IJobHookProvider>();
+    var jobHookData = new JobHookData(ActionRunStage.Post, completedHookPath);
+    jobContext.RegisterPostJobStep(new JobExtensionRunner(runAsync: hookProvider.RunHook,
+                                                          condition: $"{PipelineTemplateConstants.Always}()",
+                                                          displayName: Constants.Hooks.JobCompletedStepName,
+                                                          data: (object)jobHookData));
+}
 List<IStep> steps = new List<IStep>();
 steps.AddRange(preJobSteps);
 steps.AddRange(jobSteps);
@@ -406,7 +432,7 @@ namespace GitHub.Runner.Worker
 // create a new timeline record node for 'Finalize job'
 IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Complete job", $"{nameof(JobExtension)}_Final", null, null, ActionRunStage.Post);
 context.StepTelemetry.Type = "runner";
-context.StepTelemetry.Action = "complete_joh";
+context.StepTelemetry.Action = "complete_job";
 using (var register = jobContext.CancellationToken.Register(() => { context.CancelToken(); }))
 {
     try

View File

@@ -0,0 +1,95 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using System.Linq;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Handlers;
namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(JobHookProvider))]
public interface IJobHookProvider : IRunnerService
{
Task RunHook(IExecutionContext executionContext, object data);
}
public class JobHookData
{
public string Path {get; private set;}
public ActionRunStage Stage {get; private set;}
public JobHookData(ActionRunStage stage, string path)
{
Path = path;
Stage = stage;
}
}
public class JobHookProvider : RunnerService, IJobHookProvider
{
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
}
public async Task RunHook(IExecutionContext executionContext, object data)
{
// Get Inputs
var hookData = data as JobHookData;
ArgUtil.NotNull(hookData, nameof(JobHookData));
var displayName = hookData.Stage == ActionRunStage.Pre ? Constants.Hooks.JobStartedStepName : Constants.Hooks.JobCompletedStepName;
// Log to users so that they know how this step was injected
executionContext.Output($"A '{displayName}' has been configured by the self-hosted runner administrator");
// Validate script file.
if (!File.Exists(hookData.Path))
{
throw new FileNotFoundException("File doesn't exist");
}
executionContext.WriteWebhookPayload();
// Create the handler data.
var scriptDirectory = Path.GetDirectoryName(hookData.Path);
var stepHost = HostContext.CreateService<IDefaultStepHost>();
var prependPath = string.Join(Path.PathSeparator.ToString(), executionContext.Global.PrependPath.Reverse<string>());
Dictionary<string, string> inputs = new()
{
["path"] = hookData.Path,
["shell"] = ScriptHandlerHelpers.GetDefaultShellForScript(hookData.Path, Trace, prependPath)
};
// Create the handler
var handlerFactory = HostContext.GetService<IHandlerFactory>();
var handler = handlerFactory.Create(
executionContext,
action: null,
stepHost,
new ScriptActionExecutionData(),
inputs,
environment: new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
executionContext.Global.Variables,
actionDirectory: scriptDirectory,
localActionContainerSetupSteps: null);
handler.PrepareExecution(hookData.Stage);
// Setup file commands
var fileCommandManager = HostContext.CreateService<IFileCommandManager>();
fileCommandManager.InitializeFiles(executionContext, null);
// Run the step and process the file commands
try
{
await handler.RunAsync(hookData.Stage);
}
finally
{
fileCommandManager.ProcessFiles(executionContext, executionContext.Global.Container);
}
}
}
}

View File

@@ -118,7 +118,7 @@ namespace GitHub.Runner.Common.Tests.Worker
 await _actionRunner.RunAsync();
 //Assert
-_ec.Verify(x => x.SetGitHubContext("event_path", Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "_github_workflow", "event.json")), Times.Once);
+_ec.Verify(x => x.WriteWebhookPayload(), Times.Once);
 }
 [Fact]

View File

@@ -144,6 +144,55 @@ namespace GitHub.Runner.Common.Tests.Worker
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void AddIssue_AddStepAndLineNumberInformation()
{
using (TestHostContext hc = CreateTestContext())
{
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var pagingLogger2 = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
hc.EnqueueInstance(pagingLogger.Object);
hc.EnqueueInstance(pagingLogger2.Object);
hc.SetSingleton(jobServerQueue.Object);
var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);
ec.InitializeJob(jobRequest, CancellationToken.None);
ec.Start();
var embeddedStep = ec.CreateChild(Guid.NewGuid(), "action_1_pre", "action_1_pre", null, null, ActionRunStage.Main, isEmbedded: true);
embeddedStep.Start();
embeddedStep.AddIssue(new Issue() { Type = IssueType.Error, Message = "error annotation that should have step and line number information" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning annotation that should have step and line number information" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice annotation that should have step and line number information" });
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Error).Count() == 1)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Warning).Count() == 1)), Times.AtLeastOnce);
jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.Is<TimelineRecord>(t => t.Issues.Where(i => i.Data.ContainsKey("stepNumber") && i.Data.ContainsKey("logFileLineNumber") && i.Type == IssueType.Notice).Count() == 1)), Times.AtLeastOnce);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]

View File

@@ -25,6 +25,7 @@ namespace GitHub.Runner.Common.Tests.Worker
 private Mock<IPagingLogger> _logger;
 private Mock<IContainerOperationProvider> _containerProvider;
 private Mock<IDiagnosticLogManager> _diagnosticLogManager;
+private Mock<IJobHookProvider> _jobHookProvider;
 private CancellationTokenSource _tokenSource;
 private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
@@ -40,6 +41,7 @@ namespace GitHub.Runner.Common.Tests.Worker
 _directoryManager = new Mock<IPipelineDirectoryManager>();
 _directoryManager.Setup(x => x.PrepareDirectory(It.IsAny<IExecutionContext>(), It.IsAny<Pipelines.WorkspaceOptions>()))
     .Returns(new TrackingConfig() { PipelineDirectory = "runner", WorkspaceDirectory = "runner/runner" });
+_jobHookProvider = new Mock<IJobHookProvider>();
 IActionRunner step1 = new ActionRunner();
 IActionRunner step2 = new ActionRunner();
@@ -111,7 +113,9 @@ namespace GitHub.Runner.Common.Tests.Worker
 hc.SetSingleton(_containerProvider.Object);
 hc.SetSingleton(_directoryManager.Object);
 hc.SetSingleton(_diagnosticLogManager.Object);
+hc.SetSingleton(_jobHookProvider.Object);
 hc.EnqueueInstance<IPagingLogger>(_logger.Object); // JobExecutionContext
+hc.EnqueueInstance<IPagingLogger>(_logger.Object); // job start hook
 hc.EnqueueInstance<IPagingLogger>(_logger.Object); // Initial Job
 hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step1
 hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step2
@@ -120,6 +124,7 @@ namespace GitHub.Runner.Common.Tests.Worker
 hc.EnqueueInstance<IPagingLogger>(_logger.Object); // step5
 hc.EnqueueInstance<IPagingLogger>(_logger.Object); // prepare1
 hc.EnqueueInstance<IPagingLogger>(_logger.Object); // prepare2
+hc.EnqueueInstance<IPagingLogger>(_logger.Object); // job complete hook
 hc.EnqueueInstance<IActionRunner>(step1);
 hc.EnqueueInstance<IActionRunner>(step2);
@@ -348,5 +353,62 @@ namespace GitHub.Runner.Common.Tests.Worker
     Assert.Equal(TaskResult.Succeeded, _jobEc.Result);
     }
 }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task EnsurePreAndPostHookStepsIfEnvExists()
{
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED", "/foo/bar");
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED", "/bar/foo");
using (TestHostContext hc = CreateTestContext())
{
var jobExtension = new JobExtension();
jobExtension.Initialize(hc);
_actionManager.Setup(x => x.PrepareActionsAsync(It.IsAny<IExecutionContext>(), It.IsAny<IEnumerable<Pipelines.JobStep>>(), It.IsAny<Guid>()))
.Returns(Task.FromResult(new PrepareResult(new List<JobExtensionRunner>(), new Dictionary<Guid, IActionRunner>())));
List<IStep> result = await jobExtension.InitializeJob(_jobEc, _message);
var trace = hc.GetTrace();
var hookStart = result.First() as JobExtensionRunner;
jobExtension.FinalizeJob(_jobEc, _message, DateTime.UtcNow);
Assert.Equal(Constants.Hooks.JobStartedStepName, hookStart.DisplayName);
Assert.Equal(Constants.Hooks.JobCompletedStepName, (_jobEc.PostJobSteps.Last() as JobExtensionRunner).DisplayName);
}
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_STARTED", null);
Environment.SetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED", null);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void EnsureNoPreAndPostHookSteps()
{
using (TestHostContext hc = CreateTestContext())
{
var jobExtension = new JobExtension();
jobExtension.Initialize(hc);
_message.ActionsEnvironment = null;
_jobEc = new Runner.Worker.ExecutionContext {Result = TaskResult.Succeeded};
_jobEc.Initialize(hc);
_jobEc.InitializeJob(_message, _tokenSource.Token);
var x = _jobEc.JobSteps;
jobExtension.FinalizeJob(_jobEc, _message, DateTime.UtcNow);
Assert.Equal(TaskResult.Succeeded, _jobEc.Result);
Assert.Equal(0, _jobEc.PostJobSteps.Count);
}
}
}
}

View File

@@ -128,6 +128,7 @@ function layout ()
 chmod +x "${LAYOUT_DIR}/bin/Runner.Worker"
 chmod +x "${LAYOUT_DIR}/bin/Runner.PluginHost"
 chmod +x "${LAYOUT_DIR}/bin/installdependencies.sh"
+chmod +x "${LAYOUT_DIR}/safe_sleep.sh"
 fi
heading "Setup externals folder for $RUNTIME_ID runner's layout" heading "Setup externals folder for $RUNTIME_ID runner's layout"

View File

@@ -1 +1 @@
-2.288.0
+2.289.0