Compare commits

1 Commit

Author: JoannaaKL
SHA1: 3abc186ede
Message: Update TaskAgentHttpClientBase.cs
Date: 2023-02-16 16:11:37 +01:00
281 changed files with 1168 additions and 5138 deletions

.editorconfig (new file, 9 lines)
View File

@@ -0,0 +1,9 @@
# https://editorconfig.org/
[*]
charset = utf-8 # Set default charset to utf-8
insert_final_newline = true # ensure all files end with a single newline
trim_trailing_whitespace = true # attempt to remove trailing whitespace on save
[*.md]
trim_trailing_whitespace = false # in markdown, "two trailing spaces" is unfortunately meaningful; it means `<br>`

View File

@@ -131,7 +131,7 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
shell: bash
id: sha
name: Compute SHA256
@@ -140,8 +140,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noexternals
name: Compute SHA256
@@ -150,8 +150,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noruntime
name: Compute SHA256
@@ -160,8 +160,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noruntime_noexternals
name: Compute SHA256
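For reference, the hunks above toggle between the deprecated `::set-output` workflow command and the file-based `$GITHUB_OUTPUT` syntax. A minimal sketch of the `$GITHUB_OUTPUT` form, reusing the step name and id shown above (the follow-up step that reads the output is illustrative, not part of this diff):

```yaml
# Expose a computed SHA-256 as a step output by appending key=value
# to the file named by $GITHUB_OUTPUT (replaces `::set-output`).
- name: Compute SHA256
  id: sha
  shell: bash
  run: |
    file=$(ls)
    sha=$(sha256sum "$file" | awk '{ print $1 }')
    echo "Computed sha256: $sha for $file"
    echo "sha256=$sha" >> "$GITHUB_OUTPUT"

# A later step can read the value through the steps context.
- name: Print SHA256
  run: echo "${{ steps.sha.outputs.sha256 }}"
```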

View File

@@ -1,65 +0,0 @@
# ADR 2494: Runner Image Tags
**Date**: 2023-03-17
**Status**: Accepted<!-- |Accepted|Rejected|Superceded|Deprecated -->
## Context
Following the [adoption of actions-runner-controller by GitHub](https://github.com/actions/actions-runner-controller/discussions/2072) and the introduction of the new runner scale set autoscaling mode, we needed to provide a basic runner image that could be used off the shelf without much friction.
The [current runner image](https://github.com/actions/runner/pkgs/container/actions-runner) is published to GHCR. Each release of this image is tagged with the runner version and the most recent release is also tagged with `latest`.
While the use of `latest` is common practice, we recommend that users pin a specific version of the runner image for a predictable runtime and improved security posture. However, we still notice that a large number of end users are relying on the `latest` tag & raising issues when they encounter problems.
Add to that, the community actions-runner-controller maintainers have issued a [deprecation notice](https://github.com/actions/actions-runner-controller/issues/2056) of the `latest` tag for the existing runner images (https://github.com/orgs/actions-runner-controller/packages).
## Decision
Proceed with Option 2, keeping the `latest` tag and adding the `NOTES.txt` file to our helm charts with the notice.
### Option 1: Remove the `latest` tag
By removing the `latest` tag, we have to proceed with either of these options:
1. Remove the runner image reference in the `values.yaml` provided with the `gha-runner-scale-set` helm chart and mark these fields as required so that users have to explicitly specify a runner image and a specific tag. This will obviously introduce more friction for users who want to start using actions-runner-controller for the first time.
```yaml
spec:
containers:
- name: runner
image: ""
tag: ""
command: ["/home/runner/run.sh"]
```
1. Pin a specific runner image tag in the `values.yaml` provided with the `gha-runner-scale-set` helm chart. This will reduce friction for users who want to start using actions-runner-controller for the first time but will require us to update the `values.yaml` with every new runner release.
```yaml
spec:
containers:
- name: runner
image: "ghcr.io/actions/actions-runner"
tag: "v2.300.0"
command: ["/home/runner/run.sh"]
```
### Option 2: Keep the `latest` tag
Keeping the `latest` tag is also a reasonable option especially if we don't expect to make any breaking changes to the runner image. We could enhance this by adding a [NOTES.txt](https://helm.sh/docs/chart_template_guide/notes_files/) to the helm chart which will be displayed to the user after a successful helm install/upgrade. This will help users understand the implications of using the `latest` tag and how to pin a specific version of the runner image.
The runner image release workflow will need to be updated so that the image is pushed to GHCR and tagged only when the runner rollout has reached all scale units.
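As a hedged illustration of this decision (the wording below is not from the chart itself), the `NOTES.txt` displayed after a successful `helm install`/`helm upgrade` could carry a notice along these lines, reusing the pinning example from Option 1:

```
NOTE: The runner image in this chart defaults to the `latest` tag.
For a predictable runtime and an improved security posture, pin a
specific runner version in your values file, for example:

  spec:
    containers:
      - name: runner
        image: "ghcr.io/actions/actions-runner"
        tag: "v2.300.0"
        command: ["/home/runner/run.sh"]
```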
## Consequences
Proceeding with **option 1** means:
1. We will enhance the runtime predictability and security posture of our end users
1. We will have to update the `values.yaml` with every new runner release (that can be automated)
1. We will introduce friction for users who want to start using actions-runner-controller for the first time
Proceeding with **option 2** means:
1. We will have to continue to maintain the `latest` tag
1. We will assume that end users will be able to handle the implications of using the `latest` tag
1. Runner image release workflow needs to be updated

View File

@@ -157,7 +157,7 @@ cat (Runner/Worker)_TIMESTAMP.log # view your log file
## Styling
We use the .NET Foundation and CoreCLR style guidelines [located here](
https://github.com/dotnet/runtime/blob/main/docs/coding-guidelines/coding-style.md)
https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md)
### Format C# Code
@@ -165,4 +165,4 @@ To format both staged and unstaged .cs files
```
cd ./src
./dev.(cmd|sh) format
```
```

View File

@@ -2,7 +2,7 @@ FROM mcr.microsoft.com/dotnet/runtime-deps:6.0 as build
ARG RUNNER_VERSION
ARG RUNNER_ARCH="x64"
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.3.2
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.2.0
ARG DOCKER_VERSION=20.10.23
RUN apt update -y && apt install curl unzip -y
@@ -24,26 +24,11 @@ RUN export DOCKER_ARCH=x86_64 \
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0
ENV DEBIAN_FRONTEND=noninteractive
ENV RUNNER_ALLOW_RUNASROOT=1
ENV RUNNER_MANUALLY_TRAP_SIG=1
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
RUN apt-get update -y \
&& apt-get install -y --no-install-recommends \
sudo \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /actions-runner
COPY --from=build /actions-runner .
RUN adduser --disabled-password --gecos "" --uid 1001 runner \
&& groupadd docker --gid 123 \
&& usermod -aG sudo runner \
&& usermod -aG docker runner \
&& echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers
WORKDIR /home/runner
COPY --chown=runner:docker --from=build /actions-runner .
RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker
USER runner
RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker

View File

@@ -1,18 +1,15 @@
## Features
- Runner changes for communication with Results service (#2510, #2531, #2535, #2516)
- Add `*.ghe.localhost` domains to hosted server check (#2536)
- Add `OrchestrationId` to user-agent for better telemetry correlation. (#2568)
- Add support for ghe.com domain (#2420)
- Add docker cli to the runner image. (#2425)
## Bugs
- Fix JIT configurations on Windows (#2497)
- Guard against NullReference while creating HostContext (#2343)
- Handles broken symlink in `Which` (#2150, #2196)
- Adding curl retry for external tool downloads (#2552, #2557)
- Limit the time we wait for waiting websocket to connect. (#2554)
- Fix URL construction bug for RunService (#2396)
- Defer evaluation of a step's DisplayName until its condition is evaluated. (#2313)
- Replace '(' and ')' with '[' and '] from OS.Description for fixing User-Agent header validation (#2288)
## Misc
- Bump container hooks version to 0.3.1 in runner image (#2496)
- Runner changes to communicate with vNext services (#2487, #2500, #2505, #2541, #2547)
- Bump dotnet sdk to latest version. (#2392)
- Start calling run service for job completion (#2412, #2423)
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.

View File

@@ -1,5 +1,5 @@
[*.cs]
charset = utf-8-bom
charset = utf-8
insert_final_newline = true
csharp_new_line_before_else = true

View File

@@ -55,23 +55,12 @@ function acquireExternalTool() {
# Download from source to the partial file.
echo "Downloading $download_source"
mkdir -p "$(dirname "$download_target")" || checkRC 'mkdir'
CURL_VERSION=$(curl --version | awk 'NR==1{print $2}')
echo "Curl version: $CURL_VERSION"
# curl -f Fail silently (no output at all) on HTTP errors (H)
# -k Allow connections to SSL sites without certs (H)
# -S Show error. With -s, make curl show errors when they occur
# -L Follow redirects (H)
# -o FILE Write to FILE instead of stdout
# --retry 3 Retries transient errors 3 times (timeouts, 5xx)
if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then
# Curl version is less than or equal to 7.71.0, skipping retry-all-errors flag
curl -fkSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
else
# Curl version is greater than 7.71.0, running curl with --retry-all-errors flag
curl -fkSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
fi
curl -fkSL -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
# Move the partial file to the download target.
mv "$partial_target" "$download_target" || checkRC 'mv'

View File

@@ -24,8 +24,7 @@ if (exitServiceAfterNFailures <= 0) {
exitServiceAfterNFailures = NaN;
}
var unknownFailureRetryCount = 0;
var retriableFailureRetryCount = 0;
var consecutiveFailureCount = 0;
var gracefulShutdown = function () {
console.log("Shutting down runner listener");
@@ -63,8 +62,7 @@ var runService = function () {
listener.stdout.on("data", (data) => {
if (data.toString("utf8").includes("Listening for Jobs")) {
unknownFailureRetryCount = 0;
retriableFailureRetryCount = 0;
consecutiveFailureCount = 0;
}
process.stdout.write(data.toString("utf8"));
});
@@ -94,38 +92,24 @@ var runService = function () {
console.log(
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
);
unknownFailureRetryCount = 0;
retriableFailureRetryCount++;
if (retriableFailureRetryCount >= 10) {
console.error(
"Stopping the runner after 10 consecutive re-tryable failures"
);
stopping = true;
}
consecutiveFailureCount = 0;
} else if (code === 3 || code === 4) {
console.log(
"Runner listener exit because of updating, re-launch runner in 5 seconds."
);
unknownFailureRetryCount = 0;
retriableFailureRetryCount++;
if (retriableFailureRetryCount >= 10) {
console.error(
"Stopping the runner after 10 consecutive re-tryable failures"
);
stopping = true;
}
consecutiveFailureCount = 0;
} else {
var messagePrefix = "Runner listener exit with undefined return code";
unknownFailureRetryCount++;
retriableFailureRetryCount = 0;
consecutiveFailureCount++;
if (
!isNaN(exitServiceAfterNFailures) &&
unknownFailureRetryCount >= exitServiceAfterNFailures
consecutiveFailureCount >= exitServiceAfterNFailures
) {
console.error(
`${messagePrefix}, exiting service after ${unknownFailureRetryCount} consecutive failures`
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
);
stopping = true
gracefulShutdown();
return;
} else {
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
}

View File

@@ -53,33 +53,6 @@ runWithManualTrap() {
done
}
function updateCerts() {
local sudo_prefix=""
local user_id=`id -u`
if [ $user_id -ne 0 ]; then
if [[ ! -x "$(command -v sudo)" ]]; then
echo "Warning: failed to update certificate store: sudo is required but not found"
return 1
else
sudo_prefix="sudo"
fi
fi
if [[ -x "$(command -v update-ca-certificates)" ]]; then
eval $sudo_prefix "update-ca-certificates"
elif [[ -x "$(command -v update-ca-trust)" ]]; then
eval $sudo_prefix "update-ca-trust"
else
echo "Warning: failed to update certificate store: update-ca-certificates or update-ca-trust not found. This can happen if you're using a different runner base image."
return 1
fi
}
if [[ ! -z "$RUNNER_UPDATE_CA_CERTS" ]]; then
updateCerts
fi
if [[ -z "$RUNNER_MANUALLY_TRAP_SIG" ]]; then
run $*
else

View File

@@ -1,4 +1,4 @@
using GitHub.Runner.Sdk;
using GitHub.Runner.Sdk;
using System;
using System.Collections.Generic;

View File

@@ -1,4 +1,4 @@
namespace GitHub.Runner.Common
namespace GitHub.Runner.Common
{
public enum ActionResult
{
@@ -10,4 +10,4 @@
Skipped = 3
}
}
}

View File

@@ -1,56 +0,0 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi.RawClient;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(BrokerServer))]
public interface IBrokerServer : IRunnerService
{
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version);
}
public sealed class BrokerServer : RunnerService, IBrokerServer
{
private bool _hasConnection;
private Uri _brokerUri;
private RawConnection _connection;
private BrokerHttpClient _brokerHttpClient;
public async Task ConnectAsync(Uri serverUri, VssCredentials credentials)
{
_brokerUri = serverUri;
_connection = VssUtil.CreateRawConnection(serverUri, credentials);
_brokerHttpClient = await _connection.GetClientAsync<BrokerHttpClient>();
_hasConnection = true;
}
private void CheckConnection()
{
if (!_hasConnection)
{
throw new InvalidOperationException($"SetConnection");
}
}
public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version)
{
CheckConnection();
var jobMessage = RetryRequest<TaskAgentMessage>(
async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, cancellationToken), cancellationToken);
return jobMessage;
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.Logging;
using GitHub.Runner.Sdk;

View File

@@ -1,4 +1,4 @@
using GitHub.Runner.Sdk;
using GitHub.Runner.Sdk;
using System;
using System.IO;
using System.Linq;
@@ -50,12 +50,6 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)]
public string MonitorSocketAddress { get; set; }
[DataMember(EmitDefaultValue = false)]
public bool UseV2Flow { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ServerUrlV2 { get; set; }
[IgnoreDataMember]
public bool IsHostedServer
{

View File

@@ -132,7 +132,6 @@ namespace GitHub.Runner.Common
public static readonly string GenerateServiceConfig = "generateServiceConfig";
public static readonly string Help = "help";
public static readonly string Local = "local";
public static readonly string NoDefaultLabels = "no-default-labels";
public static readonly string Replace = "replace";
public static readonly string DisableUpdate = "disableupdate";
public static readonly string Once = "once"; // Keep this around since customers still relies on it
@@ -262,8 +261,6 @@ namespace GitHub.Runner.Common
public static readonly string AccessToken = "system.accessToken";
public static readonly string Culture = "system.culture";
public static readonly string PhaseDisplayName = "system.phaseDisplayName";
public static readonly string JobRequestType = "system.jobRequestType";
public static readonly string OrchestrationId = "system.orchestrationId";
}
}

View File

@@ -1,4 +1,4 @@
using GitHub.Runner.Sdk;
using GitHub.Runner.Sdk;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;

View File

@@ -220,7 +220,7 @@ namespace GitHub.Runner.Common
var runnerFile = GetConfigFile(WellKnownConfigFile.Runner);
if (File.Exists(runnerFile))
{
var runnerSettings = IOUtil.LoadObject<RunnerSettings>(runnerFile, true);
var runnerSettings = IOUtil.LoadObject<RunnerSettings>(runnerFile);
_userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture)));
_userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture)));
}

View File

@@ -1,4 +1,4 @@
using GitHub.Runner.Sdk;
using GitHub.Runner.Sdk;
using System;
using System.Diagnostics;
using System.Globalization;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Net.Http;
using GitHub.Runner.Sdk;
@@ -24,4 +24,4 @@ namespace GitHub.Runner.Common
return client;
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Net;
using System.Net.Sockets;
using System.Text;
@@ -37,10 +37,10 @@ namespace GitHub.Runner.Common
{
ConnectMonitor(monitorSocketAddress);
}
private void StartMonitor(Guid jobId, string accessToken, Uri serverUri)
{
if (String.IsNullOrEmpty(accessToken))
if(String.IsNullOrEmpty(accessToken))
{
Trace.Info("No access token could be retrieved to start the monitor.");
return;
@@ -82,7 +82,7 @@ namespace GitHub.Runner.Common
_monitorSocket.Send(Encoding.UTF8.GetBytes(message));
Trace.Info("Finished EndMonitor writing to socket");
await Task.Delay(TimeSpan.FromSeconds(2));
await Task.Delay(TimeSpan.FromSeconds(2));
}
}
catch (SocketException e)

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -11,10 +11,10 @@ using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.Results.Client;
using GitHub.Services.WebApi;
using GitHub.Services.WebApi.Utilities.Internal;
using GitHub.Services.Results.Client;
using GitHub.Services.OAuth;
namespace GitHub.Runner.Common
{
@@ -24,11 +24,13 @@ namespace GitHub.Runner.Common
Task ConnectAsync(VssConnection jobConnection);
void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
void InitializeResultsClient(Uri uri, string token);
// logging and console
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
Task CreateStepSymmaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken);
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
@@ -42,6 +44,7 @@ namespace GitHub.Runner.Common
private bool _hasConnection;
private VssConnection _connection;
private TaskHttpClient _taskClient;
private ResultsHttpClient _resultsClient;
private ClientWebSocket _websocketClient;
private ServiceEndpoint _serviceEndpoint;
@@ -145,6 +148,12 @@ namespace GitHub.Runner.Common
InitializeWebsocketClient(TimeSpan.Zero);
}
public void InitializeResultsClient(Uri uri, string token)
{
var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
}
public ValueTask DisposeAsync()
{
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
@@ -199,15 +208,13 @@ namespace GitHub.Runner.Common
{
Trace.Info($"Attempting to start websocket client with delay {delay}.");
await Task.Delay(delay);
using var connectTimeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30));
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), connectTimeoutTokenSource.Token);
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), default(CancellationToken));
Trace.Info($"Successfully started websocket client.");
}
catch (Exception ex)
{
Trace.Info("Exception caught during websocket client connect, fallback of HTTP would be used now instead of websocket.");
Trace.Error(ex);
this._websocketClient = null;
}
}
@@ -254,7 +261,7 @@ namespace GitHub.Runner.Common
{
failedAttemptsToPostBatchedLinesByWebsocket++;
Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
Trace.Verbose(ex.ToString());
Trace.Error(ex);
if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
{
// let's consider failure percentage
@@ -309,6 +316,15 @@ namespace GitHub.Runner.Common
return _taskClient.CreateAttachmentAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, type, name, uploadStream, cancellationToken: cancellationToken);
}
public Task CreateStepSymmaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadStepSummaryAsync(planId, jobId, stepId, file, cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken)
{

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
@@ -17,10 +17,10 @@ namespace GitHub.Runner.Common
TaskCompletionSource<int> JobRecordUpdated { get; }
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
Task ShutdownAsync();
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false);
void Start(Pipelines.AgentJobRequestMessage jobRequest);
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
void QueueSummaryUpload(Guid stepRecordId, string name, string path, bool deleteSource);
void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord);
}
@@ -31,7 +31,7 @@ namespace GitHub.Runner.Common
private static readonly TimeSpan _delayForWebConsoleLineDequeue = TimeSpan.FromMilliseconds(500);
private static readonly TimeSpan _delayForTimelineUpdateDequeue = TimeSpan.FromMilliseconds(500);
private static readonly TimeSpan _delayForFileUploadDequeue = TimeSpan.FromMilliseconds(1000);
private static readonly TimeSpan _delayForResultsUploadDequeue = TimeSpan.FromMilliseconds(1000);
private static readonly TimeSpan _delayForSummaryUploadDequeue = TimeSpan.FromMilliseconds(1000);
// Job message information
private Guid _scopeIdentifier;
@@ -46,7 +46,7 @@ namespace GitHub.Runner.Common
// queue for file upload (log file or attachment)
private readonly ConcurrentQueue<UploadFileInfo> _fileUploadQueue = new();
private readonly ConcurrentQueue<ResultsUploadFileInfo> _resultsFileUploadQueue = new();
private readonly ConcurrentQueue<SummaryUploadFileInfo> _summaryFileUploadQueue = new();
// queue for timeline or timeline record update (one queue per timeline)
private readonly ConcurrentDictionary<Guid, ConcurrentQueue<TimelineRecord>> _timelineUpdateQueue = new();
@@ -60,17 +60,15 @@ namespace GitHub.Runner.Common
// Task for each queue's dequeue process
private Task _webConsoleLineDequeueTask;
private Task _fileUploadDequeueTask;
private Task _resultsUploadDequeueTask;
private Task _summaryUploadDequeueTask;
private Task _timelineUpdateDequeueTask;
// common
private IJobServer _jobServer;
private IResultsServer _resultsServer;
private Task[] _allDequeueTasks;
private readonly TaskCompletionSource<int> _jobCompletionSource = new();
private readonly TaskCompletionSource<int> _jobRecordUpdated = new();
private bool _queueInProcess = false;
private bool _resultsServiceOnly = false;
public TaskCompletionSource<int> JobRecordUpdated => _jobRecordUpdated;
@@ -86,27 +84,19 @@ namespace GitHub.Runner.Common
private bool _webConsoleLineAggressiveDequeue = true;
private bool _firstConsoleOutputs = true;
private bool _resultsClientInitiated = false;
private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_jobServer = hostContext.GetService<IJobServer>();
_resultsServer = hostContext.GetService<IResultsServer>();
}
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false)
public void Start(Pipelines.AgentJobRequestMessage jobRequest)
{
Trace.Entering();
_resultsServiceOnly = resultServiceOnly;
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (!resultServiceOnly)
{
_jobServer.InitializeWebsocketClient(serviceEndPoint);
}
_jobServer.InitializeWebsocketClient(serviceEndPoint);
// This code is usually wrapped by an instance of IExecutionContext which isn't available here.
jobRequest.Variables.TryGetValue("system.github.results_endpoint", out VariableValue resultsEndpointVariable);
@@ -117,19 +107,11 @@ namespace GitHub.Runner.Common
!string.IsNullOrEmpty(accessToken) &&
!string.IsNullOrEmpty(resultsReceiverEndpoint))
{
string liveConsoleFeedUrl = null;
Trace.Info("Initializing results client");
if (resultServiceOnly
&& serviceEndPoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl)
&& !string.IsNullOrEmpty(feedStreamUrl))
{
liveConsoleFeedUrl = feedStreamUrl;
}
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken);
_resultsClientInitiated = true;
_jobServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), accessToken);
}
if (_queueInProcess)
{
Trace.Info("No-opt, all queue process tasks are running.");
@@ -158,12 +140,12 @@ namespace GitHub.Runner.Common
_fileUploadDequeueTask = ProcessFilesUploadQueueAsync();
Trace.Info("Start results file upload queue.");
_resultsUploadDequeueTask = ProcessResultsUploadQueueAsync();
_summaryUploadDequeueTask = ProcessSummaryUploadQueueAsync();
Trace.Info("Start process timeline update queue.");
_timelineUpdateDequeueTask = ProcessTimelinesUpdateQueueAsync();
_allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask, _resultsUploadDequeueTask };
_allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask, _summaryUploadDequeueTask };
_queueInProcess = true;
}
@@ -194,9 +176,9 @@ namespace GitHub.Runner.Common
await ProcessFilesUploadQueueAsync(runOnce: true);
Trace.Info("File upload queue drained.");
Trace.Verbose("Draining results upload queue.");
await ProcessResultsUploadQueueAsync(runOnce: true);
Trace.Info("Results upload queue drained.");
Trace.Verbose("Draining results summary upload queue.");
await ProcessSummaryUploadQueueAsync(runOnce: true);
Trace.Info("Results summary upload queue drained.");
// ProcessTimelinesUpdateQueueAsync() will throw exception during shutdown
// if there is any timeline records that failed to update contains output variabls.
@@ -207,9 +189,6 @@ namespace GitHub.Runner.Common
Trace.Info($"Disposing job server ...");
await _jobServer.DisposeAsync();
Trace.Info($"Disposing results server ...");
await _resultsServer.DisposeAsync();
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
}
@@ -251,43 +230,21 @@ namespace GitHub.Runner.Common
_fileUploadQueue.Enqueue(newFile);
}
public void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines)
public void QueueSummaryUpload(Guid stepRecordId, string name, string path, bool deleteSource)
{
if (!_resultsClientInitiated)
{
Trace.Verbose("Skipping results upload");
try
{
if (deleteSource)
{
File.Delete(path);
}
}
catch (Exception ex)
{
Trace.Info("Catch exception during delete skipped results upload file.");
Trace.Error(ex);
}
return;
}
// all parameter not null, file path exist.
var newFile = new ResultsUploadFileInfo()
var newFile = new SummaryUploadFileInfo()
{
Name = name,
Path = path,
Type = type,
PlanId = _planId.ToString(),
JobId = _jobTimelineRecordId.ToString(),
RecordId = timelineRecordId,
DeleteSource = deleteSource,
Finalize = finalize,
FirstBlock = firstBlock,
TotalLines = totalLines,
StepId = stepRecordId.ToString(),
DeleteSource = deleteSource
};
Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to job {1} step {2}", newFile.Path, _jobTimelineRecordId, timelineRecordId);
_resultsFileUploadQueue.Enqueue(newFile);
Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to job {1} step {2}", newFile.Path, _jobTimelineRecordId, stepRecordId);
_summaryFileUploadQueue.Enqueue(newFile);
}
public void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord)
@@ -388,14 +345,7 @@ namespace GitHub.Runner.Common
// Give at most 60s for each request.
using (var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(60)))
{
if (_resultsServiceOnly)
{
await _resultsServer.AppendLiveConsoleFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, timeoutTokenSource.Token);
}
else
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, timeoutTokenSource.Token);
}
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, timeoutTokenSource.Token);
}
if (_firstConsoleOutputs)
@@ -487,18 +437,18 @@ namespace GitHub.Runner.Common
}
}
private async Task ProcessResultsUploadQueueAsync(bool runOnce = false)
private async Task ProcessSummaryUploadQueueAsync(bool runOnce = false)
{
Trace.Info("Starting results-based upload queue...");
while (!_jobCompletionSource.Task.IsCompleted || runOnce)
{
List<ResultsUploadFileInfo> filesToUpload = new();
ResultsUploadFileInfo dequeueFile;
while (_resultsFileUploadQueue.TryDequeue(out dequeueFile))
List<SummaryUploadFileInfo> filesToUpload = new();
SummaryUploadFileInfo dequeueFile;
while (_summaryFileUploadQueue.TryDequeue(out dequeueFile))
{
filesToUpload.Add(dequeueFile);
// process at most 10 file uploads.
// process at most 10 file upload.
if (!runOnce && filesToUpload.Count > 10)
{
break;
@@ -509,7 +459,7 @@ namespace GitHub.Runner.Common
{
if (runOnce)
{
Trace.Info($"Uploading {filesToUpload.Count} file(s) in one shot through results service.");
Trace.Info($"Uploading {filesToUpload.Count} summary files in one shot through results service.");
}
int errorCount = 0;
@@ -517,38 +467,30 @@ namespace GitHub.Runner.Common
{
try
{
if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
{
await UploadSummaryFile(file);
}
else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase))
{
if (file.RecordId != _jobTimelineRecordId)
{
Trace.Info($"Got a step log file to send to results service.");
await UploadResultsStepLogFile(file);
}
else if (file.RecordId == _jobTimelineRecordId)
{
Trace.Info($"Got a job log file to send to results service.");
await UploadResultsJobLogFile(file);
}
}
await UploadSummaryFile(file);
}
catch (Exception ex)
{
Trace.Info("Catch exception during file upload to results, keep going since the process is best effort.");
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception during summary file upload to results. {ex.Message}" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure;
var telemetryRecord = new TimelineRecord()
{
Id = Constants.Runner.TelemetryRecordId,
};
telemetryRecord.Issues.Add(issue);
QueueTimelineRecordUpdate(_jobTimelineId, telemetryRecord);
Trace.Info("Catch exception during summary file upload to results, keep going since the process is best effort.");
Trace.Error(ex);
}
finally
{
errorCount++;
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results
_resultsClientInitiated = false;
SendResultsTelemetry(ex);
}
}
Trace.Info("Tried to upload {0} file(s) to results, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
Trace.Info("Tried to upload {0} summary files to results, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
}
if (runOnce)
@@ -557,24 +499,11 @@ namespace GitHub.Runner.Common
}
else
{
await Task.Delay(_delayForResultsUploadDequeue);
await Task.Delay(_delayForSummaryUploadDequeue);
}
}
}
private void SendResultsTelemetry(Exception ex)
{
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception with results. {ex.Message}" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure;
var telemetryRecord = new TimelineRecord()
{
Id = Constants.Runner.TelemetryRecordId,
};
telemetryRecord.Issues.Add(issue);
QueueTimelineRecordUpdate(_jobTimelineId, telemetryRecord);
}
private async Task ProcessTimelinesUpdateQueueAsync(bool runOnce = false)
{
while (!_jobCompletionSource.Task.IsCompleted || runOnce)
@@ -622,7 +551,7 @@ namespace GitHub.Runner.Common
foreach (var detailTimeline in update.PendingRecords.Where(r => r.Details != null))
{
if (!_resultsServiceOnly && !_allTimelines.Contains(detailTimeline.Details.Id))
if (!_allTimelines.Contains(detailTimeline.Details.Id))
{
try
{
@@ -644,27 +573,7 @@ namespace GitHub.Runner.Common
try
{
if (!_resultsServiceOnly)
{
await _jobServer.UpdateTimelineRecordsAsync(_scopeIdentifier, _hubName, _planId, update.TimelineId, update.PendingRecords, default(CancellationToken));
}
try
{
if (_resultsClientInitiated)
{
await _resultsServer.UpdateResultsWorkflowStepsAsync(_scopeIdentifier, _hubName, _planId, update.TimelineId, update.PendingRecords, default(CancellationToken));
}
}
catch (Exception e)
{
Trace.Info("Catch exception during update steps, skip update Results.");
Trace.Error(e);
_resultsClientInitiated = false;
SendResultsTelemetry(e);
}
await _jobServer.UpdateTimelineRecordsAsync(_scopeIdentifier, _hubName, _planId, update.TimelineId, update.PendingRecords, default(CancellationToken));
if (_bufferedRetryRecords.Remove(update.TimelineId))
{
Trace.Verbose("Cleanup buffered timeline record for timeline: {0}.", update.TimelineId);
@@ -756,17 +665,17 @@ namespace GitHub.Runner.Common
timelineRecord.State = rec.State ?? timelineRecord.State;
timelineRecord.WorkerName = rec.WorkerName ?? timelineRecord.WorkerName;
if (rec.ErrorCount > 0)
if (rec.ErrorCount != null && rec.ErrorCount > 0)
{
timelineRecord.ErrorCount = rec.ErrorCount;
}
if (rec.WarningCount > 0)
if (rec.WarningCount != null && rec.WarningCount > 0)
{
timelineRecord.WarningCount = rec.WarningCount;
}
if (rec.NoticeCount > 0)
if (rec.NoticeCount != null && rec.NoticeCount > 0)
{
timelineRecord.NoticeCount = rec.NoticeCount;
}
@@ -797,7 +706,7 @@ namespace GitHub.Runner.Common
foreach (var record in mergedRecords)
{
Trace.Verbose($" Record: t={record.RecordType}, n={record.Name}, s={record.State}, st={record.StartTime}, {record.PercentComplete}%, ft={record.FinishTime}, r={record.Result}: {record.CurrentOperation}");
if (record.Issues != null)
if (record.Issues != null && record.Issues.Count > 0)
{
foreach (var issue in record.Issues)
{
@@ -807,7 +716,7 @@ namespace GitHub.Runner.Common
}
}
if (record.Variables != null)
if (record.Variables != null && record.Variables.Count > 0)
{
foreach (var variable in record.Variables)
{
@@ -824,30 +733,27 @@ namespace GitHub.Runner.Common
bool uploadSucceed = false;
try
{
if (!_resultsServiceOnly)
if (String.Equals(file.Type, CoreAttachmentType.Log, StringComparison.OrdinalIgnoreCase))
{
if (String.Equals(file.Type, CoreAttachmentType.Log, StringComparison.OrdinalIgnoreCase))
// Create the log
var taskLog = await _jobServer.CreateLogAsync(_scopeIdentifier, _hubName, _planId, new TaskLog(String.Format(@"logs\{0:D}", file.TimelineRecordId)), default(CancellationToken));
// Upload the contents
using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
// Create the log
var taskLog = await _jobServer.CreateLogAsync(_scopeIdentifier, _hubName, _planId, new TaskLog(String.Format(@"logs\{0:D}", file.TimelineRecordId)), default(CancellationToken));
// Upload the contents
using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await _jobServer.AppendLogContentAsync(_scopeIdentifier, _hubName, _planId, taskLog.Id, fs, default(CancellationToken));
}
// Create a new record and only set the Log field
var attachmentUpdataRecord = new TimelineRecord() { Id = file.TimelineRecordId, Log = taskLog };
QueueTimelineRecordUpdate(file.TimelineId, attachmentUpdataRecord);
var logUploaded = await _jobServer.AppendLogContentAsync(_scopeIdentifier, _hubName, _planId, taskLog.Id, fs, default(CancellationToken));
}
else
// Create a new record and only set the Log field
var attachmentUpdataRecord = new TimelineRecord() { Id = file.TimelineRecordId, Log = taskLog };
QueueTimelineRecordUpdate(file.TimelineId, attachmentUpdataRecord);
}
else
{
// Create attachment
using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
// Create attachment
using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var result = await _jobServer.CreateAttachmentAsync(_scopeIdentifier, _hubName, _planId, file.TimelineId, file.TimelineRecordId, file.Type, file.Name, fs, default(CancellationToken));
}
var result = await _jobServer.CreateAttachmentAsync(_scopeIdentifier, _hubName, _planId, file.TimelineId, file.TimelineRecordId, file.Type, file.Name, fs, default(CancellationToken));
}
}
@@ -870,50 +776,16 @@ namespace GitHub.Runner.Common
}
}
private async Task UploadSummaryFile(ResultsUploadFileInfo file)
private async Task UploadSummaryFile(SummaryUploadFileInfo file)
{
Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler summaryHandler = async (file) =>
{
await _resultsServer.CreateResultsStepSummaryAsync(file.PlanId, file.JobId, file.RecordId, file.Path, CancellationToken.None);
};
await UploadResultsFile(file, summaryHandler);
}
private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler stepLogHandler = async (file) =>
{
await _resultsServer.CreateResultsStepLogAsync(file.PlanId, file.JobId, file.RecordId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
};
await UploadResultsFile(file, stepLogHandler);
}
private async Task UploadResultsJobLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of job log file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler jobLogHandler = async (file) =>
{
await _resultsServer.CreateResultsJobLogAsync(file.PlanId, file.JobId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
};
await UploadResultsFile(file, jobLogHandler);
}
private async Task UploadResultsFile(ResultsUploadFileInfo file, ResultsFileUploadHandler uploadHandler)
{
if (!_resultsClientInitiated)
{
return;
}
bool uploadSucceed = false;
try
{
await uploadHandler(file);
// Upload the step summary
Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
var cancellationTokenSource = new CancellationTokenSource();
await _jobServer.CreateStepSymmaryAsync(file.PlanId, file.JobId, file.StepId, file.Path, cancellationTokenSource.Token);
uploadSucceed = true;
}
finally
@@ -926,7 +798,7 @@ namespace GitHub.Runner.Common
}
catch (Exception ex)
{
Trace.Info("Exception encountered during deletion of a temporary file that was already successfully uploaded to results.");
Trace.Info("Catch exception during delete success results uploaded summary file.");
Trace.Error(ex);
}
}
@@ -950,20 +822,18 @@ namespace GitHub.Runner.Common
public bool DeleteSource { get; set; }
}
internal class ResultsUploadFileInfo
internal class SummaryUploadFileInfo
{
public string Name { get; set; }
public string Type { get; set; }
public string Path { get; set; }
public string PlanId { get; set; }
public string JobId { get; set; }
public Guid RecordId { get; set; }
public string StepId { get; set; }
public bool DeleteSource { get; set; }
public bool Finalize { get; set; }
public bool FirstBlock { get; set; }
public long TotalLines { get; set; }
}
internal class ConsoleLineInfo
{
public ConsoleLineInfo(Guid recordId, string line, long? lineNumber)

View File

@@ -1,4 +1,4 @@
using System;
using System;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Common

View File

@@ -1,42 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Launch.Client;
using GitHub.Services.WebApi;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(LaunchServer))]
public interface ILaunchServer : IRunnerService
{
void InitializeLaunchClient(Uri uri, string token);
Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken);
}
public sealed class LaunchServer : RunnerService, ILaunchServer
{
private LaunchHttpClient _launchClient;
public void InitializeLaunchClient(Uri uri, string token)
{
var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._launchClient = new LaunchHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
}
public Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList,
CancellationToken cancellationToken)
{
if (_launchClient != null)
{
return _launchClient.GetResolveActionsDownloadInfoAsync(planId, jobId, actionReferenceList,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Launch client is not initialized.");
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
namespace GitHub.Runner.Common
@@ -21,12 +21,6 @@ namespace GitHub.Runner.Common
// 8 MB
public const int PageSize = 8 * 1024 * 1024;
// For Results
public static string BlocksFolder = "blocks";
// 2 MB
public const int BlockSize = 2 * 1024 * 1024;
private Guid _timelineId;
private Guid _timelineRecordId;
private FileStream _pageData;
@@ -38,13 +32,6 @@ namespace GitHub.Runner.Common
private string _pagesFolder;
private IJobServerQueue _jobServerQueue;
private string _resultsDataFileName;
private FileStream _resultsBlockData;
private StreamWriter _resultsBlockWriter;
private string _resultsBlockFolder;
private int _blockByteCount;
private int _blockCount;
public long TotalLines => _totalLines;
public override void Initialize(IHostContext hostContext)
@@ -52,10 +39,8 @@ namespace GitHub.Runner.Common
base.Initialize(hostContext);
_totalLines = 0;
_pagesFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), PagingFolder);
Directory.CreateDirectory(_pagesFolder);
_resultsBlockFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), BlocksFolder);
Directory.CreateDirectory(_resultsBlockFolder);
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
Directory.CreateDirectory(_pagesFolder);
}
public void Setup(Guid timelineId, Guid timelineRecordId)
@@ -75,17 +60,11 @@ namespace GitHub.Runner.Common
// lazy creation on write
if (_pageWriter == null)
{
NewPage();
}
if (_resultsBlockWriter == null)
{
NewBlock();
Create();
}
string line = $"{DateTime.UtcNow.ToString("O")} {message}";
_pageWriter.WriteLine(line);
_resultsBlockWriter.WriteLine(line);
_totalLines++;
if (line.IndexOf('\n') != -1)
@@ -99,25 +78,21 @@ namespace GitHub.Runner.Common
}
}
var bytes = System.Text.Encoding.UTF8.GetByteCount(line);
_byteCount += bytes;
_blockByteCount += bytes;
_byteCount += System.Text.Encoding.UTF8.GetByteCount(line);
if (_byteCount >= PageSize)
{
NewPage();
}
if (_blockByteCount >= BlockSize)
{
NewBlock();
}
}
public void End()
{
EndPage();
EndBlock(true);
}
private void Create()
{
NewPage();
}
private void NewPage()
@@ -142,27 +117,5 @@ namespace GitHub.Runner.Common
_jobServerQueue.QueueFileUpload(_timelineId, _timelineRecordId, "DistributedTask.Core.Log", "CustomToolLog", _dataFileName, true);
}
}
private void NewBlock()
{
EndBlock(false);
_blockByteCount = 0;
_resultsDataFileName = Path.Combine(_resultsBlockFolder, $"{_timelineId}_{_timelineRecordId}.{++_blockCount}");
_resultsBlockData = new FileStream(_resultsDataFileName, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite);
_resultsBlockWriter = new StreamWriter(_resultsBlockData, System.Text.Encoding.UTF8);
}
private void EndBlock(bool finalize)
{
if (_resultsBlockWriter != null)
{
_resultsBlockWriter.Flush();
_resultsBlockData.Flush();
_resultsBlockWriter.Dispose();
_resultsBlockWriter = null;
_resultsBlockData = null;
_jobServerQueue.QueueResultsUpload(_timelineRecordId, "ResultsLog", _resultsDataFileName, "Results.Core.Log", deleteSource: true, finalize, firstBlock: _resultsDataFileName.EndsWith(".1"), totalLines: _totalLines);
}
}
}
}

View File

@@ -1,4 +1,4 @@
using GitHub.Runner.Common.Util;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using System;
using System.Collections.Generic;

View File

@@ -1,262 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http.Headers;
using System.Net.WebSockets;
using System.Security;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.Results.Client;
using GitHub.Services.WebApi.Utilities.Internal;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(ResultServer))]
public interface IResultsServer : IRunnerService, IAsyncDisposable
{
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token);
Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
// logging and console
Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
CancellationToken cancellationToken);
Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize,
bool firstBlock, long lineCount, CancellationToken cancellationToken);
Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock,
long lineCount, CancellationToken cancellationToken);
Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
}
public sealed class ResultServer : RunnerService, IResultsServer
{
private ResultsHttpClient _resultsClient;
private ClientWebSocket _websocketClient;
private DateTime? _lastConnectionFailure;
private static readonly TimeSpan MinDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(100);
private static readonly TimeSpan MaxDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(500);
private Task _websocketConnectTask;
private String _liveConsoleFeedUrl;
private string _token;
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token)
{
var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
_token = token;
if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
{
_liveConsoleFeedUrl = liveConsoleFeedUrl;
InitializeWebsocketClient(liveConsoleFeedUrl, token, TimeSpan.Zero, retryConnection: true);
}
}
public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadStepSummaryAsync(planId, jobId, stepId, file,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize,
bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsStepLogAsync(planId, jobId, stepId, file, finalize, firstBlock,
lineCount, cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock,
long lineCount, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsJobLogAsync(planId, jobId, file, finalize, firstBlock, lineCount,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
try
{
var timelineRecords = records.ToList();
return _resultsClient.UpdateWorkflowStepsAsync(planId, new List<TimelineRecord>(timelineRecords),
cancellationToken: cancellationToken);
}
catch (Exception ex)
{
// Log error, but continue as this call is best-effort
Trace.Info($"Failed to update steps status due to {ex.GetType().Name}");
Trace.Error(ex);
}
}
throw new InvalidOperationException("Results client is not initialized.");
}
public ValueTask DisposeAsync()
{
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
GC.SuppressFinalize(this);
return ValueTask.CompletedTask;
}
private void InitializeWebsocketClient(string liveConsoleFeedUrl, string accessToken, TimeSpan delay, bool retryConnection = false)
{
if (string.IsNullOrEmpty(accessToken))
{
Trace.Info($"No access token from server");
return;
}
if (string.IsNullOrEmpty(liveConsoleFeedUrl))
{
Trace.Info($"No live console feed url from server");
return;
}
Trace.Info($"Creating websocket client ..." + liveConsoleFeedUrl);
this._websocketClient = new ClientWebSocket();
this._websocketClient.Options.SetRequestHeader("Authorization", $"Bearer {accessToken}");
var userAgentValues = new List<ProductInfoHeaderValue>();
userAgentValues.AddRange(UserAgentUtility.GetDefaultRestUserAgent());
userAgentValues.AddRange(HostContext.UserAgents);
this._websocketClient.Options.SetRequestHeader("User-Agent", string.Join(" ", userAgentValues.Select(x => x.ToString())));
// during initialization, retry upto 3 times to setup connection
this._websocketConnectTask = ConnectWebSocketClient(liveConsoleFeedUrl, delay, retryConnection);
}
private async Task ConnectWebSocketClient(string feedStreamUrl, TimeSpan delay, bool retryConnection = false)
{
bool connected = false;
int retries = 0;
do
{
try
{
Trace.Info($"Attempting to start websocket client with delay {delay}.");
await Task.Delay(delay);
using var connectTimeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30));
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), connectTimeoutTokenSource.Token);
Trace.Info($"Successfully started websocket client.");
connected = true;
}
catch (Exception ex)
{
Trace.Info("Exception caught during websocket client connect, retry connection.");
Trace.Error(ex);
retries++;
this._websocketClient = null;
_lastConnectionFailure = DateTime.Now;
}
} while (retryConnection && !connected && retries < 3);
}
public async Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken)
{
if (_websocketConnectTask != null)
{
await _websocketConnectTask;
}
bool delivered = false;
int retries = 0;
// "_websocketClient != null" implies either: We have a successful connection OR we have to attempt sending again and then reconnect
// ...in other words, if websocket client is null, we will skip sending to websocket
if (_websocketClient != null)
{
var linesWrapper = startLine.HasValue
? new TimelineRecordFeedLinesWrapper(stepId, lines, startLine.Value)
: new TimelineRecordFeedLinesWrapper(stepId, lines);
var jsonData = StringUtil.ConvertToJson(linesWrapper);
var jsonDataBytes = Encoding.UTF8.GetBytes(jsonData);
// break the message into chunks of 1024 bytes
for (var i = 0; i < jsonDataBytes.Length; i += 1 * 1024)
{
var lastChunk = i + (1 * 1024) >= jsonDataBytes.Length;
var chunk = new ArraySegment<byte>(jsonDataBytes, i, Math.Min(1 * 1024, jsonDataBytes.Length - i));
delivered = false;
while (!delivered && retries < 3)
{
try
{
if (_websocketClient != null)
{
await _websocketClient.SendAsync(chunk, WebSocketMessageType.Text, endOfMessage: lastChunk, cancellationToken);
delivered = true;
}
}
catch (Exception ex)
{
var delay = BackoffTimerHelper.GetRandomBackoff(MinDelayForWebsocketReconnect, MaxDelayForWebsocketReconnect);
Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms.");
Trace.Verbose(ex.ToString());
retries++;
InitializeWebsocketClient(_liveConsoleFeedUrl, _token, delay);
}
}
}
}
if (!delivered)
{
// Giving up for now, so next invocation of this method won't attempt to reconnect
_websocketClient = null;
// however if 10 minutes have already passed, let's try reestablish connection again
if (_lastConnectionFailure.HasValue && DateTime.Now > _lastConnectionFailure.Value.AddMinutes(10))
{
// Some minutes passed since we retried last time, try connection again
InitializeWebsocketClient(_liveConsoleFeedUrl, _token, TimeSpan.Zero);
}
}
return delivered;
}
private void CloseWebSocket(WebSocketCloseStatus closeStatus, CancellationToken cancellationToken)
{
try
{
_websocketClient?.CloseOutputAsync(closeStatus, "Closing websocket", cancellationToken);
}
catch (Exception websocketEx)
{
// In some cases this might be okay since the websocket might be open yet, so just close and don't trace exceptions
Trace.Info($"Failed to close websocket gracefully {websocketEx.GetType().Name}");
}
}
}
}

View File

@@ -7,7 +7,6 @@ using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi.RawClient;
namespace GitHub.Runner.Common
@@ -19,16 +18,7 @@ namespace GitHub.Runner.Common
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);
Task CompleteJobAsync(
Guid planId,
Guid jobId,
TaskResult result,
Dictionary<String, VariableValue> outputs,
IList<StepResult> stepResults,
IList<Annotation> jobAnnotations,
CancellationToken token);
Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken token);
}
public sealed class RunServer : RunnerService, IRunServer
@@ -58,30 +48,21 @@ namespace GitHub.Runner.Common
public Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken cancellationToken)
{
CheckConnection();
return RetryRequest<AgentJobRequestMessage>(
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken,
shouldRetry: ex => ex is not TaskOrchestrationJobAlreadyAcquiredException);
var jobMessage = RetryRequest<AgentJobRequestMessage>(
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken);
if (jobMessage == null)
{
throw new TaskOrchestrationJobNotFoundException(id);
}
return jobMessage;
}
public Task CompleteJobAsync(
Guid planId,
Guid jobId,
TaskResult result,
Dictionary<String, VariableValue> outputs,
IList<StepResult> stepResults,
IList<Annotation> jobAnnotations,
CancellationToken cancellationToken)
public Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken cancellationToken)
{
CheckConnection();
return RetryRequest(
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, cancellationToken), cancellationToken);
}
public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
{
CheckConnection();
return RetryRequest<RenewJobResponse>(
async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken);
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, cancellationToken), cancellationToken);
}
}
}

View File

@@ -1,237 +0,0 @@
using GitHub.DistributedTask.WebApi;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.WebApi;
using GitHub.Services.Common;
using GitHub.Runner.Sdk;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Linq;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(RunnerDotcomServer))]
public interface IRunnerDotcomServer : IRunnerService
{
Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName);
Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);
string GetGitHubRequestId(HttpResponseHeaders headers);
}
public enum RequestType
{
Get,
Post,
Patch,
Delete
}
public class RunnerDotcomServer : RunnerService, IRunnerDotcomServer
{
private ITerminal _term;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = hostContext.GetService<ITerminal>();
}
public async Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName = null)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
}
}
else if (path.Length == 2)
{
// repo or enterprise runner.
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}
var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agent pools");
var agents = runnersList.ToTaskAgents();
if (string.IsNullOrEmpty(agentName))
{
return agents;
}
return agents.Where(x => string.Equals(x.Name, agentName, StringComparison.OrdinalIgnoreCase)).ToList();
}
public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups";
}
}
else if (path.Length == 2)
{
// repo or enterprise runner.
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}
var agentPools = await RetryRequest<RunnerGroupList>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agent pools");
return agentPools?.ToAgentPoolList();
}
public async Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
{
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
string githubApiUrl;
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runners/register";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runners/register";
}
var bodyObject = new Dictionary<string, Object>()
{
{"url", githubUrl},
{"group_id", runnerGroupId},
{"name", agent.Name},
{"version", agent.Version},
{"updates_disabled", agent.DisableUpdate},
{"ephemeral", agent.Ephemeral},
{"labels", agent.Labels},
{"public_key", publicKey}
};
var body = new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json");
return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
}
private async Task<T> RetryRequest<T>(string githubApiUrl, string githubToken, RequestType requestType, int maxRetryAttemptsCount = 5, string errorMessage = null, StringContent body = null)
{
int retry = 0;
while (true)
{
retry++;
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
var responseStatus = System.Net.HttpStatusCode.OK;
try
{
HttpResponseMessage response = null;
if (requestType == RequestType.Get)
{
response = await httpClient.GetAsync(githubApiUrl);
}
else
{
response = await httpClient.PostAsync(githubApiUrl, body);
}
if (response != null)
{
responseStatus = response.StatusCode;
var githubRequestId = GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from '{requestType.ToString()} {githubApiUrl}' ({githubRequestId})");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<T>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from '{requestType.ToString()} {githubApiUrl}' (Request Id: {githubRequestId})");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
}
catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound)
{
Trace.Error($"{errorMessage} -- Attempt: {retry}");
Trace.Error(ex);
}
}
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
Trace.Info($"Retrying in {backOff.Seconds} seconds");
await Task.Delay(backOff);
}
}
public string GetGitHubRequestId(HttpResponseHeaders headers)
{
if (headers.TryGetValues("x-github-request-id", out var headerValues))
{
return headerValues.FirstOrDefault();
}
return string.Empty;
}
}
}
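The RunnerDotcomServer service deleted above wraps each GitHub API call in a retry loop that backs off for a random one to five seconds between attempts and gives up immediately on a 404. A rough sketch of that loop follows, using plain HttpClient; the three-attempt default and the backoff range mirror the deleted code, while the method name and everything else are illustrative assumptions.
using System;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;

// Sketch: retry a GET with a random backoff, giving up immediately on 404.
static async Task<string> GetWithRetryAsync(HttpClient client, string url, int maxAttempts = 3)
{
    var random = new Random();
    for (var attempt = 1; ; attempt++)
    {
        var status = HttpStatusCode.OK;
        try
        {
            using var response = await client.GetAsync(url);
            status = response.StatusCode;
            response.EnsureSuccessStatusCode();
            return await response.Content.ReadAsStringAsync();
        }
        catch (Exception) when (attempt < maxAttempts && status != HttpStatusCode.NotFound)
        {
            // random backoff between 1 and 5 seconds, mirroring BackoffTimerHelper.GetRandomBackoff above
            var backoff = TimeSpan.FromSeconds(random.Next(1, 6));
            await Task.Delay(backoff);
        }
    }
}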

View File

@@ -1,4 +1,4 @@
using GitHub.DistributedTask.WebApi;
using GitHub.DistributedTask.WebApi;
using System;
using System.Collections.Generic;
using System.Threading;

View File

@@ -80,11 +80,10 @@ namespace GitHub.Runner.Common
}
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxRetryAttemptsCount);
}
protected async Task<T> RetryRequest<T>(Func<Task<T>> func,
CancellationToken cancellationToken,
int maxRetryAttemptsCount = 5,
Func<Exception, bool> shouldRetry = null
int maxRetryAttemptsCount = 5
)
{
var retryCount = 0;
@@ -97,7 +96,7 @@ namespace GitHub.Runner.Common
return await func();
}
// TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
catch (Exception ex) when (retryCount < maxRetryAttemptsCount && (shouldRetry == null || shouldRetry(ex)))
catch (Exception ex) when (retryCount < maxRetryAttemptsCount)
{
Trace.Error("Catch exception during request");
Trace.Error(ex);

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
@@ -93,4 +93,4 @@ namespace GitHub.Runner.Common
IndentLevel--;
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Concurrent;
using System.Diagnostics;
using GitHub.DistributedTask.Logging;

View File

@@ -1,4 +1,4 @@
using Newtonsoft.Json;
using Newtonsoft.Json;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;

View File

@@ -1,8 +1,8 @@
// Represents absence of value.
// Represents absence of value.
namespace GitHub.Runner.Common
{
public readonly struct Unit
{
public static readonly Unit Value = default;
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Sdk;

View File

@@ -4,7 +4,7 @@
public static class EnumUtil
{
public static T? TryParse<T>(string value) where T : struct
public static T? TryParse<T>(string value) where T: struct
{
T val;
if (Enum.TryParse(value ?? string.Empty, ignoreCase: true, result: out val))

View File

@@ -1,14 +0,0 @@
namespace GitHub.Runner.Common.Util
{
using System;
using GitHub.DistributedTask.WebApi;
public static class MessageUtil
{
public static bool IsRunServiceJob(string messageType)
{
return string.Equals(messageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase);
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
namespace GitHub.Runner.Common.Util
{

View File

@@ -1,209 +0,0 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Runner.Common.Util;
using GitHub.Services.OAuth;
namespace GitHub.Runner.Listener
{
public sealed class BrokerMessageListener : RunnerService, IMessageListener
{
private RunnerSettings _settings;
private ITerminal _term;
private TimeSpan _getNextMessageRetryInterval;
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
private IBrokerServer _brokerServer;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = HostContext.GetService<ITerminal>();
_brokerServer = HostContext.GetService<IBrokerServer>();
}
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
{
await RefreshBrokerConnection();
return await Task.FromResult(true);
}
public async Task DeleteSessionAsync()
{
await Task.CompletedTask;
}
public void OnJobStatus(object sender, JobStatusEventArgs e)
{
Trace.Info("Received job status event. JobState: {0}", e.Status);
runnerStatus = e.Status;
try
{
_getMessagesTokenSource?.Cancel();
}
catch (ObjectDisposedException)
{
Trace.Info("_getMessagesTokenSource is already disposed.");
}
}
public async Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token)
{
bool encounteringError = false;
int continuousError = 0;
Stopwatch heartbeat = new();
heartbeat.Restart();
var maxRetryCount = 10;
while (true)
{
TaskAgentMessage message = null;
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
try
{
message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token, runnerStatus, BuildConstants.RunnerPackage.Version);
if (message == null)
{
continue;
}
return message;
}
catch (OperationCanceledException) when (_getMessagesTokenSource.Token.IsCancellationRequested && !token.IsCancellationRequested)
{
Trace.Info("Get messages has been cancelled using local token source. Continue to get messages with new status.");
continue;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info("Get next message has been cancelled.");
throw;
}
catch (TaskAgentAccessTokenExpiredException)
{
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
throw;
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during get next message.");
Trace.Error(ex);
if (!IsGetNextMessageExceptionRetriable(ex))
{
throw;
}
else
{
continuousError++;
//retry after a random backoff to avoid service throttling
//in case a service error happens and all agents get kicked off the long poll and then try to reconnect at the same time.
if (continuousError <= 5)
{
// random backoff [15, 30]
_getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30), _getNextMessageRetryInterval);
}
else if (continuousError >= maxRetryCount)
{
throw;
}
else
{
// more aggressive backoff [30, 60]
_getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(30), TimeSpan.FromSeconds(60), _getNextMessageRetryInterval);
}
if (!encounteringError)
{
//print error only on the first consecutive error
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
encounteringError = true;
}
// re-create VssConnection before next retry
await RefreshBrokerConnection();
Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
await HostContext.Delay(_getNextMessageRetryInterval, token);
}
}
finally
{
_getMessagesTokenSource.Dispose();
}
if (message == null)
{
if (heartbeat.Elapsed > TimeSpan.FromMinutes(30))
{
Trace.Info($"No message retrieved within last 30 minutes.");
heartbeat.Restart();
}
else
{
Trace.Verbose($"No message retrieved.");
}
continue;
}
Trace.Info($"Message '{message.MessageId}' received.");
}
}
public async Task DeleteMessageAsync(TaskAgentMessage message)
{
await Task.CompletedTask;
}
private bool IsGetNextMessageExceptionRetriable(Exception ex)
{
if (ex is TaskAgentNotFoundException ||
ex is TaskAgentPoolNotFoundException ||
ex is TaskAgentSessionExpiredException ||
ex is AccessDeniedException ||
ex is VssUnauthorizedException)
{
Trace.Info($"Non-retriable exception: {ex.Message}");
return false;
}
else
{
Trace.Info($"Retriable exception: {ex.Message}");
return true;
}
}
private async Task RefreshBrokerConnection()
{
var configManager = HostContext.GetService<IConfigurationManager>();
_settings = configManager.LoadSettings();
if (_settings.ServerUrlV2 == null)
{
throw new InvalidOperationException("ServerUrlV2 is not set");
}
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials creds = credMgr.LoadCredentials();
await _brokerServer.ConnectAsync(new Uri(_settings.ServerUrlV2), creds);
}
}
}
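The BrokerMessageListener removed above escalates its long-poll retry delay as consecutive errors accumulate: a random 15 to 30 second backoff for the first five failures, 30 to 60 seconds afterwards, and a hard stop once the maximum retry count (ten) is reached. A small sketch of that escalation policy, with a hypothetical NextDelay helper standing in for BackoffTimerHelper.GetRandomBackoff:
using System;

// Sketch: pick the next polling backoff based on how many consecutive errors occurred.
static TimeSpan NextDelay(int continuousErrors, Random random)
{
    if (continuousErrors <= 5)
    {
        // early failures: random backoff in [15, 30] seconds
        return TimeSpan.FromSeconds(random.Next(15, 31));
    }

    // persistent failures: more aggressive random backoff in [30, 60] seconds;
    // the caller rethrows once continuousErrors reaches the max retry count (10 above).
    return TimeSpan.FromSeconds(random.Next(30, 61));
}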

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
@@ -90,4 +90,4 @@ namespace GitHub.Runner.Listener.Check
return result;
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Diagnostics.Tracing;
using System.IO;
@@ -415,4 +415,4 @@ namespace GitHub.Runner.Listener.Check
}
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

View File

@@ -1,4 +1,4 @@
using System.Collections.Generic;
using System.Collections.Generic;
using System.Threading.Tasks;
using GitHub.Runner.Common;
@@ -27,4 +27,4 @@ namespace GitHub.Runner.Listener.Check
public List<string> Logs { get; set; }
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
@@ -56,4 +56,4 @@ namespace GitHub.Runner.Listener.Check
return result;
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

View File

@@ -1,4 +1,4 @@
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Common.Util;
using System;
using System.Collections;
@@ -29,8 +29,8 @@ namespace GitHub.Runner.Listener
private readonly Dictionary<string, string[]> validOptions = new()
{
// Valid configure flags and args
[Constants.Runner.CommandLine.Commands.Configure] =
new string[]
[Constants.Runner.CommandLine.Commands.Configure] =
new string[]
{
Constants.Runner.CommandLine.Flags.DisableUpdate,
Constants.Runner.CommandLine.Flags.Ephemeral,
@@ -38,7 +38,6 @@ namespace GitHub.Runner.Listener
Constants.Runner.CommandLine.Flags.Replace,
Constants.Runner.CommandLine.Flags.RunAsService,
Constants.Runner.CommandLine.Flags.Unattended,
Constants.Runner.CommandLine.Flags.NoDefaultLabels,
Constants.Runner.CommandLine.Args.Auth,
Constants.Runner.CommandLine.Args.Labels,
Constants.Runner.CommandLine.Args.MonitorSocketAddress,
@@ -86,7 +85,6 @@ namespace GitHub.Runner.Listener
public bool Ephemeral => TestFlag(Constants.Runner.CommandLine.Flags.Ephemeral);
public bool GenerateServiceConfig => TestFlag(Constants.Runner.CommandLine.Flags.GenerateServiceConfig);
public bool Help => TestFlag(Constants.Runner.CommandLine.Flags.Help);
public bool NoDefaultLabels => TestFlag(Constants.Runner.CommandLine.Flags.NoDefaultLabels);
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version);
public bool RemoveLocalConfig => TestFlag(Constants.Runner.CommandLine.Flags.Local);
@@ -184,7 +182,7 @@ namespace GitHub.Runner.Listener
{
command = Constants.Runner.CommandLine.Commands.Warmup;
}
return command;
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
@@ -31,14 +31,12 @@ namespace GitHub.Runner.Listener.Configuration
{
private IConfigurationStore _store;
private IRunnerServer _runnerServer;
private IRunnerDotcomServer _dotcomServer;
private ITerminal _term;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_runnerServer = HostContext.GetService<IRunnerServer>();
_dotcomServer = HostContext.GetService<IRunnerDotcomServer>();
Trace.Verbose("Creating _store");
_store = hostContext.GetService<IConfigurationStore>();
Trace.Verbose("store created");
@@ -115,7 +113,6 @@ namespace GitHub.Runner.Listener.Configuration
ICredentialProvider credProvider = null;
VssCredentials creds = null;
_term.WriteSection("Authentication");
string registerToken = string.Empty;
while (true)
{
// When testing against a dev deployment of Actions Service, set this environment variable
@@ -133,11 +130,9 @@ namespace GitHub.Runner.Listener.Configuration
else
{
runnerSettings.GitHubUrl = inputUrl;
registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
var registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
runnerSettings.ServerUrl = authResult.TenantUrl;
runnerSettings.UseV2Flow = authResult.UseV2Flow;
Trace.Info($"Using V2 flow: {runnerSettings.UseV2Flow}");
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
}
@@ -181,11 +176,9 @@ namespace GitHub.Runner.Listener.Configuration
// We want to use the native CSP of the platform for storage, so we use the RSACSP directly
RSAParameters publicKey;
var keyManager = HostContext.GetService<IRSAKeyManager>();
string publicKeyXML;
using (var rsa = keyManager.CreateKey())
{
publicKey = rsa.ExportParameters(false);
publicKeyXML = rsa.ToXmlString(includePrivateParameters: false);
}
_term.WriteSection("Runner Registration");
@@ -193,17 +186,9 @@ namespace GitHub.Runner.Listener.Configuration
// If we have more than one runner group available, allow the user to specify which one to be added into
string poolName = null;
TaskAgentPool agentPool = null;
List<TaskAgentPool> agentPools;
if (runnerSettings.UseV2Flow)
{
agentPools = await _dotcomServer.GetRunnerGroupsAsync(runnerSettings.GitHubUrl, registerToken);
}
else
{
agentPools = await _runnerServer.GetAgentPoolsAsync();
}
List<TaskAgentPool> agentPools = await _runnerServer.GetAgentPoolsAsync();
TaskAgentPool defaultPool = agentPools?.Where(x => x.IsInternal).FirstOrDefault();
if (agentPools?.Where(x => !x.IsHosted).Count() > 0)
{
poolName = command.GetRunnerGroupName(defaultPool?.Name);
@@ -241,16 +226,8 @@ namespace GitHub.Runner.Listener.Configuration
var userLabels = command.GetLabels();
_term.WriteLine();
List<TaskAgent> agents;
if (runnerSettings.UseV2Flow)
{
agents = await _dotcomServer.GetRunnersAsync(runnerSettings.PoolId, runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
}
else
{
agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
}
var agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
Trace.Verbose("Returns {0} agents", agents.Count);
agent = agents.FirstOrDefault();
if (agent != null)
@@ -259,7 +236,7 @@ namespace GitHub.Runner.Listener.Configuration
if (command.GetReplace())
{
// Update existing agent with new PublicKey, agent version.
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate, command.NoDefaultLabels);
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
try
{
@@ -293,27 +270,11 @@ namespace GitHub.Runner.Listener.Configuration
else
{
// Create a new agent.
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate, command.NoDefaultLabels);
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
try
{
if (runnerSettings.UseV2Flow)
{
var runner = await _dotcomServer.AddRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
agent.Id = runner.Id;
agent.Authorization = new TaskAgentAuthorization()
{
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
};
}
else
{
agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
}
agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
if (command.DisableUpdate &&
command.DisableUpdate != agent.DisableUpdate)
{
@@ -364,28 +325,24 @@ namespace GitHub.Runner.Listener.Configuration
}
// Test the agent connection to detect any potential connection issue, like local clock skew that causes OAuth token expiration.
if (!runnerSettings.UseV2Flow)
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials credential = credMgr.LoadCredentials();
try
{
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials credential = credMgr.LoadCredentials();
try
{
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
// ConnectAsync() hits _apis/connectionData which is an anonymous endpoint
// Need to hit an authenticate endpoint to trigger OAuth token exchange.
await _runnerServer.GetAgentPoolsAsync();
_term.WriteSuccessMessage("Runner connection is good");
}
catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is"))
{
// there are two exception messages the server sends that indicate clock skew.
// 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
Trace.Error("Catch exception during test agent connection.");
Trace.Error(ex);
throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
}
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
// ConnectAsync() hits _apis/connectionData which is an anonymous endpoint
// Need to hit an authenticate endpoint to trigger OAuth token exchange.
await _runnerServer.GetAgentPoolsAsync();
_term.WriteSuccessMessage("Runner connection is good");
}
catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is"))
{
// there are two exception messages the server sends that indicate clock skew.
// 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
Trace.Error("Catch exception during test agent connection.");
Trace.Error(ex);
throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
}
_term.WriteSection("Runner settings");
@@ -554,7 +511,7 @@ namespace GitHub.Runner.Listener.Configuration
}
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate, bool noDefaultLabels)
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
{
ArgUtil.NotNull(agent, nameof(agent));
agent.Authorization = new TaskAgentAuthorization
@@ -571,16 +528,9 @@ namespace GitHub.Runner.Listener.Configuration
agent.Labels.Clear();
if (!noDefaultLabels)
{
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
}
else if (userLabels.Count == 0)
{
throw new NotSupportedException("Disabling default labels via --no-default-labels without specifying --labels is not supported");
}
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
foreach (var userLabel in userLabels)
{
@@ -590,7 +540,7 @@ namespace GitHub.Runner.Listener.Configuration
return agent;
}
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate, bool noDefaultLabels)
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
{
TaskAgent agent = new(agentName)
{
@@ -605,16 +555,9 @@ namespace GitHub.Runner.Listener.Configuration
DisableUpdate = disableUpdate
};
if (!noDefaultLabels)
{
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
}
else if (userLabels.Count == 0)
{
throw new NotSupportedException("Disabling default labels via --no-default-labels without specifying --labels is not supported");
}
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
foreach (var userLabel in userLabels)
{
@@ -709,7 +652,7 @@ namespace GitHub.Runner.Listener.Configuration
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
responseStatus = response.StatusCode;
var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
var githubRequestId = GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
@@ -772,7 +715,7 @@ namespace GitHub.Runner.Listener.Configuration
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
responseStatus = response.StatusCode;
var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
var githubRequestId = GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
@@ -801,5 +744,14 @@ namespace GitHub.Runner.Listener.Configuration
}
return null;
}
private string GetGitHubRequestId(HttpResponseHeaders headers)
{
if (headers.TryGetValues("x-github-request-id", out var headerValues))
{
return headerValues.FirstOrDefault();
}
return string.Empty;
}
}
}
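The hunks above revert the --no-default-labels support, so after this change the three system labels are always applied before any user-supplied labels. A short sketch of the resulting label assembly, using plain strings instead of the runner's AgentLabel and LabelType types; the method name is an illustrative assumption:
using System.Collections.Generic;

// Sketch: assemble the runner's label set, system defaults first, then user labels.
static List<string> BuildLabels(IEnumerable<string> userLabels, string os, string osArch)
{
    var labels = new List<string>
    {
        "self-hosted", // always added once --no-default-labels is removed
        os,            // e.g. "Linux"
        osArch,        // e.g. "X64"
    };
    labels.AddRange(userLabels);
    return labels;
}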

View File

@@ -20,8 +20,8 @@ namespace GitHub.Runner.Listener.Configuration
{
public static readonly Dictionary<string, Type> CredentialTypes = new(StringComparer.OrdinalIgnoreCase)
{
{ Constants.Configuration.OAuth, typeof(OAuthCredential) },
{ Constants.Configuration.OAuthAccessToken, typeof(OAuthAccessTokenCredential) },
{ Constants.Configuration.OAuth, typeof(OAuthCredential)},
{ Constants.Configuration.OAuthAccessToken, typeof(OAuthAccessTokenCredential)},
};
public ICredentialProvider GetCredentialProvider(string credType)
@@ -93,9 +93,6 @@ namespace GitHub.Runner.Listener.Configuration
[DataMember(Name = "token")]
public string Token { get; set; }
[DataMember(Name = "use_v2_flow")]
public bool UseV2Flow { get; set; }
public VssCredentials ToVssCredentials()
{
ArgUtil.NotNullOrEmpty(TokenSchema, nameof(TokenSchema));

View File

@@ -1,4 +1,4 @@
#if OS_WINDOWS
#if OS_WINDOWS
#pragma warning disable CA1416
using System;
using System.Collections;

View File

@@ -1,4 +1,4 @@
using GitHub.Runner.Common;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System;

View File

@@ -1,4 +1,4 @@
#if OS_WINDOWS
#if OS_WINDOWS
using System.IO;
using System.Security.Cryptography;
using System.Text;

View File

@@ -1,4 +1,4 @@
#if OS_LINUX || OS_OSX
#if OS_LINUX || OS_OSX
using System;
using System.IO;
using System.Security.Cryptography;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Linq;
using System.Text.RegularExpressions;
using GitHub.Runner.Common;
@@ -68,7 +68,7 @@ namespace GitHub.Runner.Listener.Configuration
// Let's add a suffix with a random number to reduce the chance of collisions between runner names once we truncate
var random = new Random();
var num = random.Next(1000, 9999).ToString();
runnerNameSubstring += $"-{num}";
runnerNameSubstring +=$"-{num}";
serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgNameSubstring, runnerNameSubstring);
}
@@ -76,12 +76,12 @@ namespace GitHub.Runner.Listener.Configuration
Trace.Info($"Service name '{serviceName}' display name '{serviceDisplayName}' will be used for service configuration.");
}
#if (OS_LINUX || OS_OSX)
const int MaxServiceNameLength = 150;
const int MaxRepoOrgCharacters = 70;
#elif OS_WINDOWS
#if (OS_LINUX || OS_OSX)
const int MaxServiceNameLength = 150;
const int MaxRepoOrgCharacters = 70;
#elif OS_WINDOWS
const int MaxServiceNameLength = 80;
const int MaxRepoOrgCharacters = 45;
#endif
#endif
}
}

View File

@@ -1,4 +1,4 @@
#if OS_LINUX
#if OS_LINUX
using System;
using System.Collections.Generic;
using System.IO;

View File

@@ -1,4 +1,4 @@
using GitHub.Runner.Common.Util;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using System;
using System.Linq;

View File

@@ -1,4 +1,4 @@
#if OS_WINDOWS
#if OS_WINDOWS
#pragma warning disable CA1416
using System;
using System.IO;

View File

@@ -1,3 +0,0 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
@@ -7,7 +7,6 @@ using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
@@ -15,7 +14,6 @@ using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using GitHub.Services.WebApi.Jwt;
using Sdk.RSWebApi.Contracts;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Listener
@@ -60,8 +58,6 @@ namespace GitHub.Runner.Listener
public event EventHandler<JobStatusEventArgs> JobStatus;
private bool _isRunServiceJob;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -90,8 +86,6 @@ namespace GitHub.Runner.Listener
{
Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
_isRunServiceJob = MessageUtil.IsRunServiceJob(jobRequestMessage.MessageType);
WorkerDispatcher currentDispatch = null;
if (_jobDispatchedQueue.Count > 0)
{
@@ -245,13 +239,6 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
Trace.Error($"We are not yet checking the state of jobrequest {jobDispatch.JobId} status. Cancel running worker right away.");
jobDispatch.WorkerCancellationTokenSource.Cancel();
return;
}
// based on the current design, server will only send one job for a given runner at a time.
// if the runner received a new job request while a previous job request is still running, this typically indicates two situations
// 1. a runner bug caused a server and runner mismatch on the state of the job request, e.g. the runner didn't renew the jobrequest
@@ -373,8 +360,6 @@ namespace GitHub.Runner.Listener
TaskCompletionSource<int> firstJobRequestRenewed = new();
var notification = HostContext.GetService<IJobNotification>();
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
// lock renew cancellation token.
using (var lockRenewalTokenSource = new CancellationTokenSource())
using (var workerProcessCancelTokenSource = new CancellationTokenSource())
@@ -384,7 +369,7 @@ namespace GitHub.Runner.Listener
// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(message, systemConnection, _poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// wait till first renew succeed or job request is cancelled
// not even start worker if the first renew fail
@@ -406,7 +391,7 @@ namespace GitHub.Runner.Listener
await renewJobRequest;
// complete job request with result Cancelled
await CompleteJobRequestAsync(_poolId, message, systemConnection, lockToken, TaskResult.Canceled);
await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
return;
}
@@ -441,7 +426,7 @@ namespace GitHub.Runner.Listener
{
workerOutput.Add(stdout.Data);
}
if (printToStdout)
{
term.WriteLine(stdout.Data, skipTracing: true);
@@ -523,6 +508,7 @@ namespace GitHub.Runner.Listener
// we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
@@ -545,14 +531,18 @@ namespace GitHub.Runner.Listener
detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicates the worker encountered an unhandled exception or app crash, attaching worker stdout/stderr to the JobRequest result.");
var jobServer = await InitializeJobServerAsync(systemConnection);
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
await LogWorkerProcessUnhandledException(jobServer, message, detailInfo);
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase))
{
Trace.Info($"Finish job with result 'Failed' due to IOException.");
await ForceFailJob(jobServer, message, detailInfo);
await ForceFailJob(jobServer, message);
}
}
@@ -567,7 +557,7 @@ namespace GitHub.Runner.Listener
await renewJobRequest;
// complete job request
await CompleteJobRequestAsync(_poolId, message, systemConnection, lockToken, result, detailInfo);
await CompleteJobRequestAsync(_poolId, message, lockToken, result, detailInfo);
// print out unhandled exception happened in worker after we complete job request.
// when we run out of disk space, report back to server has higher priority.
@@ -664,7 +654,7 @@ namespace GitHub.Runner.Listener
await renewJobRequest;
// complete job request
await CompleteJobRequestAsync(_poolId, message, systemConnection, lockToken, resultOnAbandonOrCancel);
await CompleteJobRequestAsync(_poolId, message, lockToken, resultOnAbandonOrCancel);
}
finally
{
@@ -685,128 +675,9 @@ namespace GitHub.Runner.Listener
}
}
internal async Task RenewJobRequestAsync(Pipelines.AgentJobRequestMessage message, ServiceEndpoint systemConnection, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
if (this._isRunServiceJob)
{
var runServer = await GetRunServerAsync(systemConnection);
await RenewJobRequestAsync(runServer, message.Plan.PlanId, message.JobId, firstJobRequestRenewed, token);
}
else
{
var runnerServer = HostContext.GetService<IRunnerServer>();
await RenewJobRequestAsync(runnerServer, poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, token);
}
}
private async Task RenewJobRequestAsync(IRunServer runServer, Guid planId, Guid jobId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
TaskAgentJobRequest request = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
// renew lock during job running.
// stop renew only if cancellation token for lock renew task been signal or exception still happen after retry.
while (!token.IsCancellationRequested)
{
try
{
var renewResponse = await runServer.RenewJobAsync(planId, jobId, token);
Trace.Info($"Successfully renew job {jobId}, job is valid till {renewResponse.LockedUntil}");
if (!firstJobRequestRenewed.Task.IsCompleted)
{
// fire first renew succeed event.
firstJobRequestRenewed.TrySetResult(0);
}
if (encounteringError > 0)
{
encounteringError = 0;
HostContext.WritePerfCounter("JobRenewRecovered");
}
// renew again after 60 sec delay
await HostContext.Delay(TimeSpan.FromSeconds(60), token);
}
catch (TaskOrchestrationJobNotFoundException)
{
// no need for retry. the job is not valid anymore.
Trace.Info($"TaskAgentJobNotFoundException received when renew job {jobId}, job is no longer valid, stop renew job request.");
return;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
// OperationCanceledException may be caused by an http timeout or _lockRenewalTokenSource.Cancel();
// Stop renew only on cancellation token fired.
Trace.Info($"job renew has been cancelled, stop renew job {jobId}.");
return;
}
catch (Exception ex)
{
Trace.Error($"Catch exception during renew runner job {jobId}.");
Trace.Error(ex);
encounteringError++;
// retry
TimeSpan remainingTime = TimeSpan.Zero;
if (!firstJobRequestRenewed.Task.IsCompleted)
{
// retry 5 times every 10 sec for the first renew
if (firstRenewRetryLimit-- > 0)
{
remainingTime = TimeSpan.FromSeconds(10);
}
}
else
{
// retry until we reach LockedUntil plus a 5 minute extra buffer.
remainingTime = request.LockedUntil.Value + TimeSpan.FromMinutes(5) - DateTime.UtcNow;
}
if (remainingTime > TimeSpan.Zero)
{
TimeSpan delayTime;
if (!firstJobRequestRenewed.Task.IsCompleted)
{
Trace.Info($"Retrying lock renewal for job {jobId}. The first job renew request has failed.");
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10));
}
else
{
Trace.Info($"Retrying lock renewal for job {jobId}. Job is valid until {request.LockedUntil.Value}.");
if (encounteringError > 5)
{
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30));
}
else
{
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
}
}
try
{
// back-off before next retry.
await HostContext.Delay(delayTime, token);
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info($"job renew has been cancelled, stop renew job {jobId}.");
}
}
else
{
Trace.Info($"Lock renewal has run out of retry, stop renew lock for job {jobId}.");
HostContext.WritePerfCounter("JobRenewReachLimit");
return;
}
}
}
}
private async Task RenewJobRequestAsync(IRunnerServer runnerServer, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
var runnerServer = HostContext.GetService<IRunnerServer>();
TaskAgentJobRequest request = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
@@ -969,93 +840,90 @@ namespace GitHub.Runner.Listener
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection));
ArgUtil.NotNull(systemConnection, nameof(systemConnection));
var server = await InitializeJobServerAsync(systemConnection);
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
if (server is IJobServer jobServer)
await jobServer.ConnectAsync(jobConnection);
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
var updatedRecords = new List<TimelineRecord>();
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
var logRecords = new Dictionary<Guid, TimelineRecord>();
foreach (var log in logs)
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
var updatedRecords = new List<TimelineRecord>();
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
var logRecords = new Dictionary<Guid, TimelineRecord>();
foreach (var log in logs)
var logName = Path.GetFileNameWithoutExtension(log);
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
if (logNameParts.Length != 3)
{
var logName = Path.GetFileNameWithoutExtension(log);
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
if (logNameParts.Length != 3)
{
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
var logPageSeperator = logName.IndexOf('_');
var logRecordId = Guid.Empty;
var pageNumber = 0;
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
var logPageSeperator = logName.IndexOf('_');
var logRecordId = Guid.Empty;
var pageNumber = 0;
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
{
Trace.Warning($"log file '{log}' does not belong to the current job");
continue;
}
if (!Guid.TryParse(logNameParts[1], out logRecordId))
{
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
if (!int.TryParse(logNameParts[2], out pageNumber))
{
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
if (record != null)
{
if (!logPages.ContainsKey(record.Id))
{
logPages[record.Id] = new Dictionary<int, string>();
logRecords[record.Id] = record;
}
logPages[record.Id][pageNumber] = log;
}
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
{
Trace.Warning($"log file '{log}' does not belong to the current job");
continue;
}
foreach (var pages in logPages)
if (!Guid.TryParse(logNameParts[1], out logRecordId))
{
var record = logRecords[pages.Key];
if (record.Log == null)
{
// Create the log
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
// Need to post timeline record updates to reflect the log creation
updatedRecords.Add(record.Clone());
}
for (var i = 1; i <= pages.Value.Count; i++)
{
var logFile = pages.Value[i];
// Upload the contents
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
}
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
IOUtil.DeleteFile(logFile);
}
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
if (updatedRecords.Count > 0)
if (!int.TryParse(logNameParts[2], out pageNumber))
{
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
if (record != null)
{
if (!logPages.ContainsKey(record.Id))
{
logPages[record.Id] = new Dictionary<int, string>();
logRecords[record.Id] = record;
}
logPages[record.Id][pageNumber] = log;
}
}
else
foreach (var pages in logPages)
{
Trace.Info("Job server does not support log upload yet.");
var record = logRecords[pages.Key];
if (record.Log == null)
{
// Create the log
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
// Need to post timeline record updates to reflect the log creation
updatedRecords.Add(record.Clone());
}
for (var i = 1; i <= pages.Value.Count; i++)
{
var logFile = pages.Value[i];
// Upload the contents
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
}
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
IOUtil.DeleteFile(logFile);
}
}
if (updatedRecords.Count > 0)
{
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
}
}
catch (Exception ex)
@@ -1065,7 +933,7 @@ namespace GitHub.Runner.Listener
}
}
private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, ServiceEndpoint systemConnection, Guid lockToken, TaskResult result, string detailInfo = null)
private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, Guid lockToken, TaskResult result, string detailInfo = null)
{
Trace.Entering();
@@ -1075,28 +943,6 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
var runServer = await GetRunServerAsync(systemConnection);
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
var unhandledAnnotation = unhandledExceptionIssue.ToAnnotation();
var jobAnnotations = new List<Annotation>();
if (unhandledAnnotation.HasValue)
{
jobAnnotations.Add(unhandledAnnotation.Value);
}
try
{
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
}
return;
}
var runnerServer = HostContext.GetService<IRunnerServer>();
int completeJobRequestRetryLimit = 5;
List<Exception> exceptions = new();
@@ -1133,102 +979,66 @@ namespace GitHub.Runner.Listener
}
// log an error issue to job level timeline record
private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
private async Task LogWorkerProcessUnhandledException(IJobServer jobServer, Pipelines.AgentJobRequestMessage message, string errorMessage)
{
if (server is IJobServer jobServer)
try
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
try
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
if (!string.IsNullOrEmpty(errorMessage) &&
message.Variables.TryGetValue("DistributedTask.EnableRunnerIPCDebug", out var enableRunnerIPCDebug) &&
StringUtil.ConvertToBoolean(enableRunnerIPCDebug.Value))
{
// the trace should be best effort and not affect any job result
var match = _invalidJsonRegex.Match(errorMessage);
if (match.Success &&
match.Groups.Count == 2)
{
var jsonPosition = int.Parse(match.Groups[1].Value);
var serializedJobMessage = JsonUtility.ToString(message);
var originalJson = serializedJobMessage.Substring(jsonPosition - 10, 20);
errorMessage = $"Runner sent Json at position '{jsonPosition}': {originalJson} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(originalJson))})\n{errorMessage}";
}
}
}
catch (Exception ex)
{
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
errorMessage = $"Fail to check json IPC error: {ex.Message}\n{errorMessage}";
}
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
else
catch (Exception ex)
{
Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", detailInfo);
return;
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
}
}
// raise job completed event to fail the job.
private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
private async Task ForceFailJob(IJobServer jobServer, Pipelines.AgentJobRequestMessage message)
{
if (server is IJobServer jobServer)
try
{
try
{
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
else if (server is IRunServer runServer)
catch (Exception ex)
{
try
{
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
var unhandledAnnotation = unhandledExceptionIssue.ToAnnotation();
var jobAnnotations = new List<Annotation>();
if (unhandledAnnotation.HasValue)
{
jobAnnotations.Add(unhandledAnnotation.Value);
}
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
}
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
else
{
throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported.");
}
}
private async Task<IRunnerService> InitializeJobServerAsync(ServiceEndpoint systemConnection)
{
if (this._isRunServiceJob)
{
return await GetRunServerAsync(systemConnection);
}
else
{
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
return jobServer;
}
}
private async Task<IRunServer> GetRunServerAsync(ServiceEndpoint systemConnection)
{
var runServer = HostContext.GetService<IRunServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
await runServer.ConnectAsync(systemConnection.Url, jobServerCredential);
return runServer;
}
private class WorkerDispatcher : IDisposable

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
@@ -182,7 +182,7 @@ namespace GitHub.Runner.Listener
try
{
_getMessagesTokenSource?.Cancel();
}
}
catch (ObjectDisposedException)
{
Trace.Info("_getMessagesTokenSource is already disposed.");
@@ -245,10 +245,6 @@ namespace GitHub.Runner.Listener
_accessTokenRevoked = true;
throw;
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during get next message.");
@@ -293,7 +289,7 @@ namespace GitHub.Runner.Listener
await HostContext.Delay(_getNextMessageRetryInterval, token);
}
}
finally
finally
{
_getMessagesTokenSource.Dispose();
}

View File

@@ -1,4 +1,4 @@
using GitHub.Runner.Common;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System;
using System.Globalization;
@@ -6,7 +6,6 @@ using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Listener
{
@@ -59,7 +58,7 @@ namespace GitHub.Runner.Listener
terminal.WriteLine("This runner version is built for Windows. Please install a correct build for your OS.");
return Constants.Runner.ReturnCode.TerminatedError;
}
#if ARM64
#if ARM64
// A little hacky, but windows gives no way to differentiate between windows 10 and 11.
// By default only 11 supports native x64 app emulation on arm, so we only want to support windows 11
// https://docs.microsoft.com/en-us/windows/arm/overview#build-windows-apps-that-run-on-arm
@@ -70,7 +69,7 @@ namespace GitHub.Runner.Listener
terminal.WriteLine("Win-arm64 runners require windows 11 or later. Please upgrade your operating system.");
return Constants.Runner.ReturnCode.TerminatedError;
}
#endif
#endif
break;
default:
terminal.WriteLine($"Running the runner on this platform is not supported. The current platform is {RuntimeInformation.OSDescription} and it was built for {Constants.Runner.Platform.ToString()}.");
@@ -138,12 +137,6 @@ namespace GitHub.Runner.Listener
}
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
terminal.WriteError($"An error occured: {e.Message}");
trace.Error(e);
return Constants.Runner.ReturnCode.TerminatedError;
}
catch (Exception e)
{
terminal.WriteError($"An error occurred: {e.Message}");

View File

@@ -1,16 +1,14 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Listener.Check;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
@@ -138,7 +136,7 @@ namespace GitHub.Runner.Listener
if (command.Remove)
{
// only remove local config files and exit
if (command.RemoveLocalConfig)
if(command.RemoveLocalConfig)
{
configManager.DeleteLocalRunnerConfig();
return Constants.Runner.ReturnCode.Success;
@@ -211,16 +209,10 @@ namespace GitHub.Runner.Listener
foreach (var config in jitConfig)
{
var configFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), config.Key);
var configContent = Convert.FromBase64String(config.Value);
#if OS_WINDOWS
if (configFile == HostContext.GetConfigFile(WellKnownConfigFile.RSACredentials))
{
configContent = ProtectedData.Protect(configContent, null, DataProtectionScope.LocalMachine);
}
#endif
File.WriteAllBytes(configFile, configContent);
var configContent = Encoding.UTF8.GetString(Convert.FromBase64String(config.Value));
File.WriteAllText(configFile, configContent, Encoding.UTF8);
File.SetAttributes(configFile, File.GetAttributes(configFile) | FileAttributes.Hidden);
Trace.Info($"Saved {configContent.Length} bytes to '{configFile}'.");
Trace.Info($"Save {configContent.Length} chars to '{configFile}'.");
}
}
catch (Exception ex)
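The left side of this hunk writes the decoded JIT config values back as raw bytes and, on Windows, wraps the RSA credentials file with DPAPI before it reaches disk; the right side writes plain text. A sketch of the decode-and-protect step, gated at runtime here instead of the runner's `OS_WINDOWS` compile-time switch, and assuming the `System.Security.Cryptography.ProtectedData` package is referenced:

```csharp
using System;
using System.IO;
using System.Security.Cryptography; // ProtectedData (Windows only; separate NuGet package on modern .NET)

public static class JitConfigSketch
{
    // Decode one base64 config value and persist it, protecting secrets at rest on Windows.
    public static void SaveConfig(string configFile, string base64Value, bool isCredentialFile)
    {
        byte[] content = Convert.FromBase64String(base64Value);

        if (isCredentialFile && OperatingSystem.IsWindows())
        {
            // DPAPI-protect the RSA key material so only this machine can read it back.
            content = ProtectedData.Protect(content, optionalEntropy: null, DataProtectionScope.LocalMachine);
        }

        File.WriteAllBytes(configFile, content);
        File.SetAttributes(configFile, File.GetAttributes(configFile) | FileAttributes.Hidden);
    }
}
```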
@@ -339,26 +331,13 @@ namespace GitHub.Runner.Listener
}
}
private IMessageListener GetMesageListener(RunnerSettings settings)
{
if (settings.UseV2Flow)
{
Trace.Info($"Using BrokerMessageListener");
var brokerListener = new BrokerMessageListener();
brokerListener.Initialize(HostContext);
return brokerListener;
}
return HostContext.GetService<IMessageListener>();
}
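The removed `GetMesageListener` helper chooses a `BrokerMessageListener` when `UseV2Flow` is set and otherwise falls back to the default listener resolved from the host context. A minimal sketch of that selection, with stand-in listener and settings types instead of the runner's service locator:

```csharp
// Simplified stand-ins; the runner resolves these through its HostContext service locator.
public interface IMessageListener { }
public sealed class BrokerMessageListener : IMessageListener { }
public sealed class DefaultMessageListener : IMessageListener { }
public sealed class RunnerSettings { public bool UseV2Flow { get; set; } }

public static class ListenerSelectionSketch
{
    public static IMessageListener GetMessageListener(RunnerSettings settings)
    {
        // The v2 ("broker") flow gets its own listener implementation.
        return settings.UseV2Flow
            ? new BrokerMessageListener()
            : new DefaultMessageListener();
    }
}
```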
//create worker manager, create message listener and start listening to the queue
private async Task<int> RunAsync(RunnerSettings settings, bool runOnce = false)
{
try
{
Trace.Info(nameof(RunAsync));
_listener = GetMesageListener(settings);
_listener = HostContext.GetService<IMessageListener>();
if (!await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken))
{
return Constants.Runner.ReturnCode.TerminatedError;
@@ -523,7 +502,7 @@ namespace GitHub.Runner.Listener
}
}
// Broker flow
else if (MessageUtil.IsRunServiceJob(message.MessageType))
else if (string.Equals(message.MessageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase))
{
if (autoUpdateInProgress || runOnceJobReceived)
{
@@ -549,17 +528,7 @@ namespace GitHub.Runner.Listener
{
var runServer = HostContext.CreateService<IRunServer>();
await runServer.ConnectAsync(new Uri(messageRef.RunServiceUrl), creds);
try
{
jobRequestMessage =
await runServer.GetJobMessageAsync(messageRef.RunnerRequestId,
messageQueueLoopTokenSource.Token);
}
catch (TaskOrchestrationJobAlreadyAcquiredException)
{
Trace.Info("Job is already acquired, skip this message.");
continue;
}
jobRequestMessage = await runServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
}
jobDispatcher.Run(jobRequestMessage, runOnce);
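The newer side of this hunk wraps `GetJobMessageAsync` in a try/catch so a `TaskOrchestrationJobAlreadyAcquiredException` simply skips the message (`continue`), while the older code let the exception propagate. A hedged sketch of the same idea; the stand-in client below returns `null` instead of continuing a loop, and the exception type is a placeholder:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

// Stand-ins for the run-service client and its "already acquired" error (illustrative only).
public sealed class JobAlreadyAcquiredException : Exception { }
public interface IRunServerClient
{
    Task<string> GetJobMessageAsync(string requestId, CancellationToken token);
}

public static class MessagePollSketch
{
    // Returns null when another runner already acquired the job, so the caller can keep polling.
    public static async Task<string> TryGetJobMessageAsync(IRunServerClient runServer, string requestId, CancellationToken token)
    {
        try
        {
            return await runServer.GetJobMessageAsync(requestId, token);
        }
        catch (JobAlreadyAcquiredException)
        {
            // Someone else picked this job up between the broker notification and our fetch; skip it.
            return null;
        }
    }
}
```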
@@ -683,8 +652,7 @@ Config Options:
--token string Registration token. Required if unattended
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
--runnergroup string Name of the runner group to add this runner to (defaults to the default runner group)
--labels string Custom labels that will be added to the runner. This option is mandatory if --no-default-labels is used.
--no-default-labels Disables adding the default labels: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
--local Removes the runner config files from your local machine. Used as an option to the remove command
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
--replace Replace any existing runner with the same name (default false)

View File

@@ -1,4 +1,4 @@
using System.Runtime.Serialization;
using System.Runtime.Serialization;
namespace GitHub.Runner.Listener
{

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Globalization;
using System.IO;
using System.Reflection;

View File

@@ -682,4 +682,4 @@ namespace GitHub.Runner.Plugins.Artifact
: base(message, inner)
{ }
}
}
}

View File

@@ -1,4 +1,4 @@
using System.Threading;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.Pipelines.WebApi;
using GitHub.Services.WebApi;
@@ -36,7 +36,7 @@ namespace GitHub.Runner.Plugins.Artifact
return await _pipelinesHttpClient.CreateArtifactAsync(
parameters,
pipelineId,
pipelineId,
runId,
cancellationToken: cancellationToken) as Pipelines.ActionsStorageArtifact;
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -97,8 +97,8 @@ namespace GitHub.Runner.Plugins.Artifact
size,
token);
context.Output($"Associated artifact {artifactName} ({artifact.ContainerId}) with run #{buildId}");
context.Output($"Associated artifact {artifactName} ({artifact.ContainerId}) with run #{buildId}");
}
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

View File

@@ -1,4 +1,4 @@
using Pipelines = GitHub.DistributedTask.Pipelines;
using Pipelines = GitHub.DistributedTask.Pipelines;
using System;
using System.Collections.Generic;
using System.Threading;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Sdk;

View File

@@ -1,4 +1,4 @@
using Pipelines = GitHub.DistributedTask.Pipelines;
using Pipelines = GitHub.DistributedTask.Pipelines;
using System;
using System.Collections.Generic;
using System.Threading;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Sdk;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http.Headers;
@@ -220,12 +220,20 @@ namespace GitHub.Runner.Sdk
return input;
}
private Dictionary<string, string> _commandEscapeMappings = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
private Dictionary<string, string> _commandEscapeMappings = new(StringComparer.OrdinalIgnoreCase)
{
{ ";", "%3B" },
{ "\r", "%0D" },
{ "\n", "%0A" },
{ "]", "%5D" },
{
";", "%3B"
},
{
"\r", "%0D"
},
{
"\n", "%0A"
},
{
"]", "%5D"
},
};
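Both sides of this hunk define the same escape table for workflow-command properties (`;`, CR, LF and `]` become percent-encoded); only the initializer formatting differs. A self-contained example of applying such a table, with an illustrative `EscapeProperty` helper that is not the runner's actual method:

```csharp
using System;
using System.Collections.Generic;

public static class CommandEscapeSketch
{
    private static readonly Dictionary<string, string> Mappings = new(StringComparer.OrdinalIgnoreCase)
    {
        { ";", "%3B" },
        { "\r", "%0D" },
        { "\n", "%0A" },
        { "]", "%5D" },
    };

    // Percent-encode characters that would otherwise break the ::command key=value:: syntax.
    public static string EscapeProperty(string value)
    {
        foreach (var (raw, encoded) in Mappings)
        {
            value = value.Replace(raw, encoded);
        }
        return value;
    }

    public static void Main()
    {
        Console.WriteLine(EscapeProperty("line1\nline2;done]")); // line1%0Aline2%3Bdone%5D
    }
}
```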
}
}

View File

@@ -1,4 +1,4 @@
namespace GitHub.Runner.Sdk
namespace GitHub.Runner.Sdk
{
/***
* WARNING: This file is automatically regenerated on layout so the runner can provide version/commit info (do not manually edit it).

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.ComponentModel;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Net;
using System.Text.RegularExpressions;
@@ -164,6 +164,7 @@ namespace GitHub.Runner.Sdk
{
continue;
}
_noProxyList.Add(noProxyInfo);
}
}
@@ -206,11 +207,6 @@ namespace GitHub.Runner.Sdk
{
foreach (var noProxy in _noProxyList)
{
// bypass on wildcard no_proxy
if (string.Equals(noProxy.Host, "*", StringComparison.OrdinalIgnoreCase))
{
return true;
}
var matchHost = false;
var matchPort = false;
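The removed lines short-circuit proxy matching when the `no_proxy` list contains a bare `*`, bypassing the proxy for every host before the host/port comparison that follows. A minimal sketch of that check, assuming a plain list of host patterns and a simplified suffix match in place of the runner's full matching logic:

```csharp
using System;
using System.Collections.Generic;

public static class NoProxySketch
{
    // Returns true when the request host should skip the proxy entirely.
    public static bool IsBypassed(Uri requestUri, IEnumerable<string> noProxyHosts)
    {
        foreach (var noProxy in noProxyHosts)
        {
            // A wildcard entry means "never proxy anything".
            if (string.Equals(noProxy, "*", StringComparison.OrdinalIgnoreCase))
            {
                return true;
            }

            // Simplified stand-in for the host/port matching that follows in this hunk.
            if (requestUri.Host.EndsWith(noProxy, StringComparison.OrdinalIgnoreCase))
            {
                return true;
            }
        }

        return false;
    }
}
```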

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
namespace GitHub.Runner.Sdk

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
@@ -40,19 +40,10 @@ namespace GitHub.Runner.Sdk
File.WriteAllText(path, StringUtil.ConvertToJson(obj), Encoding.UTF8);
}
public static T LoadObject<T>(string path, bool required = false)
public static T LoadObject<T>(string path)
{
string json = File.ReadAllText(path, Encoding.UTF8);
if (required && string.IsNullOrEmpty(json))
{
throw new ArgumentNullException($"File {path} is empty");
}
T result = StringUtil.ConvertFromJson<T>(json);
if (required && result == null)
{
throw new ArgumentException("Converting json to object resulted in a null value");
}
return result;
return StringUtil.ConvertFromJson<T>(json);
}
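The left side of this hunk adds a `required` flag to `LoadObject<T>` so an empty file, or a payload that deserializes to null, fails fast instead of silently returning null. A short sketch of the same guard using `System.Text.Json` rather than the runner's own `StringUtil` helper:

```csharp
using System;
using System.IO;
using System.Text.Json;

public static class LoadObjectSketch
{
    public static T LoadObject<T>(string path, bool required = false)
    {
        string json = File.ReadAllText(path);

        if (required && string.IsNullOrEmpty(json))
        {
            throw new ArgumentException($"File {path} is empty.");
        }

        T result = JsonSerializer.Deserialize<T>(json);

        if (required && result == null)
        {
            throw new ArgumentException($"File {path} did not deserialize to a {typeof(T).Name}.");
        }

        return result;
    }
}
```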
public static string GetSha256Hash(string path)

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
namespace GitHub.Runner.Sdk
{

View File

@@ -1,4 +1,4 @@
using GitHub.Services.WebApi;
using GitHub.Services.WebApi;
using Newtonsoft.Json;
using System;
using System.Globalization;

View File

@@ -11,11 +11,10 @@ namespace GitHub.Runner.Sdk
return false;
}
return
return
string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase) ||
gitHubUrl.Host.EndsWith(".ghe.localhost", StringComparison.OrdinalIgnoreCase) ||
gitHubUrl.Host.EndsWith(".ghe.com", StringComparison.OrdinalIgnoreCase);
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using System.Linq;
@@ -60,7 +60,7 @@ namespace GitHub.Runner.Sdk
trace?.Verbose(ex.ToString());
}
if (matches != null && matches.Length > 0 && IsPathValid(matches.First(), trace))
if (matches != null && matches.Length > 0)
{
trace?.Info($"Location: '{matches.First()}'");
return matches.First();
@@ -86,7 +86,7 @@ namespace GitHub.Runner.Sdk
for (int i = 0; i < pathExtSegments.Length; i++)
{
string fullPath = Path.Combine(pathSegment, $"{command}{pathExtSegments[i]}");
if (matches.Any(p => p.Equals(fullPath, StringComparison.OrdinalIgnoreCase)) && IsPathValid(fullPath, trace))
if (matches.Any(p => p.Equals(fullPath, StringComparison.OrdinalIgnoreCase)))
{
trace?.Info($"Location: '{fullPath}'");
return fullPath;
@@ -105,7 +105,7 @@ namespace GitHub.Runner.Sdk
trace?.Verbose(ex.ToString());
}
if (matches != null && matches.Length > 0 && IsPathValid(matches.First(), trace))
if (matches != null && matches.Length > 0)
{
trace?.Info($"Location: '{matches.First()}'");
return matches.First();
@@ -128,15 +128,5 @@ namespace GitHub.Runner.Sdk
return null;
}
// checks if the file is a symlink and if the symlink`s target exists.
private static bool IsPathValid(string path, ITraceWriter trace = null)
{
var fileInfo = new FileInfo(path);
var linkTargetFullPath = fileInfo.Directory?.FullName + Path.DirectorySeparatorChar + fileInfo.LinkTarget;
if (fileInfo.LinkTarget == null || File.Exists(linkTargetFullPath) || File.Exists(fileInfo.LinkTarget)) return true;
trace?.Info($"the target '{fileInfo.LinkTarget}' of the symbolic link '{path}', does not exist");
return false;
}
}
}

View File

@@ -36,7 +36,7 @@ namespace RunnerService
catch (Win32Exception ex)
{
Console.WriteLine("[ERROR] Unable to create '{0}' event source under 'Application' event log.", RunnerService.EventSourceName);
Console.WriteLine("[ERROR] {0}", ex.Message);
Console.WriteLine("[ERROR] {0}",ex.Message);
Console.WriteLine("[ERROR] Error Code: {0}", ex.ErrorCode);
return 1;
}

View File

@@ -276,7 +276,7 @@ namespace GitHub.Runner.Worker
Message = $"Can't update {blocked} environment variable using ::set-env:: command."
};
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = $"{Constants.Runner.UnsupportedCommand}_{envName}";
context.AddIssue(issue, ExecutionContextLogOptions.Default);
context.AddIssue(issue);
return;
}
@@ -315,7 +315,7 @@ namespace GitHub.Runner.Worker
Message = String.Format(Constants.Runner.UnsupportedCommandMessage, this.Command)
};
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.UnsupportedCommand;
context.AddIssue(issue, ExecutionContextLogOptions.Default);
context.AddIssue(issue);
}
if (!command.Properties.TryGetValue(SetOutputCommandProperties.Name, out string outputName) || string.IsNullOrEmpty(outputName))
@@ -350,7 +350,7 @@ namespace GitHub.Runner.Worker
Message = String.Format(Constants.Runner.UnsupportedCommandMessage, this.Command)
};
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.UnsupportedCommand;
context.AddIssue(issue, ExecutionContextLogOptions.Default);
context.AddIssue(issue);
}
if (!command.Properties.TryGetValue(SaveStateCommandProperties.Name, out string stateName) || string.IsNullOrEmpty(stateName))
@@ -666,7 +666,7 @@ namespace GitHub.Runner.Worker
}
}
context.AddIssue(issue, ExecutionContextLogOptions.Default);
context.AddIssue(issue);
}
public static void ValidateLinesAndColumns(ActionCommand command, IExecutionContext context)

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
@@ -11,14 +11,12 @@ using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container;
using GitHub.Services.Common;
using WebApi = GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Worker
{
@@ -102,19 +100,7 @@ namespace GitHub.Runner.Worker
}
IEnumerable<Pipelines.ActionStep> actions = steps.OfType<Pipelines.ActionStep>();
executionContext.Output("Prepare all required actions");
PrepareActionsState result = new PrepareActionsState();
try
{
result = await PrepareActionsRecursiveAsync(executionContext, state, actions, depth, rootStepId);
}
catch (FailedToResolveActionDownloadInfoException ex)
{
// Log the error and fail the PrepareActionsAsync Initialization.
Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
executionContext.InfrastructureError(ex.Message);
executionContext.Result = TaskResult.Failed;
throw;
}
var result = await PrepareActionsRecursiveAsync(executionContext, state, actions, depth, rootStepId);
if (!FeatureManager.IsContainerHooksEnabled(executionContext.Global.Variables))
{
if (state.ImagesToPull.Count > 0)
@@ -317,28 +303,15 @@ namespace GitHub.Runner.Worker
if (action.Reference.Type == Pipelines.ActionSourceType.ContainerRegistry)
{
if (FeatureManager.IsContainerHooksEnabled(executionContext.Global.Variables))
Trace.Info("Load action that reference container from registry.");
CachedActionContainers.TryGetValue(action.Id, out var container);
ArgUtil.NotNull(container, nameof(container));
definition.Data.Execution = new ContainerActionExecutionData()
{
Trace.Info("Load action that will run container through container hooks.");
var containerAction = action.Reference as Pipelines.ContainerRegistryReference;
definition.Data.Execution = new ContainerActionExecutionData()
{
Image = containerAction.Image,
};
Trace.Info($"Using action container image: {containerAction.Image}.");
}
else
{
Trace.Info("Load action that reference container from registry.");
CachedActionContainers.TryGetValue(action.Id, out var container);
ArgUtil.NotNull(container, nameof(container));
definition.Data.Execution = new ContainerActionExecutionData()
{
Image = container.ContainerImage
};
Image = container.ContainerImage
};
Trace.Info($"Using action container image: {container.ContainerImage}.");
}
Trace.Info($"Using action container image: {container.ContainerImage}.");
}
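This hunk reads awkwardly because the two versions are interleaved. The newer code checks `FeatureManager.IsContainerHooksEnabled` first and, when hooks are on, takes the image straight from the `ContainerRegistryReference`; otherwise (and in the older code unconditionally) it looks the pulled image up in `CachedActionContainers`. A simplified sketch of that branching, with stand-in types:

```csharp
using System;
using System.Collections.Generic;

// Stand-ins for the pieces of the runner involved here (illustrative only).
public sealed class ContainerRegistryReference { public string Image { get; set; } }
public sealed class CachedContainer { public string ContainerImage { get; set; } }

public static class ContainerActionSketch
{
    // Decide which image a container-registry action should run with.
    public static string ResolveImage(
        bool containerHooksEnabled,
        ContainerRegistryReference reference,
        IReadOnlyDictionary<Guid, CachedContainer> cachedContainers,
        Guid actionId)
    {
        if (containerHooksEnabled)
        {
            // Container hooks run the container themselves, so the raw registry image is enough.
            return reference.Image;
        }

        // Without hooks the runner pulled the image up front and cached its resolved name.
        return cachedContainers[actionId].ContainerImage;
    }
}
```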
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository)
{
@@ -675,21 +648,13 @@ namespace GitHub.Runner.Worker
}
// Resolve download info
var launchServer = HostContext.GetService<ILaunchServer>();
var jobServer = HostContext.GetService<IJobServer>();
var actionDownloadInfos = default(WebApi.ActionDownloadInfoCollection);
for (var attempt = 1; attempt <= 3; attempt++)
{
try
{
if (MessageUtil.IsRunServiceJob(executionContext.Global.Variables.Get(Constants.Variables.System.JobRequestType)))
{
actionDownloadInfos = await launchServer.ResolveActionsDownloadInfoAsync(executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
}
else
{
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
}
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
break;
}
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is cancelled.
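On the left side of this hunk, action download info is resolved through the launch server when the job came from the run service (decided by the `JobRequestType` variable) and through the classic job server otherwise, inside a three-attempt retry loop. A hedged sketch of that selection, with stand-in clients and a simplified back-off:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

// Stand-in clients; the runner's real interfaces also take plan and scope identifiers.
public interface ILaunchClient { Task<string[]> ResolveActionsAsync(string[] actions, CancellationToken token); }
public interface IJobClient { Task<string[]> ResolveActionsAsync(string[] actions, CancellationToken token); }

public static class ActionResolutionSketch
{
    public static async Task<string[]> ResolveWithRetryAsync(
        bool isRunServiceJob, ILaunchClient launch, IJobClient jobServer,
        string[] actionReferences, CancellationToken token)
    {
        for (var attempt = 1; attempt <= 3; attempt++)
        {
            try
            {
                // Run-service jobs resolve through the launch endpoint; everything else
                // keeps using the job server, as in the removed lines above.
                return isRunServiceJob
                    ? await launch.ResolveActionsAsync(actionReferences, token)
                    : await jobServer.ResolveActionsAsync(actionReferences, token);
            }
            catch (Exception) when (attempt < 3 && !token.IsCancellationRequested)
            {
                // Back off briefly and retry; the final attempt lets the exception surface.
                await Task.Delay(TimeSpan.FromSeconds(attempt), token);
            }
        }

        throw new InvalidOperationException("unreachable: the loop either returns or rethrows");
    }
}
```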

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
@@ -141,7 +141,7 @@ namespace GitHub.Runner.Worker
foreach (var error in templateContext.Errors)
{
Trace.Error($"Action.yml load error: {error.Message}");
executionContext.Error("And let's duplicate errors: " + error.Message);
executionContext.Error(error.Message);
}
throw new ArgumentException($"Fail to load {fileRelativePath}");
@@ -448,7 +448,7 @@ namespace GitHub.Runner.Worker
};
}
}
else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase) ||
else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase)||
string.Equals(usingToken.Value, "node16", StringComparison.OrdinalIgnoreCase))
{
if (string.IsNullOrEmpty(mainToken?.Value))

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Runtime.Serialization;
namespace GitHub.Runner.Worker
@@ -30,4 +30,4 @@ namespace GitHub.Runner.Worker
return $"An action could not be found at the URI '{actionUri}'";
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Text;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;

View File

@@ -1,4 +1,4 @@
using System.Collections.Generic;
using System.Collections.Generic;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
@@ -83,7 +83,7 @@ namespace GitHub.Runner.Worker.Container.ContainerHooks
public HookContainer(ContainerInfo container)
{
Image = container.ContainerImage;
EntryPointArgs = container.ContainerEntryPointArgs?.Split(' ').Select(arg => arg.Trim()).Where(arg => !string.IsNullOrEmpty(arg)) ?? new List<string>();
EntryPointArgs = container.ContainerEntryPointArgs?.Split(' ').Select(arg => arg.Trim()) ?? new List<string>();
EntryPoint = container.ContainerEntryPoint;
WorkingDirectory = container.ContainerWorkDirectory;
CreateOptions = container.ContainerCreateOptions;
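The left side of this hunk filters out the empty tokens that `Split(' ')` produces when the container entry-point args contain repeated spaces. A tiny self-contained example of the difference:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

public static class EntryPointArgsSketch
{
    // Split a raw argument string into trimmed, non-empty tokens.
    public static IEnumerable<string> SplitArgs(string entryPointArgs)
    {
        return entryPointArgs?.Split(' ')
                              .Select(arg => arg.Trim())
                              .Where(arg => !string.IsNullOrEmpty(arg))
               ?? Enumerable.Empty<string>();
    }

    public static void Main()
    {
        // "run   --verbose" would otherwise yield two empty entries between the words.
        Console.WriteLine(string.Join("|", SplitArgs("run   --verbose"))); // run|--verbose
    }
}
```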

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using Newtonsoft.Json.Linq;

View File

@@ -96,7 +96,7 @@ namespace GitHub.Runner.Worker.Container
// https://docs.microsoft.com/en-us/dotnet/api/system.environment.getcommandlineargs?redirectedfrom=MSDN&view=net-6.0#remarks
// First, find any \ followed by a " and double the number of \ + 1.
value = QuoteEscape.Replace(value, @"$1$1\" + "\"");
value = QuoteEscape.Replace(value, @"$1$1\" + "\"");
// Next, what if it ends in `\`, it would escape the end quote. So, we need to detect that at the end of the string and perform the same escape
// Luckily, we can just use the $ character with detects the end of string in regex
value = EndOfStringEscape.Replace(value, @"$1$1");

View File

@@ -32,7 +32,7 @@ namespace GitHub.Runner.Worker
{
private static string DateTimeFormat = "yyyyMMdd-HHmmss";
public void UploadDiagnosticLogs(IExecutionContext executionContext,
IExecutionContext parentContext,
IExecutionContext parentContext,
Pipelines.AgentJobRequestMessage message,
DateTime jobStartTimeUtc)
{

View File

@@ -18,22 +18,15 @@ using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container;
using GitHub.Runner.Worker.Handlers;
using Newtonsoft.Json;
using Sdk.RSWebApi.Contracts;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
public static class ExecutionContextType
public class ExecutionContextType
{
public const string Job = "Job";
public const string Task = "Task";
}
public record ExecutionContextLogOptions(bool WriteToLog, string LogMessageOverride)
{
public static readonly ExecutionContextLogOptions None = new(false, null);
public static readonly ExecutionContextLogOptions Default = new(true, null);
public static string Job = "Job";
public static string Task = "Task";
}
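The removed lines introduce `ExecutionContextLogOptions` as a positional record with two shared instances, so `AddIssue` callers can say "log the issue as-is", "log different text", or "do not log it at all". A minimal sketch of the same pattern; the type shape is copied from this hunk, the usage below is illustrative:

```csharp
using System;

public record ExecutionContextLogOptions(bool WriteToLog, string LogMessageOverride)
{
    // Shared instances cover the two common cases; a custom override is the third.
    public static readonly ExecutionContextLogOptions None = new(false, null);
    public static readonly ExecutionContextLogOptions Default = new(true, null);
}

public static class LogOptionsUsageSketch
{
    public static void Describe(ExecutionContextLogOptions options) =>
        Console.WriteLine($"WriteToLog={options.WriteToLog}, Override={options.LogMessageOverride ?? "<none>"}");

    public static void Main()
    {
        Describe(ExecutionContextLogOptions.Default);                          // log the issue's own message
        Describe(ExecutionContextLogOptions.None);                             // record the issue, write nothing
        Describe(new ExecutionContextLogOptions(true, "friendlier log text")); // log an overridden message
    }
}
```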
[ServiceLocator(Default = typeof(ExecutionContext))]
@@ -88,7 +81,7 @@ namespace GitHub.Runner.Worker
// logging
long Write(string tag, string message);
void QueueAttachFile(string type, string name, string filePath);
void QueueSummaryFile(string name, string filePath, Guid stepRecordId);
void QueueSummaryFile(string name, string filePath, Guid stepRecordId);
// timeline record update methods
void Start(string currentOperation = null);
@@ -99,7 +92,7 @@ namespace GitHub.Runner.Worker
void SetGitHubContext(string name, string value);
void SetOutput(string name, string value, out string reference);
void SetTimeout(TimeSpan? timeout);
void AddIssue(Issue issue, ExecutionContextLogOptions logOptions);
void AddIssue(Issue issue, string message = null);
void Progress(int percentage, string currentOperation = null);
void UpdateDetailTimelineRecord(TimelineRecord record);
@@ -125,7 +118,7 @@ namespace GitHub.Runner.Worker
public sealed class ExecutionContext : RunnerService, IExecutionContext
{
private const int _maxCountPerIssueType = 10;
private const int _maxIssueCount = 10;
private const int _throttlingDelayReportThreshold = 10 * 1000; // Don't report throttling with less than 10 seconds delay
private const int _maxIssueMessageLength = 4096; // Don't send issue with huge message since we can't forward them from actions to check annotation.
private const int _maxIssueCountInTelemetry = 3; // Only send the first 3 issues to telemetry
@@ -133,10 +126,8 @@ namespace GitHub.Runner.Worker
private readonly TimelineRecord _record = new();
private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new();
private readonly List<Issue> _embeddedIssueCollector;
private readonly object _loggerLock = new();
private readonly object _matchersLock = new();
private readonly ExecutionContext _parentExecutionContext;
private event OnMatcherChanged _onMatcherChanged;
@@ -144,6 +135,7 @@ namespace GitHub.Runner.Worker
private IPagingLogger _logger;
private IJobServerQueue _jobServerQueue;
private ExecutionContext _parentExecutionContext;
private Guid _mainTimelineId;
private Guid _detailTimelineId;
@@ -157,29 +149,6 @@ namespace GitHub.Runner.Worker
private long _totalThrottlingDelayInMilliseconds = 0;
private bool _stepTelemetryPublished = false;
public ExecutionContext()
: this(parent: null, embedded: false)
{
}
private ExecutionContext(ExecutionContext parent, bool embedded)
{
if (embedded)
{
ArgUtil.NotNull(parent, nameof(parent));
}
_parentExecutionContext = parent;
this.IsEmbedded = embedded;
this.StepTelemetry = new ActionsStepTelemetry
{
IsEmbedded = embedded
};
//Embedded Execution Contexts pseudo-inherit their parent's embeddedIssueCollector.
_embeddedIssueCollector = embedded ? parent._embeddedIssueCollector : new();
}
public Guid Id => _record.Id;
public Guid EmbeddedId { get; private set; }
public string ScopeName { get; private set; }
@@ -192,7 +161,7 @@ namespace GitHub.Runner.Worker
public Dictionary<string, VariableValue> JobOutputs { get; private set; }
public ActionsEnvironmentReference ActionsEnvironment { get; private set; }
public ActionsStepTelemetry StepTelemetry { get; private init; }
public ActionsStepTelemetry StepTelemetry { get; } = new ActionsStepTelemetry();
public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
public IList<IFunctionInfo> ExpressionFunctions { get; } = new List<IFunctionInfo>();
@@ -217,7 +186,7 @@ namespace GitHub.Runner.Worker
// An embedded execution context shares the same record ID, record name, and logger
// as its enclosing execution context.
public bool IsEmbedded { get; private init; }
public bool IsEmbedded { get; private set; }
public TaskResult? Result
{
@@ -352,7 +321,7 @@ namespace GitHub.Runner.Worker
{
Trace.Entering();
var child = new ExecutionContext(this, isEmbedded);
var child = new ExecutionContext();
child.Initialize(HostContext);
child.Global = Global;
child.ScopeName = scopeName;
@@ -377,6 +346,7 @@ namespace GitHub.Runner.Worker
child.ExpressionFunctions.Add(item);
}
child._cancellationTokenSource = cancellationTokenSource ?? new CancellationTokenSource();
child._parentExecutionContext = this;
child.EchoOnActionCommand = EchoOnActionCommand;
if (recordOrder != null)
@@ -397,9 +367,11 @@ namespace GitHub.Runner.Worker
child._logger.Setup(_mainTimelineId, recordId);
}
child.IsEmbedded = isEmbedded;
child.StepTelemetry.StepId = recordId;
child.StepTelemetry.Stage = stage.ToString();
child.StepTelemetry.StepContextName = child.GetFullyQualifiedContextName();
child.StepTelemetry.IsEmbedded = isEmbedded;
child.StepTelemetry.StepContextName = child.GetFullyQualifiedContextName(); ;
return child;
}
@@ -441,24 +413,13 @@ namespace GitHub.Runner.Worker
this.Warning($"The job has experienced {TimeSpan.FromMilliseconds(_totalThrottlingDelayInMilliseconds).TotalSeconds} seconds total delay caused by server throttling.");
}
DateTime now = DateTime.UtcNow;
_record.CurrentOperation = currentOperation ?? _record.CurrentOperation;
_record.ResultCode = resultCode ?? _record.ResultCode;
_record.FinishTime = now;
_record.FinishTime = DateTime.UtcNow;
_record.PercentComplete = 100;
_record.Result = _record.Result ?? TaskResult.Succeeded;
_record.State = TimelineRecordState.Completed;
// Before our main timeline's final QueueTimelineRecordUpdate,
// inject any issues collected by embedded ExecutionContexts.
if (!this.IsEmbedded)
{
foreach (var issue in _embeddedIssueCollector)
{
AddIssue(issue, ExecutionContextLogOptions.None);
}
}
_jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record);
// complete all detail timeline records.
@@ -466,7 +427,7 @@ namespace GitHub.Runner.Worker
{
foreach (var record in _detailRecords)
{
record.Value.FinishTime = record.Value.FinishTime ?? now;
record.Value.FinishTime = record.Value.FinishTime ?? DateTime.UtcNow;
record.Value.PercentComplete = record.Value.PercentComplete ?? 100;
record.Value.Result = record.Value.Result ?? TaskResult.Succeeded;
record.Value.State = TimelineRecordState.Completed;
@@ -477,26 +438,14 @@ namespace GitHub.Runner.Worker
PublishStepTelemetry();
var stepResult = new StepResult
{
ExternalID = _record.Id,
Conclusion = _record.Result ?? TaskResult.Succeeded,
Status = _record.State,
Number = _record.Order,
Name = _record.Name,
StartedAt = _record.StartTime,
CompletedAt = _record.FinishTime,
Annotations = new List<Annotation>()
};
_record.Issues?.ForEach(issue =>
{
var annotation = issue.ToAnnotation();
if (annotation != null)
{
stepResult.Annotations.Add(annotation.Value);
}
});
var stepResult = new StepResult();
stepResult.ExternalID = _record.Id;
stepResult.Conclusion = _record.Result ?? TaskResult.Succeeded;
stepResult.Status = _record.State;
stepResult.Number = _record.Order;
stepResult.Name = _record.Name;
stepResult.StartedAt = _record.StartTime;
stepResult.CompletedAt = _record.FinishTime;
Global.StepsResult.Add(stepResult);
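Besides moving to an object initializer, the left side of this hunk copies each timeline issue onto the step result as an annotation via `ToAnnotation()`, which is how run-service jobs surface errors. A hedged sketch of that projection with simplified stand-in types (the runner's real `ToAnnotation()` returns a nullable struct checked via `HasValue`/`Value`):

```csharp
using System.Collections.Generic;

// Simplified stand-ins for the timeline and step-result types used in this hunk.
public sealed class Annotation { public string Message { get; set; } }
public sealed class Issue
{
    public string Message { get; set; }
    // Illustrative rule: no message, no annotation.
    public Annotation ToAnnotation() =>
        string.IsNullOrEmpty(Message) ? null : new Annotation { Message = Message };
}
public sealed class StepResult
{
    public List<Annotation> Annotations { get; } = new();
}

public static class StepResultSketch
{
    public static StepResult BuildStepResult(IEnumerable<Issue> issues)
    {
        var stepResult = new StepResult();
        foreach (var issue in issues)
        {
            var annotation = issue.ToAnnotation();
            if (annotation != null)
            {
                stepResult.Annotations.Add(annotation);
            }
        }
        return stepResult;
    }
}
```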
@@ -610,10 +559,14 @@ namespace GitHub.Runner.Worker
}
// This is not thread safe, the caller need to take lock before calling issue()
public void AddIssue(Issue issue, ExecutionContextLogOptions logOptions)
public void AddIssue(Issue issue, string logMessage = null)
{
ArgUtil.NotNull(issue, nameof(issue));
ArgUtil.NotNull(logOptions, nameof(logOptions));
if (string.IsNullOrEmpty(logMessage))
{
logMessage = issue.Message;
}
issue.Message = HostContext.SecretMasker.MaskSecrets(issue.Message);
if (issue.Message.Length > _maxIssueMessageLength)
@@ -628,64 +581,53 @@ namespace GitHub.Runner.Worker
issue.Data["stepNumber"] = _record.Order.ToString();
}
string wellKnownTag = null;
Int32 previousCountForIssueType = 0;
Action incrementIssueTypeCount = NoOp;
switch (issue.Type)
if (issue.Type == IssueType.Error)
{
case IssueType.Error:
wellKnownTag = WellKnownTags.Error;
previousCountForIssueType = _record.ErrorCount;
incrementIssueTypeCount = () => { _record.ErrorCount++; };
break;
case IssueType.Warning:
wellKnownTag = WellKnownTags.Warning;
previousCountForIssueType = _record.WarningCount;
incrementIssueTypeCount = () => { _record.WarningCount++; };
break;
case IssueType.Notice:
wellKnownTag = WellKnownTags.Notice;
previousCountForIssueType = _record.NoticeCount;
incrementIssueTypeCount = () => { _record.NoticeCount++; };
break;
}
if (!string.IsNullOrEmpty(wellKnownTag))
{
if (!this.IsEmbedded && previousCountForIssueType < _maxCountPerIssueType)
if (!string.IsNullOrEmpty(logMessage))
{
long logLineNumber = Write(WellKnownTags.Error, logMessage);
issue.Data["logFileLineNumber"] = logLineNumber.ToString();
}
if (_record.ErrorCount < _maxIssueCount)
{
incrementIssueTypeCount();
_record.Issues.Add(issue);
}
if (logOptions.WriteToLog)
_record.ErrorCount++;
}
else if (issue.Type == IssueType.Warning)
{
if (!string.IsNullOrEmpty(logMessage))
{
string logMessage = issue.Message;
if (!string.IsNullOrEmpty(logOptions.LogMessageOverride))
{
logMessage = logOptions.LogMessageOverride;
}
if (!string.IsNullOrEmpty(logMessage))
{
// Note that ::Write() has its own secret-masking logic.
long logLineNumber = Write(wellKnownTag, logMessage);
issue.Data["logFileLineNumber"] = logLineNumber.ToString();
}
long logLineNumber = Write(WellKnownTags.Warning, logMessage);
issue.Data["logFileLineNumber"] = logLineNumber.ToString();
}
if (_record.WarningCount < _maxIssueCount)
{
_record.Issues.Add(issue);
}
_record.WarningCount++;
}
else if (issue.Type == IssueType.Notice)
{
if (!string.IsNullOrEmpty(logMessage))
{
long logLineNumber = Write(WellKnownTags.Notice, logMessage);
issue.Data["logFileLineNumber"] = logLineNumber.ToString();
}
if (_record.NoticeCount < _maxIssueCount)
{
_record.Issues.Add(issue);
}
_record.NoticeCount++;
}
// Embedded ExecutionContexts (a.k.a. Composite actions) should never upload a timeline record to the server.
// Instead, we store processed issues on a shared (psuedo-inherited) list (belonging to the closest
// non-embedded ancestor ExecutionContext) so that they can be processed when that ancestor completes.
if (this.IsEmbedded)
{
_embeddedIssueCollector.Add(issue);
}
else
{
_jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record);
}
_jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record);
}
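The interleaved bodies above are the core of this diff: the newer `AddIssue` collapses the three near-identical error/warning/notice branches into one switch that picks a log tag, a per-type counter and a cap (`_maxCountPerIssueType`), and embedded (composite) contexts push issues onto a shared collector instead of queuing a timeline update. A compact sketch of just the counting behaviour, with simplified state in place of the timeline record:

```csharp
using System.Collections.Generic;

public enum IssueType { Error, Warning, Notice }
public sealed class Issue { public IssueType Type { get; set; } public string Message { get; set; } }

public sealed class IssueRecorderSketch
{
    private const int MaxCountPerIssueType = 10;
    private readonly Dictionary<IssueType, int> _counts = new();

    public List<Issue> Issues { get; } = new();

    // Record an issue, keeping at most MaxCountPerIssueType of each kind but always counting it.
    public void AddIssue(Issue issue)
    {
        _counts.TryGetValue(issue.Type, out var previousCount);

        if (previousCount < MaxCountPerIssueType)
        {
            Issues.Add(issue);
        }

        _counts[issue.Type] = previousCount + 1;
    }
}
```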
public void UpdateDetailTimelineRecord(TimelineRecord record)
@@ -783,9 +725,6 @@ namespace GitHub.Runner.Worker
// Steps results for entire job
Global.StepsResult = new List<StepResult>();
// Job level annotations
Global.JobAnnotations = new List<Annotation>();
// Job Outputs
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
@@ -802,9 +741,6 @@ namespace GitHub.Runner.Worker
// File table
Global.FileTable = new List<String>(message.FileTable ?? new string[0]);
// What type of job request is running (i.e. Run Service vs. pipelines)
Global.Variables.Set(Constants.Variables.System.JobRequestType, message.MessageType);
// Expression values
if (message.ContextData?.Count > 0)
{
@@ -935,7 +871,8 @@ namespace GitHub.Runner.Worker
{
throw new FileNotFoundException($"Can't upload (name:{name}) file: {filePath}. File does not exist.");
}
_jobServerQueue.QueueResultsUpload(stepRecordId, name, filePath, ChecksAttachmentType.StepSummary, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0);
_jobServerQueue.QueueSummaryUpload(stepRecordId, name, filePath, deleteSource: false);
}
// Add OnMatcherChanged
@@ -1059,7 +996,8 @@ namespace GitHub.Runner.Worker
StepTelemetry.FinishTime = _record.FinishTime;
}
if (!IsEmbedded)
if (!IsEmbedded &&
_record.Issues.Count > 0)
{
foreach (var issue in _record.Issues)
{
@@ -1225,11 +1163,6 @@ namespace GitHub.Runner.Worker
UpdateGlobalStepsContext();
}
private static void NoOp()
{
}
}
// The Error/Warning/etc methods are created as extension methods to simplify unit testing.
@@ -1254,22 +1187,19 @@ namespace GitHub.Runner.Worker
// Do not add a format string overload. See comment on ExecutionContext.Write().
public static void Error(this IExecutionContext context, string message)
{
var issue = new Issue() { Type = IssueType.Error, Message = message };
context.AddIssue(issue, ExecutionContextLogOptions.Default);
context.AddIssue(new Issue() { Type = IssueType.Error, Message = message });
}
// Do not add a format string overload. See comment on ExecutionContext.Write().
public static void InfrastructureError(this IExecutionContext context, string message)
{
var issue = new Issue() { Type = IssueType.Error, Message = message, IsInfrastructureIssue = true };
context.AddIssue(issue, ExecutionContextLogOptions.Default);
context.AddIssue(new Issue() { Type = IssueType.Error, Message = message, IsInfrastructureIssue = true });
}
// Do not add a format string overload. See comment on ExecutionContext.Write().
public static void Warning(this IExecutionContext context, string message)
{
var issue = new Issue() { Type = IssueType.Warning, Message = message };
context.AddIssue(issue, ExecutionContextLogOptions.Default);
context.AddIssue(new Issue() { Type = IssueType.Warning, Message = message });
}
// Do not add a format string overload. See comment on ExecutionContext.Write().
@@ -1401,7 +1331,7 @@ namespace GitHub.Runner.Worker
public void Error(string format, params Object[] args)
{
_executionContext.Error("error from template trace writer: " + string.Format(CultureInfo.CurrentCulture, format, args));
_executionContext.Error(string.Format(CultureInfo.CurrentCulture, format, args));
}
public void Info(string format, params Object[] args)

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.Pipelines.ContextData;
@@ -136,4 +136,4 @@ namespace GitHub.Runner.Worker.Expressions
}
}
}
}
}

View File

@@ -1,4 +1,4 @@
using GitHub.DistributedTask.WebApi;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker.Container;
using GitHub.Runner.Common;
using System;
@@ -281,7 +281,7 @@ namespace GitHub.Runner.Worker
}
}
public sealed class EnvFileKeyValuePairs : IEnumerable<KeyValuePair<string, string>>
public sealed class EnvFileKeyValuePairs: IEnumerable<KeyValuePair<string, string>>
{
private IExecutionContext _context;
private string _filePath;

View File

@@ -1,4 +1,4 @@
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ContextData;
using System;
using System.Collections.Generic;

Some files were not shown because too many files have changed in this diff.