Compare commits


2 Commits

Author: Tingluo Huang
SHA1:   f764f5dfa7
Date:   2023-01-18 19:38:19 -05:00
Message: Release 2.301.1 runner with reverting #2333 (#2384)
  * Revert "split by regex (#2333)"
    This reverts commit 72830cfc12.
  * Release 2.301.1 runner.

Author: Tingluo Huang
SHA1:   e4716930a9
Date:   2023-01-18 15:04:38 -05:00
Message: Create 2.301.0 runner release.
95 changed files with 665 additions and 3384 deletions

View File

@@ -5,7 +5,7 @@
"features": {
"ghcr.io/devcontainers/features/docker-in-docker:1": {},
"ghcr.io/devcontainers/features/dotnet": {
"version": "6.0.405"
"version": "6.0.300"
},
"ghcr.io/devcontainers/features/node:1": {
"version": "16"

View File

@@ -27,7 +27,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
@@ -38,4 +38,4 @@ jobs:
working-directory: src
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v1

View File

@@ -18,6 +18,7 @@ jobs:
uses: github/super-linter@v4
env:
DEFAULT_BRANCH: ${{ github.base_ref }}
DISABLE_ERRORS: true
EDITORCONFIG_FILE_NAME: .editorconfig
LINTER_RULES_PATH: /src/
VALIDATE_ALL_CODEBASE: false

View File

@@ -131,7 +131,7 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
shell: bash
id: sha
name: Compute SHA256
@@ -140,8 +140,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noexternals
name: Compute SHA256
@@ -150,8 +150,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noruntime
name: Compute SHA256
@@ -160,8 +160,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noruntime_noexternals
name: Compute SHA256
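The hunks above show the same step written two ways: one side appends key/value pairs to the `$GITHUB_OUTPUT` environment file, the other uses the older `::set-output` workflow command, which GitHub has deprecated. A minimal sketch of both forms, reusing the names from the diff:

```bash
#!/usr/bin/env bash
# Both lines publish a step output named "sha256" that later steps can read
# as steps.<step-id>.outputs.sha256.
file=$(ls)   # as in the workflow above: the working directory holds a single artifact
sha=$(sha256sum "$file" | awk '{ print $1 }')

# Environment-file form: append to the file GitHub Actions exposes via $GITHUB_OUTPUT.
echo "sha256=$sha" >> "$GITHUB_OUTPUT"

# Deprecated workflow-command form: emit a ::set-output command on stdout.
echo "::set-output name=sha256::$sha"
```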

View File

@@ -1,65 +0,0 @@
# ADR 2494: Runner Image Tags
**Date**: 2023-03-17
**Status**: Accepted<!-- |Accepted|Rejected|Superseded|Deprecated -->
## Context
Following the [adoption of actions-runner-controller by GitHub](https://github.com/actions/actions-runner-controller/discussions/2072) and the introduction of the new runner scale set autoscaling mode, we needed to provide a basic runner image that could be used off the shelf without much friction.
The [current runner image](https://github.com/actions/runner/pkgs/container/actions-runner) is published to GHCR. Each release of this image is tagged with the runner version and the most recent release is also tagged with `latest`.
While the use of `latest` is common practice, we recommend that users pin a specific version of the runner image for a predictable runtime and improved security posture. However, we still notice that a large number of end users rely on the `latest` tag and raise issues when they encounter problems.
In addition, the community actions-runner-controller maintainers have issued a [deprecation notice](https://github.com/actions/actions-runner-controller/issues/2056) for the `latest` tag of the existing runner images (https://github.com/orgs/actions-runner-controller/packages).
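A quick sketch of what pinning a specific version looks like for the published image; the tag value here is only an example:
```bash
# Pin an explicit release of the runner image for a predictable runtime
docker pull ghcr.io/actions/actions-runner:2.301.1

# The floating tag moves with every release, so the runtime is not reproducible
docker pull ghcr.io/actions/actions-runner:latest
```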
## Decision
Proceed with Option 2, keeping the `latest` tag and adding the `NOTES.txt` file to our helm charts with the notice.
### Option 1: Remove the `latest` tag
By removing the `latest` tag, we have to proceed with either of these options:
1. Remove the runner image reference in the `values.yaml` provided with the `gha-runner-scale-set` helm chart and mark these fields as required so that users have to explicitly specify a runner image and a specific tag. This will obviously introduce more friction for users who want to start using actions-runner-controller for the first time.
```yaml
spec:
containers:
- name: runner
image: ""
tag: ""
command: ["/home/runner/run.sh"]
```
1. Pin a specific runner image tag in the `values.yaml` provided with the `gha-runner-scale-set` helm chart. This will reduce friction for users who want to start using actions-runner-controller for the first time but will require us to update the `values.yaml` with every new runner release.
```yaml
spec:
containers:
- name: runner
image: "ghcr.io/actions/actions-runner"
tag: "v2.300.0"
command: ["/home/runner/run.sh"]
```
### Option 2: Keep the `latest` tag
Keeping the `latest` tag is also a reasonable option especially if we don't expect to make any breaking changes to the runner image. We could enhance this by adding a [NOTES.txt](https://helm.sh/docs/chart_template_guide/notes_files/) to the helm chart which will be displayed to the user after a successful helm install/upgrade. This will help users understand the implications of using the `latest` tag and how to pin a specific version of the runner image.
The runner image release workflow will need to be updated so that the image is pushed to GHCR and tagged only when the runner rollout has reached all scale units.
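A minimal sketch of the notice such a `NOTES.txt` could print after a successful `helm install`/`helm upgrade`; the wording and values keys are illustrative rather than the shipped file:
```text
Thank you for installing gha-runner-scale-set.

The runner pods currently default to ghcr.io/actions/actions-runner:latest.
For a predictable runtime and an improved security posture, pin a specific
release tag in your values, for example:

  spec:
    containers:
      - name: runner
        image: "ghcr.io/actions/actions-runner"
        tag: "v2.300.0"
        command: ["/home/runner/run.sh"]
```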
## Consequences
Proceeding with **option 1** means:
1. We will enhance the runtime predictability and security posture of our end users
1. We will have to update the `values.yaml` with every new runner release (that can be automated)
1. We will introduce friction for users who want to start using actions-runner-controller for the first time
Proceeding with **option 2** means:
1. We will have to continue to maintain the `latest` tag
1. We will assume that end users will be able to handle the implications of using the `latest` tag
1. Runner image release workflow needs to be updated

View File

@@ -158,11 +158,3 @@ cat (Runner/Worker)_TIMESTAMP.log # view your log file
We use the .NET Foundation and CoreCLR style guidelines [located here](
https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md)
### Format C# Code
To format both staged and unstaged .cs files
```
cd ./src
./dev.(cmd|sh) format
```

View File

@@ -2,8 +2,7 @@ FROM mcr.microsoft.com/dotnet/runtime-deps:6.0 as build
ARG RUNNER_VERSION
ARG RUNNER_ARCH="x64"
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.3.1
ARG DOCKER_VERSION=20.10.23
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.2.0
RUN apt update -y && apt install curl unzip -y
@@ -16,34 +15,11 @@ RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-c
&& unzip ./runner-container-hooks.zip -d ./k8s \
&& rm runner-container-hooks.zip
RUN export DOCKER_ARCH=x86_64 \
&& if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
&& curl -fLo docker.tgz https://download.docker.com/linux/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \
&& tar zxvf docker.tgz \
&& rm -rf docker.tgz
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0
ENV DEBIAN_FRONTEND=noninteractive
ENV RUNNER_ALLOW_RUNASROOT=1
ENV RUNNER_MANUALLY_TRAP_SIG=1
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
RUN apt-get update -y \
&& apt-get install -y --no-install-recommends \
sudo \
&& rm -rf /var/lib/apt/lists/*
RUN adduser --disabled-password --gecos "" --uid 1001 runner \
&& groupadd docker --gid 123 \
&& usermod -aG sudo runner \
&& usermod -aG docker runner \
&& echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers
WORKDIR /home/runner
COPY --chown=runner:docker --from=build /actions-runner .
RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker
USER runner
WORKDIR /actions-runner
COPY --from=build /actions-runner .

View File

@@ -1,17 +1,19 @@
## Features
- Support matrix context in output keys (#2477)
- Add update certificates to `./run.sh` if `RUNNER_UPDATE_CA_CERTS` env is set (#2471)
- Bypass all proxies for all hosts if `no_proxy='*'` is set (#2395)
- Change runner image to make user/folder align with `ubuntu-latest` hosted runner. (#2469)
- Log GitHub RequestId for better traceability (#2332)
- Dual upload summary to Actions and Result service (#2334)
- Allow providing extra User-Agent for better correlation (#2370)
- Show more information in the runner log (#2377)
- New option to remove local config files (#2367)
## Bugs
- Exit on runner version deprecation error (#2299)
- Runner service exit after consecutive re-try exits (#2426)
- Treat jitconfig as secret (#2335)
- Add Header/Footer to multi-line message in StdoutTraceListener (#2336)
- Update Node dependencies (#2381)
## Misc
- Replace deprecated command with environment file (#2429)
- Make requests to `Run` service to renew job request (#2461)
- Add job/step log upload to Result service (#2447, #2439)
- Make runner image print diag log to STDOUT (#2331)
- Update Node.js to 16.16.0 (#2371)
- Add a disclaimer for which runner version is available to a given tenant (#2362)
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository._
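For the `RUNNER_UPDATE_CA_CERTS` and `no_proxy` entries above, a minimal usage sketch; the variable names come from these notes and from the `run.sh` diff later in this compare, and the values are illustrative:
```bash
# Refresh the CA certificate store before the listener starts
export RUNNER_UPDATE_CA_CERTS=1

# Bypass every proxy for every host
export no_proxy='*'

./run.sh
```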

View File

@@ -1 +1 @@
<Update to ./src/runnerversion when creating release>
2.301.1

View File

@@ -1 +1 @@
39f2a931565d6a10e695ac8ed14bb9dcbb568151410349b32dbf9c27bae29602
1d709d93e5d3c6c6c656a61aa6c1781050224788a05b0e6ecc4c3c0408bdf89c

View File

@@ -1 +1 @@
29ffb303537d8ba674fbebc7729292c21c4ebd17b3198f91ed593ef4cbbb67b5
b92a47cfeaad02255b1f7a377060651b73ae5e5db22a188dbbcb4183ab03a03d

View File

@@ -1 +1 @@
de6868a836fa3cb9e5ddddbc079da1c25e819aa2d2fc193cc9931c353687c57c
68a9a8ef0843a8bb74241894f6f63fd76241a82295c5337d3cc7a940a314c78e

View File

@@ -1 +1 @@
339d3e1a5fd28450c0fe6cb820cc7aae291f0f9e2d153ac34e1f7b080e35d30e
02c7126ff4d63ee2a0ae390c81434c125630522aadf35903bbeebb1a99d8af99

View File

@@ -1 +1 @@
dcb7f606c1d7d290381e5020ee73e7f16dcbd2f20ac9b431362ccbb5120d449c
c9d5a542f8d765168855a89e83ae0a8970d00869041c4f9a766651c04c72b212

View File

@@ -1 +1 @@
1bbcb0e9a2cf4be4b1fce77458de139b70ac58efcbb415a6db028b9373ae1673
39d0683f0f115a211cb10c473e9574c16549a19d4e9a6c637ded3d7022bf809f

View File

@@ -1 +1 @@
44cd25f3c104d0abb44d262397a80e0b2c4f206465c5d899a22eec043dac0fb3
d94f2fbaf210297162bc9f3add819d73682c3aa6899e321c3872412b924d5504

View File

@@ -24,8 +24,7 @@ if (exitServiceAfterNFailures <= 0) {
exitServiceAfterNFailures = NaN;
}
var unknownFailureRetryCount = 0;
var retriableFailureRetryCount = 0;
var consecutiveFailureCount = 0;
var gracefulShutdown = function () {
console.log("Shutting down runner listener");
@@ -63,8 +62,7 @@ var runService = function () {
listener.stdout.on("data", (data) => {
if (data.toString("utf8").includes("Listening for Jobs")) {
unknownFailureRetryCount = 0;
retriableFailureRetryCount = 0;
consecutiveFailureCount = 0;
}
process.stdout.write(data.toString("utf8"));
});
@@ -94,38 +92,24 @@ var runService = function () {
console.log(
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
);
unknownFailureRetryCount = 0;
retriableFailureRetryCount++;
if (retriableFailureRetryCount >= 10) {
console.error(
"Stopping the runner after 10 consecutive re-tryable failures"
);
stopping = true;
}
consecutiveFailureCount = 0;
} else if (code === 3 || code === 4) {
console.log(
"Runner listener exit because of updating, re-launch runner in 5 seconds."
);
unknownFailureRetryCount = 0;
retriableFailureRetryCount++;
if (retriableFailureRetryCount >= 10) {
console.error(
"Stopping the runner after 10 consecutive re-tryable failures"
);
stopping = true;
}
consecutiveFailureCount = 0;
} else {
var messagePrefix = "Runner listener exit with undefined return code";
unknownFailureRetryCount++;
retriableFailureRetryCount = 0;
consecutiveFailureCount++;
if (
!isNaN(exitServiceAfterNFailures) &&
unknownFailureRetryCount >= exitServiceAfterNFailures
consecutiveFailureCount >= exitServiceAfterNFailures
) {
console.error(
`${messagePrefix}, exiting service after ${unknownFailureRetryCount} consecutive failures`
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
);
stopping = true
gracefulShutdown();
return;
} else {
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
}

View File

@@ -18,20 +18,6 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
# Wait for docker to start
if [ ! -z "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS" ]; then
if [ "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS" -gt 0 ]; then
echo "Waiting for docker to be ready."
for i in $(seq "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS"); do
if docker ps > /dev/null 2>&1; then
echo "Docker is ready."
break
fi
"$DIR"/safe_sleep.sh 1
done
fi
fi
updateFile="update.finished"
"$DIR"/bin/Runner.Listener run $*

View File

@@ -53,33 +53,6 @@ runWithManualTrap() {
done
}
function updateCerts() {
local sudo_prefix=""
local user_id=`id -u`
if [ $user_id -ne 0 ]; then
if [[ ! -x "$(command -v sudo)" ]]; then
echo "Warning: failed to update certificate store: sudo is required but not found"
return 1
else
sudo_prefix="sudo"
fi
fi
if [[ -x "$(command -v update-ca-certificates)" ]]; then
eval $sudo_prefix "update-ca-certificates"
elif [[ -x "$(command -v update-ca-trust)" ]]; then
eval $sudo_prefix "update-ca-trust"
else
echo "Warning: failed to update certificate store: update-ca-certificates or update-ca-trust not found. This can happen if you're using a different runner base image."
return 1
fi
}
if [[ ! -z "$RUNNER_UPDATE_CA_CERTS" ]]; then
updateCerts
fi
if [[ -z "$RUNNER_MANUALLY_TRAP_SIG" ]]; then
run $*
else

View File

@@ -74,7 +74,6 @@ Microsoft.Win32.Registry.dll
mscordaccore.dll
mscordaccore_amd64_amd64_6.0.522.21309.dll
mscordaccore_arm64_arm64_6.0.522.21309.dll
mscordaccore_amd64_amd64_6.0.1322.58009.dll
mscordbi.dll
mscorlib.dll
mscorrc.debug.dll

View File

@@ -1,56 +0,0 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi.RawClient;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(BrokerServer))]
public interface IBrokerServer : IRunnerService
{
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version);
}
public sealed class BrokerServer : RunnerService, IBrokerServer
{
private bool _hasConnection;
private Uri _brokerUri;
private RawConnection _connection;
private BrokerHttpClient _brokerHttpClient;
public async Task ConnectAsync(Uri serverUri, VssCredentials credentials)
{
_brokerUri = serverUri;
_connection = VssUtil.CreateRawConnection(serverUri, credentials);
_brokerHttpClient = await _connection.GetClientAsync<BrokerHttpClient>();
_hasConnection = true;
}
private void CheckConnection()
{
if (!_hasConnection)
{
throw new InvalidOperationException($"SetConnection");
}
}
public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version)
{
CheckConnection();
var jobMessage = RetryRequest<TaskAgentMessage>(
async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, cancellationToken), cancellationToken);
return jobMessage;
}
}
}

View File

@@ -50,12 +50,6 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)]
public string MonitorSocketAddress { get; set; }
[DataMember(EmitDefaultValue = false)]
public bool UseV2Flow { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ServerUrlV2 { get; set; }
[IgnoreDataMember]
public bool IsHostedServer
{

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
@@ -53,7 +53,7 @@ namespace GitHub.Runner.Common
private static int[] _vssHttpCredentialEventIds = new int[] { 11, 13, 14, 15, 16, 17, 18, 20, 21, 22, 27, 29 };
private readonly ConcurrentDictionary<Type, object> _serviceInstances = new();
private readonly ConcurrentDictionary<Type, Type> _serviceTypes = new();
private readonly ISecretMasker _secretMasker;
private readonly ISecretMasker _secretMasker = new SecretMasker();
private readonly List<ProductInfoHeaderValue> _userAgents = new() { new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version) };
private CancellationTokenSource _runnerShutdownTokenSource = new();
private object _perfLock = new();
@@ -82,20 +82,17 @@ namespace GitHub.Runner.Common
_loadContext = AssemblyLoadContext.GetLoadContext(typeof(HostContext).GetTypeInfo().Assembly);
_loadContext.Unloading += LoadContext_Unloading;
var masks = new List<ValueEncoder>()
{
ValueEncoders.EnumerateBase64Variations,
ValueEncoders.CommandLineArgumentEscape,
ValueEncoders.ExpressionStringEscape,
ValueEncoders.JsonStringEscape,
ValueEncoders.UriDataEscape,
ValueEncoders.XmlDataEscape,
ValueEncoders.TrimDoubleQuotes,
ValueEncoders.PowerShellPreAmpersandEscape,
ValueEncoders.PowerShellPostAmpersandEscape
};
_secretMasker = new SecretMasker(masks);
this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift1);
this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift2);
this.SecretMasker.AddValueEncoder(ValueEncoders.CommandLineArgumentEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.ExpressionStringEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.UriDataEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.XmlDataEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.TrimDoubleQuotes);
this.SecretMasker.AddValueEncoder(ValueEncoders.PowerShellPreAmpersandEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.PowerShellPostAmpersandEscape);
// Create StdoutTraceListener if ENV is set
StdoutTraceListener stdoutTraceListener = null;
@@ -223,7 +220,7 @@ namespace GitHub.Runner.Common
var runnerFile = GetConfigFile(WellKnownConfigFile.Runner);
if (File.Exists(runnerFile))
{
var runnerSettings = IOUtil.LoadObject<RunnerSettings>(runnerFile, true);
var runnerSettings = IOUtil.LoadObject<RunnerSettings>(runnerFile);
_userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture)));
_userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture)));
}

View File

@@ -24,11 +24,13 @@ namespace GitHub.Runner.Common
Task ConnectAsync(VssConnection jobConnection);
void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
void InitializeResultsClient(Uri uri, string token);
// logging and console
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
Task CreateStepSymmaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken);
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
@@ -42,6 +44,7 @@ namespace GitHub.Runner.Common
private bool _hasConnection;
private VssConnection _connection;
private TaskHttpClient _taskClient;
private ResultsHttpClient _resultsClient;
private ClientWebSocket _websocketClient;
private ServiceEndpoint _serviceEndpoint;
@@ -145,6 +148,12 @@ namespace GitHub.Runner.Common
InitializeWebsocketClient(TimeSpan.Zero);
}
public void InitializeResultsClient(Uri uri, string token)
{
var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
}
public ValueTask DisposeAsync()
{
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
@@ -307,6 +316,15 @@ namespace GitHub.Runner.Common
return _taskClient.CreateAttachmentAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, type, name, uploadStream, cancellationToken: cancellationToken);
}
public Task CreateStepSymmaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadStepSummaryAsync(planId, jobId, stepId, file, cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken)
{

View File

@@ -20,7 +20,7 @@ namespace GitHub.Runner.Common
void Start(Pipelines.AgentJobRequestMessage jobRequest);
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
void QueueSummaryUpload(Guid stepRecordId, string name, string path, bool deleteSource);
void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord);
}
@@ -31,7 +31,7 @@ namespace GitHub.Runner.Common
private static readonly TimeSpan _delayForWebConsoleLineDequeue = TimeSpan.FromMilliseconds(500);
private static readonly TimeSpan _delayForTimelineUpdateDequeue = TimeSpan.FromMilliseconds(500);
private static readonly TimeSpan _delayForFileUploadDequeue = TimeSpan.FromMilliseconds(1000);
private static readonly TimeSpan _delayForResultsUploadDequeue = TimeSpan.FromMilliseconds(1000);
private static readonly TimeSpan _delayForSummaryUploadDequeue = TimeSpan.FromMilliseconds(1000);
// Job message information
private Guid _scopeIdentifier;
@@ -46,7 +46,7 @@ namespace GitHub.Runner.Common
// queue for file upload (log file or attachment)
private readonly ConcurrentQueue<UploadFileInfo> _fileUploadQueue = new();
private readonly ConcurrentQueue<ResultsUploadFileInfo> _resultsFileUploadQueue = new();
private readonly ConcurrentQueue<SummaryUploadFileInfo> _summaryFileUploadQueue = new();
// queue for timeline or timeline record update (one queue per timeline)
private readonly ConcurrentDictionary<Guid, ConcurrentQueue<TimelineRecord>> _timelineUpdateQueue = new();
@@ -60,12 +60,11 @@ namespace GitHub.Runner.Common
// Task for each queue's dequeue process
private Task _webConsoleLineDequeueTask;
private Task _fileUploadDequeueTask;
private Task _resultsUploadDequeueTask;
private Task _summaryUploadDequeueTask;
private Task _timelineUpdateDequeueTask;
// common
private IJobServer _jobServer;
private IResultsServer _resultsServer;
private Task[] _allDequeueTasks;
private readonly TaskCompletionSource<int> _jobCompletionSource = new();
private readonly TaskCompletionSource<int> _jobRecordUpdated = new();
@@ -85,14 +84,10 @@ namespace GitHub.Runner.Common
private bool _webConsoleLineAggressiveDequeue = true;
private bool _firstConsoleOutputs = true;
private bool _resultsClientInitiated = false;
private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_jobServer = hostContext.GetService<IJobServer>();
_resultsServer = hostContext.GetService<IResultsServer>();
}
public void Start(Pipelines.AgentJobRequestMessage jobRequest)
@@ -113,10 +108,10 @@ namespace GitHub.Runner.Common
!string.IsNullOrEmpty(resultsReceiverEndpoint))
{
Trace.Info("Initializing results client");
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), accessToken);
_resultsClientInitiated = true;
_jobServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), accessToken);
}
if (_queueInProcess)
{
Trace.Info("No-opt, all queue process tasks are running.");
@@ -145,12 +140,12 @@ namespace GitHub.Runner.Common
_fileUploadDequeueTask = ProcessFilesUploadQueueAsync();
Trace.Info("Start results file upload queue.");
_resultsUploadDequeueTask = ProcessResultsUploadQueueAsync();
_summaryUploadDequeueTask = ProcessSummaryUploadQueueAsync();
Trace.Info("Start process timeline update queue.");
_timelineUpdateDequeueTask = ProcessTimelinesUpdateQueueAsync();
_allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask, _resultsUploadDequeueTask };
_allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask, _summaryUploadDequeueTask };
_queueInProcess = true;
}
@@ -181,9 +176,9 @@ namespace GitHub.Runner.Common
await ProcessFilesUploadQueueAsync(runOnce: true);
Trace.Info("File upload queue drained.");
Trace.Verbose("Draining results upload queue.");
await ProcessResultsUploadQueueAsync(runOnce: true);
Trace.Info("Results upload queue drained.");
Trace.Verbose("Draining results summary upload queue.");
await ProcessSummaryUploadQueueAsync(runOnce: true);
Trace.Info("Results summary upload queue drained.");
// ProcessTimelinesUpdateQueueAsync() will throw exception during shutdown
// if there is any timeline records that failed to update contains output variabls.
@@ -235,43 +230,21 @@ namespace GitHub.Runner.Common
_fileUploadQueue.Enqueue(newFile);
}
public void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines)
public void QueueSummaryUpload(Guid stepRecordId, string name, string path, bool deleteSource)
{
if (!_resultsClientInitiated)
{
Trace.Verbose("Skipping results upload");
try
{
if (deleteSource)
{
File.Delete(path);
}
}
catch (Exception ex)
{
Trace.Info("Catch exception during delete skipped results upload file.");
Trace.Error(ex);
}
return;
}
// all parameter not null, file path exist.
var newFile = new ResultsUploadFileInfo()
var newFile = new SummaryUploadFileInfo()
{
Name = name,
Path = path,
Type = type,
PlanId = _planId.ToString(),
JobId = _jobTimelineRecordId.ToString(),
RecordId = timelineRecordId,
DeleteSource = deleteSource,
Finalize = finalize,
FirstBlock = firstBlock,
TotalLines = totalLines,
StepId = stepRecordId.ToString(),
DeleteSource = deleteSource
};
Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to job {1} step {2}", newFile.Path, _jobTimelineRecordId, timelineRecordId);
_resultsFileUploadQueue.Enqueue(newFile);
Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to job {1} step {2}", newFile.Path, _jobTimelineRecordId, stepRecordId);
_summaryFileUploadQueue.Enqueue(newFile);
}
public void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord)
@@ -464,18 +437,18 @@ namespace GitHub.Runner.Common
}
}
private async Task ProcessResultsUploadQueueAsync(bool runOnce = false)
private async Task ProcessSummaryUploadQueueAsync(bool runOnce = false)
{
Trace.Info("Starting results-based upload queue...");
while (!_jobCompletionSource.Task.IsCompleted || runOnce)
{
List<ResultsUploadFileInfo> filesToUpload = new();
ResultsUploadFileInfo dequeueFile;
while (_resultsFileUploadQueue.TryDequeue(out dequeueFile))
List<SummaryUploadFileInfo> filesToUpload = new();
SummaryUploadFileInfo dequeueFile;
while (_summaryFileUploadQueue.TryDequeue(out dequeueFile))
{
filesToUpload.Add(dequeueFile);
// process at most 10 file uploads.
// process at most 10 file upload.
if (!runOnce && filesToUpload.Count > 10)
{
break;
@@ -486,7 +459,7 @@ namespace GitHub.Runner.Common
{
if (runOnce)
{
Trace.Info($"Uploading {filesToUpload.Count} file(s) in one shot through results service.");
Trace.Info($"Uploading {filesToUpload.Count} summary files in one shot through results service.");
}
int errorCount = 0;
@@ -494,38 +467,30 @@ namespace GitHub.Runner.Common
{
try
{
if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
{
await UploadSummaryFile(file);
}
else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase))
{
if (file.RecordId != _jobTimelineRecordId)
{
Trace.Info($"Got a step log file to send to results service.");
await UploadResultsStepLogFile(file);
}
else if (file.RecordId == _jobTimelineRecordId)
{
Trace.Info($"Got a job log file to send to results service.");
await UploadResultsJobLogFile(file);
}
}
await UploadSummaryFile(file);
}
catch (Exception ex)
{
Trace.Info("Catch exception during file upload to results, keep going since the process is best effort.");
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception during summary file upload to results. {ex.Message}" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure;
var telemetryRecord = new TimelineRecord()
{
Id = Constants.Runner.TelemetryRecordId,
};
telemetryRecord.Issues.Add(issue);
QueueTimelineRecordUpdate(_jobTimelineId, telemetryRecord);
Trace.Info("Catch exception during summary file upload to results, keep going since the process is best effort.");
Trace.Error(ex);
}
finally
{
errorCount++;
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results
_resultsClientInitiated = false;
SendResultsTelemetry(ex);
}
}
Trace.Info("Tried to upload {0} file(s) to results, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
Trace.Info("Tried to upload {0} summary files to results, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
}
if (runOnce)
@@ -534,24 +499,11 @@ namespace GitHub.Runner.Common
}
else
{
await Task.Delay(_delayForResultsUploadDequeue);
await Task.Delay(_delayForSummaryUploadDequeue);
}
}
}
private void SendResultsTelemetry(Exception ex)
{
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception with results. {ex.Message}" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure;
var telemetryRecord = new TimelineRecord()
{
Id = Constants.Runner.TelemetryRecordId,
};
telemetryRecord.Issues.Add(issue);
QueueTimelineRecordUpdate(_jobTimelineId, telemetryRecord);
}
private async Task ProcessTimelinesUpdateQueueAsync(bool runOnce = false)
{
while (!_jobCompletionSource.Task.IsCompleted || runOnce)
@@ -622,22 +574,6 @@ namespace GitHub.Runner.Common
try
{
await _jobServer.UpdateTimelineRecordsAsync(_scopeIdentifier, _hubName, _planId, update.TimelineId, update.PendingRecords, default(CancellationToken));
try
{
if (_resultsClientInitiated)
{
await _resultsServer.UpdateResultsWorkflowStepsAsync(_scopeIdentifier, _hubName, _planId, update.TimelineId, update.PendingRecords, default(CancellationToken));
}
}
catch (Exception e)
{
Trace.Info("Catch exception during update steps, skip update Results.");
Trace.Error(e);
_resultsClientInitiated = false;
SendResultsTelemetry(e);
}
if (_bufferedRetryRecords.Remove(update.TimelineId))
{
Trace.Verbose("Cleanup buffered timeline record for timeline: {0}.", update.TimelineId);
@@ -840,50 +776,16 @@ namespace GitHub.Runner.Common
}
}
private async Task UploadSummaryFile(ResultsUploadFileInfo file)
private async Task UploadSummaryFile(SummaryUploadFileInfo file)
{
Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler summaryHandler = async (file) =>
{
await _resultsServer.CreateResultsStepSummaryAsync(file.PlanId, file.JobId, file.RecordId, file.Path, CancellationToken.None);
};
await UploadResultsFile(file, summaryHandler);
}
private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler stepLogHandler = async (file) =>
{
await _resultsServer.CreateResultsStepLogAsync(file.PlanId, file.JobId, file.RecordId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
};
await UploadResultsFile(file, stepLogHandler);
}
private async Task UploadResultsJobLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of job log file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler jobLogHandler = async (file) =>
{
await _resultsServer.CreateResultsJobLogAsync(file.PlanId, file.JobId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
};
await UploadResultsFile(file, jobLogHandler);
}
private async Task UploadResultsFile(ResultsUploadFileInfo file, ResultsFileUploadHandler uploadHandler)
{
if (!_resultsClientInitiated)
{
return;
}
bool uploadSucceed = false;
try
{
await uploadHandler(file);
// Upload the step summary
Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
var cancellationTokenSource = new CancellationTokenSource();
await _jobServer.CreateStepSymmaryAsync(file.PlanId, file.JobId, file.StepId, file.Path, cancellationTokenSource.Token);
uploadSucceed = true;
}
finally
@@ -896,7 +798,7 @@ namespace GitHub.Runner.Common
}
catch (Exception ex)
{
Trace.Info("Exception encountered during deletion of a temporary file that was already successfully uploaded to results.");
Trace.Info("Catch exception during delete success results uploaded summary file.");
Trace.Error(ex);
}
}
@@ -920,20 +822,18 @@ namespace GitHub.Runner.Common
public bool DeleteSource { get; set; }
}
internal class ResultsUploadFileInfo
internal class SummaryUploadFileInfo
{
public string Name { get; set; }
public string Type { get; set; }
public string Path { get; set; }
public string PlanId { get; set; }
public string JobId { get; set; }
public Guid RecordId { get; set; }
public string StepId { get; set; }
public bool DeleteSource { get; set; }
public bool Finalize { get; set; }
public bool FirstBlock { get; set; }
public long TotalLines { get; set; }
}
internal class ConsoleLineInfo
{
public ConsoleLineInfo(Guid recordId, string line, long? lineNumber)

View File

@@ -21,12 +21,6 @@ namespace GitHub.Runner.Common
// 8 MB
public const int PageSize = 8 * 1024 * 1024;
// For Results
public static string BlocksFolder = "blocks";
// 2 MB
public const int BlockSize = 2 * 1024 * 1024;
private Guid _timelineId;
private Guid _timelineRecordId;
private FileStream _pageData;
@@ -38,13 +32,6 @@ namespace GitHub.Runner.Common
private string _pagesFolder;
private IJobServerQueue _jobServerQueue;
private string _resultsDataFileName;
private FileStream _resultsBlockData;
private StreamWriter _resultsBlockWriter;
private string _resultsBlockFolder;
private int _blockByteCount;
private int _blockCount;
public long TotalLines => _totalLines;
public override void Initialize(IHostContext hostContext)
@@ -52,10 +39,8 @@ namespace GitHub.Runner.Common
base.Initialize(hostContext);
_totalLines = 0;
_pagesFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), PagingFolder);
Directory.CreateDirectory(_pagesFolder);
_resultsBlockFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), BlocksFolder);
Directory.CreateDirectory(_resultsBlockFolder);
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
Directory.CreateDirectory(_pagesFolder);
}
public void Setup(Guid timelineId, Guid timelineRecordId)
@@ -75,17 +60,11 @@ namespace GitHub.Runner.Common
// lazy creation on write
if (_pageWriter == null)
{
NewPage();
}
if (_resultsBlockWriter == null)
{
NewBlock();
Create();
}
string line = $"{DateTime.UtcNow.ToString("O")} {message}";
_pageWriter.WriteLine(line);
_resultsBlockWriter.WriteLine(line);
_totalLines++;
if (line.IndexOf('\n') != -1)
@@ -99,25 +78,21 @@ namespace GitHub.Runner.Common
}
}
var bytes = System.Text.Encoding.UTF8.GetByteCount(line);
_byteCount += bytes;
_blockByteCount += bytes;
_byteCount += System.Text.Encoding.UTF8.GetByteCount(line);
if (_byteCount >= PageSize)
{
NewPage();
}
if (_blockByteCount >= BlockSize)
{
NewBlock();
}
}
public void End()
{
EndPage();
EndBlock(true);
}
private void Create()
{
NewPage();
}
private void NewPage()
@@ -142,27 +117,5 @@ namespace GitHub.Runner.Common
_jobServerQueue.QueueFileUpload(_timelineId, _timelineRecordId, "DistributedTask.Core.Log", "CustomToolLog", _dataFileName, true);
}
}
private void NewBlock()
{
EndBlock(false);
_blockByteCount = 0;
_resultsDataFileName = Path.Combine(_resultsBlockFolder, $"{_timelineId}_{_timelineRecordId}.{++_blockCount}");
_resultsBlockData = new FileStream(_resultsDataFileName, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite);
_resultsBlockWriter = new StreamWriter(_resultsBlockData, System.Text.Encoding.UTF8);
}
private void EndBlock(bool finalize)
{
if (_resultsBlockWriter != null)
{
_resultsBlockWriter.Flush();
_resultsBlockData.Flush();
_resultsBlockWriter.Dispose();
_resultsBlockWriter = null;
_resultsBlockData = null;
_jobServerQueue.QueueResultsUpload(_timelineRecordId, "ResultsLog", _resultsDataFileName, "Results.Core.Log", deleteSource: true, finalize, firstBlock: _resultsDataFileName.EndsWith(".1"), totalLines: _totalLines);
}
}
}
}

View File

@@ -1,98 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Results.Client;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(ResultServer))]
public interface IResultsServer : IRunnerService
{
void InitializeResultsClient(Uri uri, string token);
// logging and console
Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
CancellationToken cancellationToken);
Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize,
bool firstBlock, long lineCount, CancellationToken cancellationToken);
Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock,
long lineCount, CancellationToken cancellationToken);
Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
}
public sealed class ResultServer : RunnerService, IResultsServer
{
private ResultsHttpClient _resultsClient;
public void InitializeResultsClient(Uri uri, string token)
{
var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
}
public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadStepSummaryAsync(planId, jobId, stepId, file,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize,
bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsStepLogAsync(planId, jobId, stepId, file, finalize, firstBlock,
lineCount, cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock,
long lineCount, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsJobLogAsync(planId, jobId, file, finalize, firstBlock, lineCount,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
try
{
var timelineRecords = records.ToList();
return _resultsClient.UpdateWorkflowStepsAsync(planId, new List<TimelineRecord>(timelineRecords),
cancellationToken: cancellationToken);
}
catch (Exception ex)
{
// Log error, but continue as this call is best-effort
Trace.Info($"Failed to update steps status due to {ex.GetType().Name}");
Trace.Error(ex);
}
}
throw new InvalidOperationException("Results client is not initialized.");
}
}
}

View File

@@ -1,13 +1,11 @@
using System;
using System.Collections.Generic;
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using Sdk.RSWebApi.Contracts;
using GitHub.Services.WebApi;
using Sdk.WebApi.WebApi.RawClient;
namespace GitHub.Runner.Common
@@ -18,10 +16,6 @@ namespace GitHub.Runner.Common
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);
Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken token);
Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
}
public sealed class RunServer : RunnerService, IRunServer
@@ -35,7 +29,7 @@ namespace GitHub.Runner.Common
{
requestUri = serverUri;
_connection = VssUtil.CreateRawConnection(serverUri, credentials);
_connection = VssUtil.CreateRawConnection(new Uri(serverUri.Authority), credentials);
_runServiceHttpClient = await _connection.GetClientAsync<RunServiceHttpClient>();
_hasConnection = true;
}
@@ -61,24 +55,5 @@ namespace GitHub.Runner.Common
return jobMessage;
}
public Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken cancellationToken)
{
CheckConnection();
return RetryRequest(
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, cancellationToken), cancellationToken);
}
public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
{
CheckConnection();
var renewJobResponse = RetryRequest<RenewJobResponse>(
async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken);
if (renewJobResponse == null)
{
throw new TaskOrchestrationJobNotFoundException(jobId.ToString());
}
return renewJobResponse;
}
}
}

View File

@@ -1,237 +0,0 @@
using GitHub.DistributedTask.WebApi;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.WebApi;
using GitHub.Services.Common;
using GitHub.Runner.Sdk;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Linq;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(RunnerDotcomServer))]
public interface IRunnerDotcomServer : IRunnerService
{
Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName);
Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);
string GetGitHubRequestId(HttpResponseHeaders headers);
}
public enum RequestType
{
Get,
Post,
Patch,
Delete
}
public class RunnerDotcomServer : RunnerService, IRunnerDotcomServer
{
private ITerminal _term;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = hostContext.GetService<ITerminal>();
}
public async Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName = null)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
}
}
else if (path.Length == 2)
{
// repo or enterprise runner.
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}
var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
var agents = runnersList.ToTaskAgents();
if (string.IsNullOrEmpty(agentName))
{
return agents;
}
return agents.Where(x => string.Equals(x.Name, agentName, StringComparison.OrdinalIgnoreCase)).ToList();
}
public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups";
}
}
else if (path.Length == 2)
{
// repo or enterprise runner.
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}
var agentPools = await RetryRequest<RunnerGroupList>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
return agentPools?.ToAgentPoolList();
}
public async Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
{
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
string githubApiUrl;
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runners/register";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runners/register";
}
var bodyObject = new Dictionary<string, Object>()
{
{"url", githubUrl},
{"group_id", runnerGroupId},
{"name", agent.Name},
{"version", agent.Version},
{"updates_disabled", agent.DisableUpdate},
{"ephemeral", agent.Ephemeral},
{"labels", agent.Labels},
{"public_key", publicKey}
};
var body = new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json");
return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
}
private async Task<T> RetryRequest<T>(string githubApiUrl, string githubToken, RequestType requestType, int maxRetryAttemptsCount = 5, string errorMessage = null, StringContent body = null)
{
int retry = 0;
while (true)
{
retry++;
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
var responseStatus = System.Net.HttpStatusCode.OK;
try
{
HttpResponseMessage response = null;
if (requestType == RequestType.Get)
{
response = await httpClient.GetAsync(githubApiUrl);
}
else
{
response = await httpClient.PostAsync(githubApiUrl, body);
}
if (response != null)
{
responseStatus = response.StatusCode;
var githubRequestId = GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from '{requestType.ToString()} {githubApiUrl}' ({githubRequestId})");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<T>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from '{requestType.ToString()} {githubApiUrl}' (Request Id: {githubRequestId})");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
}
catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound)
{
Trace.Error($"{errorMessage} -- Atempt: {retry}");
Trace.Error(ex);
}
}
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
Trace.Info($"Retrying in {backOff.Seconds} seconds");
await Task.Delay(backOff);
}
}
public string GetGitHubRequestId(HttpResponseHeaders headers)
{
if (headers.TryGetValues("x-github-request-id", out var headerValues))
{
return headerValues.FirstOrDefault();
}
return string.Empty;
}
}
}

View File

@@ -68,19 +68,6 @@ namespace GitHub.Runner.Common
throw new InvalidOperationException(nameof(EstablishVssConnection));
}
protected async Task RetryRequest(Func<Task> func,
CancellationToken cancellationToken,
int maxRetryAttemptsCount = 5
)
{
async Task<Unit> wrappedFunc()
{
await func();
return Unit.Value;
}
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxRetryAttemptsCount);
}
protected async Task<T> RetryRequest<T>(Func<Task<T>> func,
CancellationToken cancellationToken,
int maxRetryAttemptsCount = 5
@@ -98,7 +85,7 @@ namespace GitHub.Runner.Common
// TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
catch (Exception ex) when (retryCount < maxRetryAttemptsCount)
{
Trace.Error("Catch exception during request");
Trace.Error("Catch exception during get full job message");
Trace.Error(ex);
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxRetryAttemptsCount - retryCount} attempt left.");

View File

@@ -1,8 +0,0 @@
// Represents absence of value.
namespace GitHub.Runner.Common
{
public readonly struct Unit
{
public static readonly Unit Value = default;
}
}

View File

@@ -1,14 +0,0 @@
namespace GitHub.Runner.Common.Util
{
using System;
using GitHub.DistributedTask.WebApi;
public static class MessageUtil
{
public static bool IsRunServiceJob(string messageType)
{
return string.Equals(messageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase);
}
}
}

View File

@@ -1,209 +0,0 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Runner.Common.Util;
using GitHub.Services.OAuth;
namespace GitHub.Runner.Listener
{
public sealed class BrokerMessageListener : RunnerService, IMessageListener
{
private RunnerSettings _settings;
private ITerminal _term;
private TimeSpan _getNextMessageRetryInterval;
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
private IBrokerServer _brokerServer;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = HostContext.GetService<ITerminal>();
_brokerServer = HostContext.GetService<IBrokerServer>();
}
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
{
await RefreshBrokerConnection();
return await Task.FromResult(true);
}
public async Task DeleteSessionAsync()
{
await Task.CompletedTask;
}
public void OnJobStatus(object sender, JobStatusEventArgs e)
{
Trace.Info("Received job status event. JobState: {0}", e.Status);
runnerStatus = e.Status;
try
{
_getMessagesTokenSource?.Cancel();
}
catch (ObjectDisposedException)
{
Trace.Info("_getMessagesTokenSource is already disposed.");
}
}
public async Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token)
{
bool encounteringError = false;
int continuousError = 0;
Stopwatch heartbeat = new();
heartbeat.Restart();
var maxRetryCount = 10;
while (true)
{
TaskAgentMessage message = null;
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
try
{
message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token, runnerStatus, BuildConstants.RunnerPackage.Version);
if (message == null)
{
continue;
}
return message;
}
catch (OperationCanceledException) when (_getMessagesTokenSource.Token.IsCancellationRequested && !token.IsCancellationRequested)
{
Trace.Info("Get messages has been cancelled using local token source. Continue to get messages with new status.");
continue;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info("Get next message has been cancelled.");
throw;
}
catch (TaskAgentAccessTokenExpiredException)
{
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
throw;
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during get next message.");
Trace.Error(ex);
if (!IsGetNextMessageExceptionRetriable(ex))
{
throw;
}
else
{
continuousError++;
//retry after a random backoff to avoid service throttling
//in case of there is a service error happened and all agents get kicked off of the long poll and all agent try to reconnect back at the same time.
if (continuousError <= 5)
{
// random backoff [15, 30]
_getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30), _getNextMessageRetryInterval);
}
else if (continuousError >= maxRetryCount)
{
throw;
}
else
{
// more aggressive backoff [30, 60]
_getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(30), TimeSpan.FromSeconds(60), _getNextMessageRetryInterval);
}
if (!encounteringError)
{
//print error only on the first consecutive error
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
encounteringError = true;
}
// re-create VssConnection before next retry
await RefreshBrokerConnection();
Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
await HostContext.Delay(_getNextMessageRetryInterval, token);
}
}
finally
{
_getMessagesTokenSource.Dispose();
}
if (message == null)
{
if (heartbeat.Elapsed > TimeSpan.FromMinutes(30))
{
Trace.Info($"No message retrieved within last 30 minutes.");
heartbeat.Restart();
}
else
{
Trace.Verbose($"No message retrieved.");
}
continue;
}
Trace.Info($"Message '{message.MessageId}' received.");
}
}
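// Broker messages do not need to be deleted explicitly; this is a no-op kept for IMessageListener compatibility.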
public async Task DeleteMessageAsync(TaskAgentMessage message)
{
await Task.CompletedTask;
}
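// Authorization, session-expired, and not-found failures are permanent; everything else is treated as transient and retried.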
private bool IsGetNextMessageExceptionRetriable(Exception ex)
{
if (ex is TaskAgentNotFoundException ||
ex is TaskAgentPoolNotFoundException ||
ex is TaskAgentSessionExpiredException ||
ex is AccessDeniedException ||
ex is VssUnauthorizedException)
{
Trace.Info($"Non-retriable exception: {ex.Message}");
return false;
}
else
{
Trace.Info($"Retriable exception: {ex.Message}");
return true;
}
}
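// Reload the runner settings and credentials, then reconnect to the broker endpoint (ServerUrlV2).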
private async Task RefreshBrokerConnection()
{
var configManager = HostContext.GetService<IConfigurationManager>();
_settings = configManager.LoadSettings();
if (_settings.ServerUrlV2 == null)
{
throw new InvalidOperationException("ServerUrlV2 is not set");
}
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials creds = credMgr.LoadCredentials();
await _brokerServer.ConnectAsync(new Uri(_settings.ServerUrlV2), creds);
}
}
}

View File

@@ -31,14 +31,12 @@ namespace GitHub.Runner.Listener.Configuration
{
private IConfigurationStore _store;
private IRunnerServer _runnerServer;
private IRunnerDotcomServer _dotcomServer;
private ITerminal _term;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_runnerServer = HostContext.GetService<IRunnerServer>();
_dotcomServer = HostContext.GetService<IRunnerDotcomServer>();
Trace.Verbose("Creating _store");
_store = hostContext.GetService<IConfigurationStore>();
Trace.Verbose("store created");
@@ -115,7 +113,6 @@ namespace GitHub.Runner.Listener.Configuration
ICredentialProvider credProvider = null;
VssCredentials creds = null;
_term.WriteSection("Authentication");
string registerToken = string.Empty;
while (true)
{
// When testing against a dev deployment of Actions Service, set this environment variable
@@ -133,11 +130,9 @@ namespace GitHub.Runner.Listener.Configuration
else
{
runnerSettings.GitHubUrl = inputUrl;
registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
var registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
runnerSettings.ServerUrl = authResult.TenantUrl;
runnerSettings.UseV2Flow = authResult.UseV2Flow;
_term.WriteLine($"Using V2 flow: {runnerSettings.UseV2Flow}");
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
}
@@ -181,11 +176,9 @@ namespace GitHub.Runner.Listener.Configuration
// We want to use the native CSP of the platform for storage, so we use the RSACSP directly
RSAParameters publicKey;
var keyManager = HostContext.GetService<IRSAKeyManager>();
string publicKeyXML;
using (var rsa = keyManager.CreateKey())
{
publicKey = rsa.ExportParameters(false);
publicKeyXML = rsa.ToXmlString(includePrivateParameters: false);
}
_term.WriteSection("Runner Registration");
@@ -193,17 +186,9 @@ namespace GitHub.Runner.Listener.Configuration
// If we have more than one runner group available, allow the user to specify which one to be added into
string poolName = null;
TaskAgentPool agentPool = null;
List<TaskAgentPool> agentPools;
if (runnerSettings.UseV2Flow)
{
agentPools = await _dotcomServer.GetRunnerGroupsAsync(runnerSettings.GitHubUrl, registerToken);
}
else
{
agentPools = await _runnerServer.GetAgentPoolsAsync();
}
List<TaskAgentPool> agentPools = await _runnerServer.GetAgentPoolsAsync();
TaskAgentPool defaultPool = agentPools?.Where(x => x.IsInternal).FirstOrDefault();
if (agentPools?.Where(x => !x.IsHosted).Count() > 0)
{
poolName = command.GetRunnerGroupName(defaultPool?.Name);
@@ -241,16 +226,8 @@ namespace GitHub.Runner.Listener.Configuration
var userLabels = command.GetLabels();
_term.WriteLine();
List<TaskAgent> agents;
if (runnerSettings.UseV2Flow)
{
agents = await _dotcomServer.GetRunnersAsync(runnerSettings.PoolId, runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
}
else
{
agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
}
var agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
Trace.Verbose("Returns {0} agents", agents.Count);
agent = agents.FirstOrDefault();
if (agent != null)
@@ -297,23 +274,7 @@ namespace GitHub.Runner.Listener.Configuration
try
{
if (runnerSettings.UseV2Flow)
{
var runner = await _dotcomServer.AddRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
agent.Id = runner.Id;
agent.Authorization = new TaskAgentAuthorization()
{
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
};
}
else
{
agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
}
agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
if (command.DisableUpdate &&
command.DisableUpdate != agent.DisableUpdate)
{
@@ -364,28 +325,24 @@ namespace GitHub.Runner.Listener.Configuration
}
// Test the runner connection to detect any potential connection issues, like local clock skew that causes the OAuth token to appear expired.
if (!runnerSettings.UseV2Flow)
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials credential = credMgr.LoadCredentials();
try
{
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials credential = credMgr.LoadCredentials();
try
{
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
// ConnectAsync() hits _apis/connectionData which is an anonymous endpoint
// Need to hit an authenticate endpoint to trigger OAuth token exchange.
await _runnerServer.GetAgentPoolsAsync();
_term.WriteSuccessMessage("Runner connection is good");
}
catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is"))
{
// there are two exception messages the server sends that indicate clock skew.
// 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
Trace.Error("Catch exception during test agent connection.");
Trace.Error(ex);
throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
}
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
// ConnectAsync() hits _apis/connectionData which is an anonymous endpoint
// Need to hit an authenticate endpoint to trigger OAuth token exchange.
await _runnerServer.GetAgentPoolsAsync();
_term.WriteSuccessMessage("Runner connection is good");
}
catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is"))
{
// there are two exception messages the server sends that indicate clock skew.
// 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
Trace.Error("Catch exception during test agent connection.");
Trace.Error(ex);
throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
}
_term.WriteSection("Runner settings");
@@ -695,7 +652,7 @@ namespace GitHub.Runner.Listener.Configuration
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
responseStatus = response.StatusCode;
var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
var githubRequestId = GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
@@ -758,7 +715,7 @@ namespace GitHub.Runner.Listener.Configuration
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
responseStatus = response.StatusCode;
var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
var githubRequestId = GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
@@ -787,5 +744,14 @@ namespace GitHub.Runner.Listener.Configuration
}
return null;
}
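// Read the x-github-request-id header from the response, if present; returns an empty string otherwise.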
private string GetGitHubRequestId(HttpResponseHeaders headers)
{
if (headers.TryGetValues("x-github-request-id", out var headerValues))
{
return headerValues.FirstOrDefault();
}
return string.Empty;
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Runner.Common;
@@ -20,8 +20,8 @@ namespace GitHub.Runner.Listener.Configuration
{
public static readonly Dictionary<string, Type> CredentialTypes = new(StringComparer.OrdinalIgnoreCase)
{
{ Constants.Configuration.OAuth, typeof(OAuthCredential) },
{ Constants.Configuration.OAuthAccessToken, typeof(OAuthAccessTokenCredential) },
{ Constants.Configuration.OAuth, typeof(OAuthCredential)},
{ Constants.Configuration.OAuthAccessToken, typeof(OAuthAccessTokenCredential)},
};
public ICredentialProvider GetCredentialProvider(string credType)
@@ -93,9 +93,6 @@ namespace GitHub.Runner.Listener.Configuration
[DataMember(Name = "token")]
public string Token { get; set; }
[DataMember(Name = "use_v2_flow")]
public bool UseV2Flow { get; set; }
public VssCredentials ToVssCredentials()
{
ArgUtil.NotNullOrEmpty(TokenSchema, nameof(TokenSchema));

View File

@@ -1,3 +0,0 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]

View File

@@ -7,7 +7,6 @@ using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
@@ -59,8 +58,6 @@ namespace GitHub.Runner.Listener
public event EventHandler<JobStatusEventArgs> JobStatus;
private bool _isRunServiceJob;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -89,8 +86,6 @@ namespace GitHub.Runner.Listener
{
Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
_isRunServiceJob = MessageUtil.IsRunServiceJob(jobRequestMessage.MessageType);
WorkerDispatcher currentDispatch = null;
if (_jobDispatchedQueue.Count > 0)
{
@@ -244,13 +239,6 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
Trace.Error($"We are not yet checking the state of jobrequest {jobDispatch.JobId} status. Cancel running worker right away.");
jobDispatch.WorkerCancellationTokenSource.Cancel();
return;
}
// based on the current design, the server will only send one job to a given runner at a time.
// if the runner receives a new job request while a previous job request is still running, this typically indicates one of two situations:
// 1. a runner bug caused a server/runner mismatch on the state of the job request, e.g. the runner didn't renew the job request
@@ -379,11 +367,9 @@ namespace GitHub.Runner.Listener
long requestId = message.RequestId;
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here for compat
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(message, systemConnection, _poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// wait till first renew succeed or job request is cancelled
// not even start worker if the first renew fail
@@ -440,7 +426,7 @@ namespace GitHub.Runner.Listener
{
workerOutput.Add(stdout.Data);
}
if (printToStdout)
{
term.WriteLine(stdout.Data, skipTracing: true);
@@ -522,6 +508,7 @@ namespace GitHub.Runner.Listener
// we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
@@ -544,8 +531,11 @@ namespace GitHub.Runner.Listener
detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
var jobServer = await InitializeJobServerAsync(systemConnection);
await LogWorkerProcessUnhandledException(jobServer, message, detailInfo);
// Go ahead and finish the job with result 'Failed' if the STDERR from the worker is System.IO.IOException, since it typically means we are running out of disk space.
@@ -685,128 +675,9 @@ namespace GitHub.Runner.Listener
}
}
internal async Task RenewJobRequestAsync(Pipelines.AgentJobRequestMessage message, ServiceEndpoint systemConnection, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
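// Route the renewal to the run service (broker) or to the legacy runner service, depending on how the job was queued (_isRunServiceJob).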
if (this._isRunServiceJob)
{
var runServer = await GetRunServerAsync(systemConnection);
await RenewJobRequestAsync(runServer, message.Plan.PlanId, message.JobId, firstJobRequestRenewed, token);
}
else
{
var runnerServer = HostContext.GetService<IRunnerServer>();
await RenewJobRequestAsync(runnerServer, poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, token);
}
}
private async Task RenewJobRequestAsync(IRunServer runServer, Guid planId, Guid jobId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
DateTime? lockedUntil = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
// renew the lock while the job is running.
// stop renewing only if the cancellation token for the lock renewal task has been signaled or the exception persists after retries.
while (!token.IsCancellationRequested)
{
try
{
var renewResponse = await runServer.RenewJobAsync(planId, jobId, token);
lockedUntil = renewResponse.LockedUntil;
Trace.Info($"Successfully renewed job {jobId}, job is valid till {lockedUntil}");
if (!firstJobRequestRenewed.Task.IsCompleted)
{
// fire first renew succeed event.
firstJobRequestRenewed.TrySetResult(0);
}
if (encounteringError > 0)
{
encounteringError = 0;
HostContext.WritePerfCounter("JobRenewRecovered");
}
// renew again after 60 sec delay
await HostContext.Delay(TimeSpan.FromSeconds(60), token);
}
catch (TaskOrchestrationJobNotFoundException)
{
// no need for retry. the job is not valid anymore.
Trace.Info($"TaskAgentJobNotFoundException received when renew job {jobId}, job is no longer valid, stop renew job request.");
return;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
// OperationCanceledException may be caused by an HTTP timeout or _lockRenewalTokenSource.Cancel();
// Stop renewing only when the cancellation token is fired.
Trace.Info($"job renew has been cancelled, stop renew job {jobId}.");
return;
}
catch (Exception ex)
{
Trace.Error($"Catch exception during renew runner job {jobId}.");
Trace.Error(ex);
encounteringError++;
// retry
TimeSpan remainingTime = TimeSpan.Zero;
if (!firstJobRequestRenewed.Task.IsCompleted)
{
// retry 5 times every 10 sec for the first renew
if (firstRenewRetryLimit-- > 0)
{
remainingTime = TimeSpan.FromSeconds(10);
}
}
else
{
// retry until we reach lockedUntil + 5 mins extra buffer.
remainingTime = (lockedUntil ?? DateTime.UtcNow) + TimeSpan.FromMinutes(5) - DateTime.UtcNow;
}
if (remainingTime > TimeSpan.Zero)
{
TimeSpan delayTime;
if (!firstJobRequestRenewed.Task.IsCompleted)
{
Trace.Info($"Retrying lock renewal for job {jobId}. The first job renew request has failed.");
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10));
}
else
{
Trace.Info($"Retrying lock renewal for job {jobId}. Job is valid until {request.LockedUntil.Value}.");
if (encounteringError > 5)
{
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30));
}
else
{
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
}
}
try
{
// back-off before next retry.
await HostContext.Delay(delayTime, token);
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info($"job renew has been cancelled, stop renew job {jobId}.");
}
}
else
{
Trace.Info($"Lock renewal has run out of retry, stop renew lock for job {jobId}.");
HostContext.WritePerfCounter("JobRenewReachLimit");
return;
}
}
}
}
private async Task RenewJobRequestAsync(IRunnerServer runnerServer, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
var runnerServer = HostContext.GetService<IRunnerServer>();
TaskAgentJobRequest request = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
@@ -969,93 +840,90 @@ namespace GitHub.Runner.Listener
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection));
ArgUtil.NotNull(systemConnection, nameof(systemConnection));
var server = await InitializeJobServerAsync(systemConnection);
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
if (server is IJobServer jobServer)
await jobServer.ConnectAsync(jobConnection);
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
var updatedRecords = new List<TimelineRecord>();
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
var logRecords = new Dictionary<Guid, TimelineRecord>();
foreach (var log in logs)
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
var updatedRecords = new List<TimelineRecord>();
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
var logRecords = new Dictionary<Guid, TimelineRecord>();
foreach (var log in logs)
var logName = Path.GetFileNameWithoutExtension(log);
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
if (logNameParts.Length != 3)
{
var logName = Path.GetFileNameWithoutExtension(log);
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
if (logNameParts.Length != 3)
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var logPageSeperator = logName.IndexOf('_');
var logRecordId = Guid.Empty;
var pageNumber = 0;
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var logPageSeperator = logName.IndexOf('_');
var logRecordId = Guid.Empty;
var pageNumber = 0;
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
{
Trace.Warning($"log file '{log}' is not belongs to current job");
continue;
}
if (!Guid.TryParse(logNameParts[1], out logRecordId))
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
if (!int.TryParse(logNameParts[2], out pageNumber))
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
if (record != null)
{
if (!logPages.ContainsKey(record.Id))
{
logPages[record.Id] = new Dictionary<int, string>();
logRecords[record.Id] = record;
}
logPages[record.Id][pageNumber] = log;
}
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
{
Trace.Warning($"log file '{log}' is not belongs to current job");
continue;
}
foreach (var pages in logPages)
if (!Guid.TryParse(logNameParts[1], out logRecordId))
{
var record = logRecords[pages.Key];
if (record.Log == null)
{
// Create the log
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
// Need to post timeline record updates to reflect the log creation
updatedRecords.Add(record.Clone());
}
for (var i = 1; i <= pages.Value.Count; i++)
{
var logFile = pages.Value[i];
// Upload the contents
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
}
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
IOUtil.DeleteFile(logFile);
}
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
if (updatedRecords.Count > 0)
if (!int.TryParse(logNameParts[2], out pageNumber))
{
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
if (record != null)
{
if (!logPages.ContainsKey(record.Id))
{
logPages[record.Id] = new Dictionary<int, string>();
logRecords[record.Id] = record;
}
logPages[record.Id][pageNumber] = log;
}
}
else
foreach (var pages in logPages)
{
Trace.Info("Job server does not support log upload yet.");
var record = logRecords[pages.Key];
if (record.Log == null)
{
// Create the log
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
// Need to post timeline record updates to reflect the log creation
updatedRecords.Add(record.Clone());
}
for (var i = 1; i <= pages.Value.Count; i++)
{
var logFile = pages.Value[i];
// Upload the contents
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
}
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
IOUtil.DeleteFile(logFile);
}
}
if (updatedRecords.Count > 0)
{
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
}
}
catch (Exception ex)
@@ -1075,12 +943,6 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
Trace.Verbose($"Skip FinishAgentRequest call from Listener because MessageType is {message.MessageType}");
return;
}
var runnerServer = HostContext.GetService<IRunnerServer>();
int completeJobRequestRetryLimit = 5;
List<Exception> exceptions = new();
@@ -1117,117 +979,66 @@ namespace GitHub.Runner.Listener
}
// log an error issue to job level timeline record
private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string errorMessage)
private async Task LogWorkerProcessUnhandledException(IJobServer jobServer, Pipelines.AgentJobRequestMessage message, string errorMessage)
{
if (server is IJobServer jobServer)
try
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
try
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
try
if (!string.IsNullOrEmpty(errorMessage) &&
message.Variables.TryGetValue("DistributedTask.EnableRunnerIPCDebug", out var enableRunnerIPCDebug) &&
StringUtil.ConvertToBoolean(enableRunnerIPCDebug.Value))
{
if (!string.IsNullOrEmpty(errorMessage) &&
message.Variables.TryGetValue("DistributedTask.EnableRunnerIPCDebug", out var enableRunnerIPCDebug) &&
StringUtil.ConvertToBoolean(enableRunnerIPCDebug.Value))
// the trace should be best effort and not affect any job result
var match = _invalidJsonRegex.Match(errorMessage);
if (match.Success &&
match.Groups.Count == 2)
{
// the trace should be best effort and not affect any job result
var match = _invalidJsonRegex.Match(errorMessage);
if (match.Success &&
match.Groups.Count == 2)
{
var jsonPosition = int.Parse(match.Groups[1].Value);
var serializedJobMessage = JsonUtility.ToString(message);
var originalJson = serializedJobMessage.Substring(jsonPosition - 10, 20);
errorMessage = $"Runner sent Json at position '{jsonPosition}': {originalJson} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(originalJson))})\n{errorMessage}";
}
var jsonPosition = int.Parse(match.Groups[1].Value);
var serializedJobMessage = JsonUtility.ToString(message);
var originalJson = serializedJobMessage.Substring(jsonPosition - 10, 20);
errorMessage = $"Runner sent Json at position '{jsonPosition}': {originalJson} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(originalJson))})\n{errorMessage}";
}
}
catch (Exception ex)
{
Trace.Error(ex);
errorMessage = $"Fail to check json IPC error: {ex.Message}\n{errorMessage}";
}
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
errorMessage = $"Fail to check json IPC error: {ex.Message}\n{errorMessage}";
}
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
else
catch (Exception ex)
{
Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", errorMessage);
return;
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
}
}
// raise job completed event to fail the job.
private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message)
private async Task ForceFailJob(IJobServer jobServer, Pipelines.AgentJobRequestMessage message)
{
if (server is IJobServer jobServer)
try
{
try
{
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
else if (server is IRunServer runServer)
catch (Exception ex)
{
try
{
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
}
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
else
{
throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported.");
}
}
private async Task<IRunnerService> InitializeJobServerAsync(ServiceEndpoint systemConnection)
{
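// Run-service jobs use IRunServer; legacy jobs connect an IJobServer over a Vss connection.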
if (this._isRunServiceJob)
{
return await GetRunServerAsync(systemConnection);
}
else
{
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
return jobServer;
}
}
private async Task<IRunServer> GetRunServerAsync(ServiceEndpoint systemConnection)
{
var runServer = HostContext.GetService<IRunServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
await runServer.ConnectAsync(systemConnection.Url, jobServerCredential);
return runServer;
}
private class WorkerDispatcher : IDisposable

View File

@@ -182,7 +182,7 @@ namespace GitHub.Runner.Listener
try
{
_getMessagesTokenSource?.Cancel();
}
}
catch (ObjectDisposedException)
{
Trace.Info("_getMessagesTokenSource is already disposed.");
@@ -245,10 +245,6 @@ namespace GitHub.Runner.Listener
_accessTokenRevoked = true;
throw;
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during get next message.");
@@ -293,7 +289,7 @@ namespace GitHub.Runner.Listener
await HostContext.Delay(_getNextMessageRetryInterval, token);
}
}
finally
finally
{
_getMessagesTokenSource.Dispose();
}

View File

@@ -6,7 +6,6 @@ using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Listener
{
@@ -59,7 +58,7 @@ namespace GitHub.Runner.Listener
terminal.WriteLine("This runner version is built for Windows. Please install a correct build for your OS.");
return Constants.Runner.ReturnCode.TerminatedError;
}
#if ARM64
#if ARM64
// A little hacky, but windows gives no way to differentiate between windows 10 and 11.
// By default only 11 supports native x64 app emulation on arm, so we only want to support windows 11
// https://docs.microsoft.com/en-us/windows/arm/overview#build-windows-apps-that-run-on-arm
@@ -70,7 +69,7 @@ namespace GitHub.Runner.Listener
terminal.WriteLine("Win-arm64 runners require windows 11 or later. Please upgrade your operating system.");
return Constants.Runner.ReturnCode.TerminatedError;
}
#endif
#endif
break;
default:
terminal.WriteLine($"Running the runner on this platform is not supported. The current platform is {RuntimeInformation.OSDescription} and it was built for {Constants.Runner.Platform.ToString()}.");
@@ -138,12 +137,6 @@ namespace GitHub.Runner.Listener
}
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
terminal.WriteError($"An error occured: {e.Message}");
trace.Error(e);
return Constants.Runner.ReturnCode.TerminatedError;
}
catch (Exception e)
{
terminal.WriteError($"An error occurred: {e.Message}");

View File

@@ -4,13 +4,11 @@ using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Listener.Check;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
@@ -138,7 +136,7 @@ namespace GitHub.Runner.Listener
if (command.Remove)
{
// only remove local config files and exit
if (command.RemoveLocalConfig)
if(command.RemoveLocalConfig)
{
configManager.DeleteLocalRunnerConfig();
return Constants.Runner.ReturnCode.Success;
@@ -211,16 +209,10 @@ namespace GitHub.Runner.Listener
foreach (var config in jitConfig)
{
var configFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), config.Key);
var configContent = Convert.FromBase64String(config.Value);
#if OS_WINDOWS
if (configFile == HostContext.GetConfigFile(WellKnownConfigFile.RSACredentials))
{
configContent = ProtectedData.Protect(configContent, null, DataProtectionScope.LocalMachine);
}
#endif
File.WriteAllBytes(configFile, configContent);
var configContent = Encoding.UTF8.GetString(Convert.FromBase64String(config.Value));
File.WriteAllText(configFile, configContent, Encoding.UTF8);
File.SetAttributes(configFile, File.GetAttributes(configFile) | FileAttributes.Hidden);
Trace.Info($"Saved {configContent.Length} bytes to '{configFile}'.");
Trace.Info($"Save {configContent.Length} chars to '{configFile}'.");
}
}
catch (Exception ex)
@@ -339,26 +331,13 @@ namespace GitHub.Runner.Listener
}
}
private IMessageListener GetMessageListener(RunnerSettings settings)
{
if (settings.UseV2Flow)
{
Trace.Info($"Using BrokerMessageListener");
var brokerListener = new BrokerMessageListener();
brokerListener.Initialize(HostContext);
return brokerListener;
}
return HostContext.GetService<IMessageListener>();
}
//create worker manager, create message listener and start listening to the queue
private async Task<int> RunAsync(RunnerSettings settings, bool runOnce = false)
{
try
{
Trace.Info(nameof(RunAsync));
_listener = GetMessageListener(settings);
_listener = HostContext.GetService<IMessageListener>();
if (!await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken))
{
return Constants.Runner.ReturnCode.TerminatedError;
@@ -523,7 +502,7 @@ namespace GitHub.Runner.Listener
}
}
// Broker flow
else if (MessageUtil.IsRunServiceJob(message.MessageType))
else if (string.Equals(message.MessageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase))
{
if (autoUpdateInProgress || runOnceJobReceived)
{

View File

@@ -73,7 +73,7 @@ namespace GitHub.Runner.Sdk
{
var headerValues = new List<ProductInfoHeaderValue>();
headerValues.Add(new ProductInfoHeaderValue($"GitHubActionsRunner-Plugin", BuildConstants.RunnerPackage.Version));
headerValues.Add(new ProductInfoHeaderValue($"({StringUtil.SanitizeUserAgentHeader(RuntimeInformation.OSDescription)})"));
headerValues.Add(new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})"));
if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)
{

View File

@@ -164,6 +164,7 @@ namespace GitHub.Runner.Sdk
{
continue;
}
_noProxyList.Add(noProxyInfo);
}
}
@@ -206,11 +207,6 @@ namespace GitHub.Runner.Sdk
{
foreach (var noProxy in _noProxyList)
{
// bypass on wildcard no_proxy
if (string.Equals(noProxy.Host, "*", StringComparison.OrdinalIgnoreCase))
{
return true;
}
var matchHost = false;
var matchPort = false;

View File

@@ -40,19 +40,10 @@ namespace GitHub.Runner.Sdk
File.WriteAllText(path, StringUtil.ConvertToJson(obj), Encoding.UTF8);
}
public static T LoadObject<T>(string path, bool required = false)
public static T LoadObject<T>(string path)
{
string json = File.ReadAllText(path, Encoding.UTF8);
if (required && string.IsNullOrEmpty(json))
{
throw new ArgumentNullException($"File {path} is empty");
}
T result = StringUtil.ConvertFromJson<T>(json);
if (required && result == null)
{
throw new ArgumentException("Converting json to object resulted in a null value");
}
return result;
return StringUtil.ConvertFromJson<T>(json);
}
public static string GetSha256Hash(string path)

View File

@@ -123,12 +123,5 @@ namespace GitHub.Runner.Sdk
{
return value?.Substring(0, Math.Min(value.Length, count));
}
// Fixes format violations e.g. https://github.com/actions/runner/issues/2165
public static string SanitizeUserAgentHeader(string header)
{
return header.Replace("(", "[").Replace(")", "]").Trim();
}
}
}

View File

@@ -6,16 +6,9 @@ namespace GitHub.Runner.Sdk
{
public static bool IsHostedServer(UriBuilder gitHubUrl)
{
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_FORCE_GHES")))
{
return false;
}
return
string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase) ||
gitHubUrl.Host.EndsWith(".ghe.com", StringComparison.OrdinalIgnoreCase);
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
}
public static Uri GetCredentialEmbeddedUrl(Uri baseUrl, string username, string password)

View File

@@ -19,7 +19,7 @@ namespace GitHub.Runner.Sdk
{
var headerValues = new List<ProductInfoHeaderValue>();
headerValues.AddRange(additionalUserAgents);
headerValues.Add(new ProductInfoHeaderValue($"({StringUtil.SanitizeUserAgentHeader(RuntimeInformation.OSDescription)})"));
headerValues.Add(new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})"));
if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)
{
@@ -116,7 +116,7 @@ namespace GitHub.Runner.Sdk
// settings are applied to an HttpRequestMessage.
settings.AcceptLanguages.Remove(CultureInfo.InvariantCulture);
RawConnection connection = new(serverUri, new RawHttpMessageHandler(credentials.Federated, settings), additionalDelegatingHandler);
RawConnection connection = new(serverUri, new RawHttpMessageHandler(credentials.ToOAuthCredentials(), settings), additionalDelegatingHandler);
return connection;
}

View File

@@ -25,6 +25,7 @@ namespace GitHub.Runner.Worker
public interface IActionRunner : IStep, IRunnerService
{
ActionRunStage Stage { get; set; }
bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context);
Pipelines.ActionStep Action { get; set; }
}
@@ -284,67 +285,25 @@ namespace GitHub.Runner.Worker
}
/// <summary>
/// Attempts to update the DisplayName.
/// As the "Try..." name implies, this method should never throw an exception.
/// Returns true if the DisplayName is already present or it was successfully updated.
/// </summary>
public bool TryUpdateDisplayName(out bool updated)
{
updated = false;
// REVIEW: This try/catch can be removed if some future implementation of EvaluateDisplayName and UpdateTimelineRecordDisplayName
// can make reasonable guarantees that they won't throw an exception.
try
{
// This attempt is only worthwhile at the "Main" stage.
// When the job starts, there's an initial attempt to evaluate the DisplayName. (see JobExtension::InitializeJob)
// During the "Pre" stage, we expect that no contexts will have changed since the initial evaluation.
// "Main" stage is handled here.
// During the "Post" stage, it no longer matters.
if (this.Stage == ActionRunStage.Main && EvaluateDisplayName(this.ExecutionContext.ExpressionValues, this.ExecutionContext, out updated))
{
if (updated)
{
this.ExecutionContext.UpdateTimelineRecordDisplayName(this.DisplayName);
}
}
}
catch (Exception ex)
{
Trace.Warning("Caught exception while attempting to evaulate/update the step's DisplayName. Exception Details: {0}", ex);
}
// For consistency with other implementations of TryUpdateDisplayName we use !string.IsNullOrEmpty below,
// but note that (at the time of this writing) ActionRunner::DisplayName::get always returns a non-empty string due to its fallback logic.
// In other words, the net effect is that this particular implementation of TryUpdateDisplayName will always return true.
return !string.IsNullOrEmpty(this.DisplayName);
}
/// <summary>
/// Attempts to evaluate the DisplayName of this IActionRunner.
/// Returns true if the DisplayName is already present or it was successfully evaluated.
/// </summary>
public bool EvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context, out bool updated)
public bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context)
{
ArgUtil.NotNull(context, nameof(context));
ArgUtil.NotNull(Action, nameof(Action));
updated = false;
// If we have already expanded the display name, don't bother attempting [re-]expansion.
// If we have already expanded the display name, there is no need to expand it again
// TODO: Remove the ShouldEvaluateDisplayName check and field post m158 deploy, we should do it by default once the server is updated
if (_didFullyEvaluateDisplayName || !string.IsNullOrEmpty(Action.DisplayName))
{
return true;
return false;
}
_displayName = GenerateDisplayName(Action, contextData, context, out bool didFullyEvaluate);
bool didFullyEvaluate;
_displayName = GenerateDisplayName(Action, contextData, context, out didFullyEvaluate);
// If we evaluated, fully mask any secrets
// If we evaluated fully mask any secrets
if (didFullyEvaluate)
{
_displayName = HostContext.SecretMasker.MaskSecrets(_displayName);
updated = true;
}
context.Debug($"Set step '{Action.Name}' display name to: '{_displayName}'");
_didFullyEvaluateDisplayName = didFullyEvaluate;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
@@ -6,7 +6,6 @@ using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
@@ -81,7 +80,7 @@ namespace GitHub.Runner.Worker
// logging
long Write(string tag, string message);
void QueueAttachFile(string type, string name, string filePath);
void QueueSummaryFile(string name, string filePath, Guid stepRecordId);
void QueueSummaryFile(string name, string filePath, Guid stepRecordId);
// timeline record update methods
void Start(string currentOperation = null);
@@ -438,17 +437,6 @@ namespace GitHub.Runner.Worker
PublishStepTelemetry();
var stepResult = new StepResult();
stepResult.ExternalID = _record.Id;
stepResult.Conclusion = _record.Result ?? TaskResult.Succeeded;
stepResult.Status = _record.State;
stepResult.Number = _record.Order;
stepResult.Name = _record.Name;
stepResult.StartedAt = _record.StartTime;
stepResult.CompletedAt = _record.FinishTime;
Global.StepsResult.Add(stepResult);
if (Root != this)
{
// only dispose TokenSource for step level ExecutionContext
@@ -722,9 +710,6 @@ namespace GitHub.Runner.Worker
// ActionsStepTelemetry for entire job
Global.StepsTelemetry = new List<ActionsStepTelemetry>();
// Steps results for entire job
Global.StepsResult = new List<StepResult>();
// Job Outputs
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
@@ -871,7 +856,8 @@ namespace GitHub.Runner.Worker
{
throw new FileNotFoundException($"Can't upload (name:{name}) file: {filePath}. File does not exist.");
}
_jobServerQueue.QueueResultsUpload(stepRecordId, name, filePath, ChecksAttachmentType.StepSummary, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0);
_jobServerQueue.QueueSummaryUpload(stepRecordId, name, filePath, deleteSource: false);
}
// Add OnMatcherChanged

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Worker.Container;
@@ -17,7 +16,6 @@ namespace GitHub.Runner.Worker
public IList<String> FileTable { get; set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; set; }
public List<ActionsStepTelemetry> StepsTelemetry { get; set; }
public List<StepResult> StepsResult { get; set; }
public List<JobTelemetry> JobTelemetry { get; set; }
public TaskOrchestrationPlanReference Plan { get; set; }
public List<string> PrependPath { get; set; }

View File

@@ -306,13 +306,13 @@ namespace GitHub.Runner.Worker
}
}
actionRunner.EvaluateDisplayName(contextData, context, out _);
actionRunner.TryEvaluateDisplayName(contextData, context);
jobSteps.Add(actionRunner);
if (prepareResult.PreStepTracker.TryGetValue(step.Id, out var preStep))
{
Trace.Info($"Adding pre-{action.DisplayName}.");
preStep.EvaluateDisplayName(contextData, context, out _);
preStep.TryEvaluateDisplayName(contextData, context);
preStep.DisplayName = $"Pre {preStep.DisplayName}";
preJobSteps.Add(preStep);
}
@@ -328,10 +328,10 @@ namespace GitHub.Runner.Worker
if (message.ContextData.TryGetValue("inputs", out var pipelineContextData))
{
var inputs = pipelineContextData.AssertDictionary("inputs");
if (inputs.Any())
if (inputs.Any())
{
context.Output($"##[group] Inputs");
foreach (var input in inputs)
foreach (var input in inputs)
{
context.Output($" {input.Key}: {input.Value}");
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
namespace GitHub.Runner.Worker
{
@@ -33,18 +32,5 @@ namespace GitHub.Runner.Worker
{
await _runAsync(ExecutionContext, _data);
}
public bool TryUpdateDisplayName(out bool updated)
{
updated = false;
return !string.IsNullOrEmpty(this.DisplayName);
}
public bool EvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context, out bool updated)
{
updated = false;
return !string.IsNullOrEmpty(this.DisplayName);
}
}
}

View File

@@ -6,7 +6,7 @@ using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
@@ -20,7 +20,7 @@ namespace GitHub.Runner.Worker
[ServiceLocator(Default = typeof(JobRunner))]
public interface IJobRunner : IRunnerService
{
Task<TaskResult> RunAsync(AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken);
Task<TaskResult> RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken);
}
public sealed class JobRunner : RunnerService, IJobRunner
@@ -29,7 +29,7 @@ namespace GitHub.Runner.Worker
private RunnerSettings _runnerSettings;
private ITempDirectoryManager _tempDirectoryManager;
public async Task<TaskResult> RunAsync(AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken)
public async Task<TaskResult> RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken)
{
// Validate parameters.
Trace.Entering();
@@ -40,34 +40,21 @@ namespace GitHub.Runner.Worker
Trace.Info("Job ID {0}", message.JobId);
DateTime jobStartTimeUtc = DateTime.UtcNow;
IRunnerService server = null;
ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
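// Run-service jobs report results directly to the run service; legacy jobs connect to the job server and use the throttling-aware job server queue.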
if (MessageUtil.IsRunServiceJob(message.MessageType))
{
var runServer = HostContext.GetService<IRunServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
await runServer.ConnectAsync(systemConnection.Url, jobServerCredential);
server = runServer;
}
else
{
// Setup the job server and job server queue.
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
Uri jobServerUrl = systemConnection.Url;
Trace.Info($"Creating job server with URL: {jobServerUrl}");
// jobServerQueue is the throttling reporter.
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, new DelegatingHandler[] { new ThrottlingReportHandler(_jobServerQueue) });
await jobServer.ConnectAsync(jobConnection);
_jobServerQueue.Start(message);
server = jobServer;
}
// Setup the job server and job server queue.
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
Uri jobServerUrl = systemConnection.Url;
Trace.Info($"Creating job server with URL: {jobServerUrl}");
// jobServerQueue is the throttling reporter.
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, new DelegatingHandler[] { new ThrottlingReportHandler(_jobServerQueue) });
await jobServer.ConnectAsync(jobConnection);
_jobServerQueue.Start(message);
HostContext.WritePerfCounter($"WorkerJobServerQueueStarted_{message.RequestId.ToString()}");
IExecutionContext jobContext = null;
@@ -112,7 +99,7 @@ namespace GitHub.Runner.Worker
{
Trace.Error(ex);
jobContext.Error(ex);
return await CompleteJobAsync(server, jobContext, message, TaskResult.Failed);
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Failed);
}
if (jobContext.Global.WriteDebug)
@@ -149,7 +136,7 @@ namespace GitHub.Runner.Worker
// don't log error issue to job ExecutionContext, since server owns the job level issue
Trace.Error($"Job is cancelled during initialize.");
Trace.Error($"Caught exception: {ex}");
return await CompleteJobAsync(server, jobContext, message, TaskResult.Canceled);
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Canceled);
}
catch (Exception ex)
{
@@ -157,7 +144,7 @@ namespace GitHub.Runner.Worker
// don't log error issue to job ExecutionContext, since server owns the job level issue
Trace.Error($"Job initialize failed.");
Trace.Error($"Caught exception from {nameof(jobExtension.InitializeJob)}: {ex}");
return await CompleteJobAsync(server, jobContext, message, TaskResult.Failed);
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Failed);
}
// trace out all steps
@@ -194,7 +181,7 @@ namespace GitHub.Runner.Worker
// Log the error and fail the job.
Trace.Error($"Caught exception from job steps {nameof(StepsRunner)}: {ex}");
jobContext.Error(ex);
return await CompleteJobAsync(server, jobContext, message, TaskResult.Failed);
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Failed);
}
finally
{
@@ -205,7 +192,7 @@ namespace GitHub.Runner.Worker
Trace.Info($"Job result after all job steps finish: {jobContext.Result ?? TaskResult.Succeeded}");
Trace.Info("Completing the job execution context.");
return await CompleteJobAsync(server, jobContext, message);
return await CompleteJobAsync(jobServer, jobContext, message);
}
finally
{
@@ -219,66 +206,6 @@ namespace GitHub.Runner.Worker
}
}
private async Task<TaskResult> CompleteJobAsync(IRunnerService server, IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, TaskResult? taskResult = null)
{
if (server is IRunServer runServer)
{
return await CompleteJobAsync(runServer, jobContext, message, taskResult);
}
else if (server is IJobServer jobServer)
{
return await CompleteJobAsync(jobServer, jobContext, message, taskResult);
}
else
{
throw new NotSupportedException();
}
}
private async Task<TaskResult> CompleteJobAsync(IRunServer runServer, IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, TaskResult? taskResult = null)
{
jobContext.Debug($"Finishing: {message.JobDisplayName}");
TaskResult result = jobContext.Complete(taskResult);
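// Surface a job-level warning for any actions that ran on Node 12, which is past end of life.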
if (jobContext.Global.Variables.TryGetValue("Node12ActionsWarnings", out var node12Warnings))
{
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node12Warnings));
jobContext.Warning(string.Format(Constants.Runner.Node12DetectedAfterEndOfLife, actions));
}
// Make sure to clean up temp after file uploads since pending file uploads may still use the TEMP dir.
_tempDirectoryManager?.CleanupTempDirectory();
// Load any upgrade telemetry
LoadFromTelemetryFile(jobContext.Global.JobTelemetry);
// Make sure we don't submit secrets as telemetry
MaskTelemetrySecrets(jobContext.Global.JobTelemetry);
Trace.Info($"Raising job completed against run service");
var completeJobRetryLimit = 5;
var exceptions = new List<Exception>();
while (completeJobRetryLimit-- > 0)
{
try
{
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, jobContext.JobOutputs, jobContext.Global.StepsResult, default);
return result;
}
catch (Exception ex)
{
Trace.Error($"Catch exception while attempting to complete job {message.JobId}, job request {message.RequestId}.");
Trace.Error(ex);
exceptions.Add(ex);
}
// delay 5 seconds before next retry.
await Task.Delay(TimeSpan.FromSeconds(5));
}
// rethrow exceptions from all attempts.
throw new AggregateException(exceptions);
}
private async Task<TaskResult> CompleteJobAsync(IJobServer jobServer, IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, TaskResult? taskResult = null)
{
jobContext.Debug($"Finishing: {message.JobDisplayName}");

View File

@@ -1,9 +1,12 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
@@ -11,6 +14,8 @@ using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Expressions;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
@@ -21,8 +26,6 @@ namespace GitHub.Runner.Worker
string DisplayName { get; set; }
IExecutionContext ExecutionContext { get; set; }
TemplateToken Timeout { get; }
bool TryUpdateDisplayName(out bool updated);
bool EvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context, out bool updated);
Task RunAsync();
}
@@ -192,12 +195,6 @@ namespace GitHub.Runner.Worker
}
else
{
// This is our last, best chance to expand the display name. (At this point, all the requirements for successful expansion should be met.)
// That being said, evaluating the display name should still be considered as a "best effort" exercise. (It's not critical or paramount.)
// For that reason, we call a safe "Try..." wrapper method to ensure that any potential problems we encounter in evaluating the display name
// don't interfere with our ultimate goal within this code block: evaluation of the condition.
step.TryUpdateDisplayName(out _);
try
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionTraceWriter);
@@ -259,6 +256,14 @@ namespace GitHub.Runner.Worker
private async Task RunStepAsync(IStep step, CancellationToken jobCancellationToken)
{
// Check to see if we can expand the display name
if (step is IActionRunner actionRunner &&
actionRunner.Stage == ActionRunStage.Main &&
actionRunner.TryEvaluateDisplayName(step.ExecutionContext.ExpressionValues, step.ExecutionContext))
{
step.ExecutionContext.UpdateTimelineRecordDisplayName(actionRunner.DisplayName);
}
// Start the step
Trace.Info("Starting the step.");
step.ExecutionContext.Debug($"Starting: {step.DisplayName}");

View File

@@ -0,0 +1,20 @@
using GitHub.Services.OAuth;
namespace GitHub.Services.Common
{
public static class VssCredentialsExtension
{
public static VssOAuthCredential ToOAuthCredentials(
this VssCredentials credentials)
{
if (credentials.Federated.CredentialType == VssCredentialsType.OAuth)
{
return credentials.Federated as VssOAuthCredential;
}
else
{
return null;
}
}
}
}

View File

@@ -184,33 +184,9 @@ namespace GitHub.Services.Common
return settings;
}
/// <summary>
/// Gets or sets the maximum size allowed for response content buffering.
/// </summary>
[DefaultValue(c_defaultContentBufferSize)]
public Int32 MaxContentBufferSize
{
get
{
return m_maxContentBufferSize;
}
set
{
ArgumentUtility.CheckForOutOfRange(value, nameof(value), 0, c_maxAllowedContentBufferSize);
m_maxContentBufferSize = value;
}
}
private static Lazy<RawClientHttpRequestSettings> s_defaultSettings
= new Lazy<RawClientHttpRequestSettings>(ConstructDefaultSettings);
private Int32 m_maxContentBufferSize;
// We will buffer a maximum of 1024MB in the message handler
private const Int32 c_maxAllowedContentBufferSize = 1024 * 1024 * 1024;
// We will buffer, by default, up to 512MB in the message handler
private const Int32 c_defaultContentBufferSize = 1024 * 1024 * 512;
private const Int32 c_defaultMaxRetry = 3;
private static readonly TimeSpan s_defaultTimeout = TimeSpan.FromSeconds(100); //default WebAPI timeout
private ICollection<CultureInfo> m_acceptLanguages = new List<CultureInfo>();

View File

@@ -9,23 +9,23 @@ using GitHub.Services.OAuth;
namespace GitHub.Services.Common
{
public class RawHttpMessageHandler : HttpMessageHandler
public class RawHttpMessageHandler: HttpMessageHandler
{
public RawHttpMessageHandler(
FederatedCredential credentials)
VssOAuthCredential credentials)
: this(credentials, new RawClientHttpRequestSettings())
{
}
public RawHttpMessageHandler(
FederatedCredential credentials,
VssOAuthCredential credentials,
RawClientHttpRequestSettings settings)
: this(credentials, settings, new HttpClientHandler())
{
}
public RawHttpMessageHandler(
FederatedCredential credentials,
VssOAuthCredential credentials,
RawClientHttpRequestSettings settings,
HttpMessageHandler innerHandler)
{
@@ -56,7 +56,7 @@ namespace GitHub.Services.Common
/// <summary>
/// Gets the credentials associated with this handler.
/// </summary>
public FederatedCredential Credentials
public VssOAuthCredential Credentials
{
get;
private set;
@@ -111,7 +111,7 @@ namespace GitHub.Services.Common
// Ensure that we attempt to use the most appropriate authentication mechanism by default.
if (m_tokenProvider == null)
{
m_tokenProvider = this.Credentials.CreateTokenProvider(request.RequestUri, null, null);
m_tokenProvider = this.Credentials.GetTokenProvider(request.RequestUri);
}
}
@@ -120,7 +120,6 @@ namespace GitHub.Services.Common
Boolean succeeded = false;
HttpResponseMessageWrapper responseWrapper;
Boolean lastResponseDemandedProxyAuth = false;
Int32 retries = m_maxAuthRetries;
try
{
@@ -139,13 +138,7 @@ namespace GitHub.Services.Common
// Let's start with sending a token
IssuedToken token = await m_tokenProvider.GetTokenAsync(null, tokenSource.Token).ConfigureAwait(false);
ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth);
// The WinHttpHandler will chunk any content that does not have a computed length which is
// not what we want. By loading into a buffer up-front we bypass this behavior and there is
// no difference in the normal HttpClientHandler behavior here since this is what they were
// already doing.
await BufferRequestContentAsync(request, tokenSource.Token).ConfigureAwait(false);
ApplyToken(request, token);
// ConfigureAwait(false) enables the continuation to be run outside any captured
// SynchronizationContext (such as ASP.NET's) which keeps things from deadlocking...
@@ -154,8 +147,7 @@ namespace GitHub.Services.Common
responseWrapper = new HttpResponseMessageWrapper(response);
var isUnAuthorized = responseWrapper.StatusCode == HttpStatusCode.Unauthorized;
lastResponseDemandedProxyAuth = responseWrapper.StatusCode == HttpStatusCode.ProxyAuthenticationRequired;
if (!isUnAuthorized && !lastResponseDemandedProxyAuth)
if (!isUnAuthorized)
{
// Validate the token after it has been successfully authenticated with the server.
m_tokenProvider?.ValidateToken(token, responseWrapper);
@@ -219,42 +211,15 @@ namespace GitHub.Services.Common
}
}
private static async Task BufferRequestContentAsync(
HttpRequestMessage request,
CancellationToken cancellationToken)
{
if (request.Content != null &&
request.Headers.TransferEncodingChunked != true)
{
Int64? contentLength = request.Content.Headers.ContentLength;
if (contentLength == null)
{
await request.Content.LoadIntoBufferAsync().EnforceCancellation(cancellationToken).ConfigureAwait(false);
}
// Explicitly turn off chunked encoding since we have computed the request content size
request.Headers.TransferEncodingChunked = false;
}
}
private void ApplyToken(
HttpRequestMessage request,
IssuedToken token,
bool applyICredentialsToWebProxy = false)
IssuedToken token)
{
switch (token)
{
case null:
return;
case ICredentials credentialsToken:
if (applyICredentialsToWebProxy)
{
HttpClientHandler httpClientHandler = m_transportHandler as HttpClientHandler;
if (httpClientHandler != null && httpClientHandler.Proxy != null)
{
httpClientHandler.Proxy.Credentials = credentialsToken;
}
}
m_credentialWrapper.InnerCredentials = credentialsToken;
break;
default:
@@ -289,7 +254,7 @@ namespace GitHub.Services.Common
private CredentialWrapper m_credentialWrapper;
private object m_thisLock;
private const Int32 m_maxAuthRetries = 3;
private IssuedTokenProvider m_tokenProvider;
private VssOAuthTokenProvider m_tokenProvider;
//.Net Core does not attempt NTLM schema on Linux, unless ICredentials is a CredentialCache instance
//This workaround may not be needed after this corefx fix is consumed: https://github.com/dotnet/corefx/pull/7923

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;

View File

@@ -1,6 +1,6 @@
using System;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using GitHub.DistributedTask.Logging;
namespace GitHub.DistributedTask.Expressions2.Sdk
@@ -55,7 +55,7 @@ namespace GitHub.DistributedTask.Expressions2.Sdk
try
{
// Evaluate
secretMasker = secretMasker?.Clone() ?? new SecretMasker(Enumerable.Empty<ValueEncoder>());
secretMasker = secretMasker?.Clone() ?? new SecretMasker();
trace = new EvaluationTraceWriter(trace, secretMasker);
var context = new EvaluationContext(trace, secretMasker, state, options, this);
trace.Info($"Evaluating: {ConvertToExpression()}");

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.ComponentModel;
namespace GitHub.DistributedTask.Logging
@@ -8,6 +8,7 @@ namespace GitHub.DistributedTask.Logging
{
void AddRegex(String pattern);
void AddValue(String value);
void AddValueEncoder(ValueEncoder encoder);
ISecretMasker Clone();
String MaskSecrets(String input);
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
@@ -10,11 +10,11 @@ namespace GitHub.DistributedTask.Logging
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class SecretMasker : ISecretMasker, IDisposable
{
public SecretMasker(IEnumerable<ValueEncoder> encoders)
public SecretMasker()
{
m_originalValueSecrets = new HashSet<ValueSecret>();
m_regexSecrets = new HashSet<RegexSecret>();
m_valueEncoders = new HashSet<ValueEncoder>(encoders ?? Enumerable.Empty<ValueEncoder>());
m_valueEncoders = new HashSet<ValueEncoder>();
m_valueSecrets = new HashSet<ValueSecret>();
}
@@ -104,11 +104,15 @@ namespace GitHub.DistributedTask.Logging
}
}
var secretVariations = valueEncoders.SelectMany(encoder => encoder(value))
.Where(variation => !string.IsNullOrEmpty(variation))
.Distinct()
.Select(variation => new ValueSecret(variation));
valueSecrets.AddRange(secretVariations);
// Compute the encoded values.
foreach (ValueEncoder valueEncoder in valueEncoders)
{
String encodedValue = valueEncoder(value);
if (!String.IsNullOrEmpty(encodedValue))
{
valueSecrets.Add(new ValueSecret(encodedValue));
}
}
// Write section.
try
@@ -131,6 +135,69 @@ namespace GitHub.DistributedTask.Logging
}
}
/// <summary>
/// This implementation assumes no more than one thread is adding regexes, values, or encoders at any given time.
/// </summary>
public void AddValueEncoder(ValueEncoder encoder)
{
ValueSecret[] originalSecrets;
// Read section.
try
{
m_lock.EnterReadLock();
// Test whether already added.
if (m_valueEncoders.Contains(encoder))
{
return;
}
// Read the original value secrets.
originalSecrets = m_originalValueSecrets.ToArray();
}
finally
{
if (m_lock.IsReadLockHeld)
{
m_lock.ExitReadLock();
}
}
// Compute the encoded values.
var encodedSecrets = new List<ValueSecret>();
foreach (ValueSecret originalSecret in originalSecrets)
{
String encodedValue = encoder(originalSecret.m_value);
if (!String.IsNullOrEmpty(encodedValue))
{
encodedSecrets.Add(new ValueSecret(encodedValue));
}
}
// Write section.
try
{
m_lock.EnterWriteLock();
// Add the encoder.
m_valueEncoders.Add(encoder);
// Add the values.
foreach (ValueSecret encodedSecret in encodedSecrets)
{
m_valueSecrets.Add(encodedSecret);
}
}
finally
{
if (m_lock.IsWriteLockHeld)
{
m_lock.ExitWriteLock();
}
}
}
public ISecretMasker Clone() => new SecretMasker(this);
public void Dispose()
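In the SecretMasker variant above, encoders are registered after construction: AddValueEncoder re-encodes the already-known original values outside the write lock and then publishes both the encoder and the encoded secrets under it. A small usage sketch against that surface (the chosen encoders and sample secret are illustrative):

// Illustrative use of the parameterless SecretMasker and AddValueEncoder shown above.
var masker = new SecretMasker();
masker.AddValue("Password123!");                        // original secret
masker.AddValueEncoder(ValueEncoders.JsonStringEscape); // also mask the JSON-escaped form
masker.AddValueEncoder(ValueEncoders.UriDataEscape);    // ...and the percent-encoded form

// Any registered variation is masked before the text reaches a log,
// e.g. "token=Password123%21" -> "token=***".
var masked = masker.MaskSecrets("token=Password123%21");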

View File

@@ -1,97 +1,73 @@
using System;
using System.Collections.Generic;
using System;
using System.ComponentModel;
using System.Linq;
using System.Security;
using System.Text;
using System.Text.RegularExpressions;
using Newtonsoft.Json;
namespace GitHub.DistributedTask.Logging
{
[EditorBrowsable(EditorBrowsableState.Never)]
public delegate IEnumerable<string> ValueEncoder(string value);
public delegate String ValueEncoder(String value);
[EditorBrowsable(EditorBrowsableState.Never)]
public static class ValueEncoders
{
public static IEnumerable<string> EnumerateBase64Variations(string value)
public static String Base64StringEscape(String value)
{
if (!string.IsNullOrEmpty(value))
{
// A byte is 8 bits. A Base64 "digit" can hold a maximum of 6 bits (2^6 - 1, or values 0 to 63).
// As a result, many Unicode characters (including single-byte letters) cannot be represented using a single Base64 digit.
// Furthermore, on average a Base64 string will be about 33% longer than the original text.
// This is because it generally requires 4 Base64 digits to represent 3 Unicode bytes. (4 / 3 ~ 1.33)
//
// Because of this 4:3 ratio (or, more precisely, 8 bits : 6 bits ratio), there's a cyclical pattern
// to when a byte boundary aligns with a Base64 digit boundary.
// The pattern repeats every 24 bits (the lowest common multiple of 8 and 6).
//
// |-----------24 bits-------------|-----------24 bits------------|
// Base64 Digits: |digit 0|digit 1|digit 2|digit 3|digit 4|digit 5|digit 6|digit7|
// Allocated Bits: aaaaaa aaBBBB BBBBcc cccccc DDDDDD DDeeee eeeeFF FFFFFF
// Unicode chars: |0th char |1st char |2nd char |3rd char |4th char |5th char |
return Convert.ToBase64String(Encoding.UTF8.GetBytes(value));
}
// Depending on alignment, the Base64-encoded secret can take any of 3 basic forms.
// For example, the Base64 digits representing "abc" could appear as any of the following:
// "YWJj" when aligned
// ".!FiYw==" when preceded by 3x + 1 bytes
// "..!hYmM=" when preceded by 3x + 2 bytes
// (where . represents an unrelated Base64 digit, ! represents a Base64 digit that should be masked, and x represents any non-negative integer)
// Base64 is 6 bits -> char
// A byte is 8 bits
// When an end user does something like base64(user:password),
// the length of the leading content changes the Base64 encoding of the password,
// so we also register base64(value shifted by one and by two bytes) as secrets.
// B1 B2 B3 B4 B5 B6 B7
// 000000|00 0000|0000 00|000000| 000000|00 0000|0000 00|000000|
// Char1 Char2 Char3 Char4
// See the diagram above: a new Base64 character begins at bit offset 0 of the first byte, bit offset 4 of the second byte, and bit offset 2 of the third byte, after which the pattern repeats.
// We register the Base64 encodings at all of these byte offsets as secrets.
public static String Base64StringEscapeShift1(String value)
{
return Base64StringEscapeShift(value, 1);
}
var rawBytes = Encoding.UTF8.GetBytes(value);
for (var offset = 0; offset <= 2; offset++)
{
var prunedBytes = rawBytes.Skip(offset).ToArray();
if (prunedBytes.Length > 0)
{
// Don't include Base64 padding characters (=) in Base64 representations of the secret.
// They don't represent anything interesting, so they don't need to be masked.
// (Some clients omit the padding, so we want to be sure we recognize the secret regardless of whether the padding is present or not.)
var buffer = new StringBuilder(Convert.ToBase64String(prunedBytes).TrimEnd(BASE64_PADDING_SUFFIX));
yield return buffer.ToString();
// Also, yield the RFC4648-equivalent RegEx.
buffer.Replace('+', '-');
buffer.Replace('/', '_');
yield return buffer.ToString();
}
}
}
public static String Base64StringEscapeShift2(String value)
{
return Base64StringEscapeShift(value, 2);
}
// Used when we pass environment variables to docker to escape " with \"
public static IEnumerable<string> CommandLineArgumentEscape(string value)
public static String CommandLineArgumentEscape(String value)
{
yield return value.Replace("\"", "\\\"");
return value.Replace("\"", "\\\"");
}
public static IEnumerable<string> ExpressionStringEscape(string value)
public static String ExpressionStringEscape(String value)
{
yield return Expressions2.Sdk.ExpressionUtility.StringEscape(value);
return Expressions2.Sdk.ExpressionUtility.StringEscape(value);
}
public static IEnumerable<string> JsonStringEscape(string value)
public static String JsonStringEscape(String value)
{
// Convert to a JSON string and then remove the leading/trailing double-quote.
String jsonString = JsonConvert.ToString(value);
String jsonEscapedValue = jsonString.Substring(startIndex: 1, length: jsonString.Length - 2);
yield return jsonEscapedValue;
return jsonEscapedValue;
}
public static IEnumerable<string> UriDataEscape(string value)
public static String UriDataEscape(String value)
{
yield return UriDataEscape(value, 65519);
return UriDataEscape(value, 65519);
}
public static IEnumerable<string> XmlDataEscape(string value)
public static String XmlDataEscape(String value)
{
yield return SecurityElement.Escape(value);
return SecurityElement.Escape(value);
}
public static IEnumerable<string> TrimDoubleQuotes(string value)
public static String TrimDoubleQuotes(String value)
{
var trimmed = string.Empty;
if (!string.IsNullOrEmpty(value) &&
@@ -102,17 +78,17 @@ namespace GitHub.DistributedTask.Logging
trimmed = value.Substring(1, value.Length - 2);
}
yield return trimmed;
return trimmed;
}
public static IEnumerable<string> PowerShellPreAmpersandEscape(string value)
public static String PowerShellPreAmpersandEscape(String value)
{
// if the secret is passed to PS as a command and it causes an error, sections of it can be surrounded by color codes
// or printed individually.
// or printed individually.
// The secret secretpart1&secretpart2&secretpart3 would be split into 2 sections:
// 'secretpart1&secretpart2&' and 'secretpart3'. This method masks for the first section.
// The secret secretpart1&+secretpart2&secretpart3 would be split into 2 sections:
// 'secretpart1&+' and (no 's') 'ecretpart2&secretpart3'. This method masks for the first section.
@@ -136,10 +112,10 @@ namespace GitHub.DistributedTask.Logging
}
}
yield return trimmed;
return trimmed;
}
public static IEnumerable<string> PowerShellPostAmpersandEscape(string value)
public static String PowerShellPostAmpersandEscape(String value)
{
var trimmed = string.Empty;
if (!string.IsNullOrEmpty(value) && value.Contains("&"))
@@ -161,10 +137,27 @@ namespace GitHub.DistributedTask.Logging
}
}
yield return trimmed;
return trimmed;
}
private static string UriDataEscape(string value, Int32 maxSegmentSize)
private static string Base64StringEscapeShift(String value, int shift)
{
var bytes = Encoding.UTF8.GetBytes(value);
if (bytes.Length > shift)
{
var shiftArray = new byte[bytes.Length - shift];
Array.Copy(bytes, shift, shiftArray, 0, bytes.Length - shift);
return Convert.ToBase64String(shiftArray);
}
else
{
return Convert.ToBase64String(bytes);
}
}
private static String UriDataEscape(
String value,
Int32 maxSegmentSize)
{
if (value.Length <= maxSegmentSize)
{
@@ -190,7 +183,5 @@ namespace GitHub.DistributedTask.Logging
return result.ToString();
}
private const char BASE64_PADDING_SUFFIX = '=';
}
}
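The comments in this hunk explain why a secret's Base64 form depends on how many bytes precede it and repeats every three bytes, which is why the masker also registers the encodings of the value with its first one and two bytes dropped. A short worked sketch of that aliasing, using the literal "abc" (the expected outputs in the comments can be verified by running the snippet):

using System;
using System.Linq;
using System.Text;

// Worked example of the three-byte Base64 alignment described above.
var bytes = Encoding.UTF8.GetBytes("abc");

// The variations a masker would register (padding trimmed):
// offset 0 -> "YWJj", offset 1 -> "YmM", offset 2 -> "Yw"
for (var offset = 0; offset <= 2; offset++)
{
    var variation = Convert.ToBase64String(bytes.Skip(offset).ToArray()).TrimEnd('=');
    Console.WriteLine($"offset {offset}: {variation}");
}

// Embedding the secret after a one-byte prefix surfaces the offset-2 variation;
// a two-byte prefix surfaces the offset-1 variation.
Console.WriteLine(Convert.ToBase64String(Encoding.UTF8.GetBytes("Xabc")));  // WGFiYw== contains "Yw"
Console.WriteLine(Convert.ToBase64String(Encoding.UTF8.GetBytes("XYabc"))); // WFlhYmM= contains "YmM"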

View File

@@ -42,10 +42,9 @@ namespace GitHub.DistributedTask.Pipelines
IList<String> fileTable,
TemplateToken jobOutputs,
IList<TemplateToken> defaults,
ActionsEnvironmentReference actionsEnvironment,
String messageType = JobRequestMessageTypes.PipelineAgentJobRequest)
ActionsEnvironmentReference actionsEnvironment)
{
this.MessageType = messageType;
this.MessageType = JobRequestMessageTypes.PipelineAgentJobRequest;
this.Plan = plan;
this.JobId = jobId;
this.JobDisplayName = jobDisplayName;

View File

@@ -455,6 +455,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
private readonly String[] s_expressionValueNames = new[]
{
PipelineTemplateConstants.GitHub,
PipelineTemplateConstants.Needs,
PipelineTemplateConstants.Strategy,
PipelineTemplateConstants.Matrix,
PipelineTemplateConstants.Needs,

View File

@@ -222,9 +222,6 @@
},
"job-outputs": {
"context": [
"matrix"
],
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string-runner-context"

View File

@@ -1,48 +0,0 @@
using GitHub.Services.WebApi;
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using System.Linq;
namespace GitHub.DistributedTask.WebApi
{
public class ListRunnersResponse
{
public ListRunnersResponse()
{
}
public ListRunnersResponse(ListRunnersResponse responseToBeCloned)
{
this.TotalCount = responseToBeCloned.TotalCount;
this.Runners = responseToBeCloned.Runners;
}
[JsonProperty("total_count")]
public int TotalCount
{
get;
set;
}
[JsonProperty("runners")]
public List<Runner> Runners
{
get;
set;
}
public ListRunnersResponse Clone()
{
return new ListRunnersResponse(this);
}
public List<TaskAgent> ToTaskAgents()
{
return Runners.Select(runner => new TaskAgent() { Name = runner.Name }).ToList();
}
}
}

View File

@@ -1,28 +1,25 @@
using System;
using System.Collections.Generic;
using System;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi;
namespace GitHub.Actions.RunService.WebApi
namespace GitHub.DistributedTask.WebApi
{
public class BrokerHttpClient : RawHttpClientBase
[ResourceArea(TaskResourceIds.AreaId)]
public class RunServiceHttpClient : RawHttpClientBase
{
public BrokerHttpClient(
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials)
: base(baseUrl, credentials)
{
}
public BrokerHttpClient(
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
RawClientHttpRequestSettings settings)
@@ -30,7 +27,7 @@ namespace GitHub.Actions.RunService.WebApi
{
}
public BrokerHttpClient(
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
params DelegatingHandler[] handlers)
@@ -38,7 +35,7 @@ namespace GitHub.Actions.RunService.WebApi
{
}
public BrokerHttpClient(
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
RawClientHttpRequestSettings settings,
@@ -47,7 +44,7 @@ namespace GitHub.Actions.RunService.WebApi
{
}
public BrokerHttpClient(
public RunServiceHttpClient(
Uri baseUrl,
HttpMessageHandler pipeline,
Boolean disposeHandler)
@@ -55,29 +52,23 @@ namespace GitHub.Actions.RunService.WebApi
{
}
public Task<TaskAgentMessage> GetRunnerMessageAsync(
string runnerVersion,
TaskAgentStatus? status,
CancellationToken cancellationToken = default
)
public Task<Pipelines.AgentJobRequestMessage> GetJobMessageAsync(
Uri requestUri,
string messageId,
CancellationToken cancellationToken = default)
{
var requestUri = new Uri(Client.BaseAddress, "message");
HttpMethod httpMethod = new HttpMethod("POST");
var payload = new {
StreamID = messageId
};
List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
if (status != null)
{
queryParams.Add("status", status.Value.ToString());
}
if (runnerVersion != null)
{
queryParams.Add("runnerVersion", runnerVersion);
}
return SendAsync<TaskAgentMessage>(
new HttpMethod("GET"),
var payloadJson = JsonUtility.ToString(payload);
var requestContent = new StringContent(payloadJson, System.Text.Encoding.UTF8, "application/json");
return SendAsync<Pipelines.AgentJobRequestMessage>(
httpMethod,
additionalHeaders: null,
requestUri: requestUri,
queryParameters: queryParams,
content: requestContent,
cancellationToken: cancellationToken);
}
}

View File

@@ -1,63 +0,0 @@
using System;
using Newtonsoft.Json;
namespace GitHub.DistributedTask.WebApi
{
public class Runner
{
public class Authorization
{
/// <summary>
/// The url to refresh tokens
/// </summary>
[JsonProperty("authorization_url")]
public Uri AuthorizationUrl
{
get;
internal set;
}
/// <summary>
/// The url to connect to to poll for messages
/// </summary>
[JsonProperty("server_url")]
public string ServerUrl
{
get;
internal set;
}
/// <summary>
/// The client id to use when connecting to the authorization_url
/// </summary>
[JsonProperty("client_id")]
public string ClientId
{
get;
internal set;
}
}
[JsonProperty("name")]
public string Name
{
get;
internal set;
}
[JsonProperty("id")]
public Int32 Id
{
get;
internal set;
}
[JsonProperty("authorization")]
public Authorization RunnerAuthorization
{
get;
internal set;
}
}
}

View File

@@ -1,98 +0,0 @@
using GitHub.Services.WebApi;
using System;
using System.Runtime.Serialization;
using System.ComponentModel;
using System.Collections.Generic;
using Newtonsoft.Json;
using System.Linq;
namespace GitHub.DistributedTask.WebApi
{
/// <summary>
/// An organization-level grouping of runners.
/// </summary>
[DataContract]
public class RunnerGroup
{
internal RunnerGroup()
{
}
public RunnerGroup(String name)
{
this.Name = name;
}
private RunnerGroup(RunnerGroup poolToBeCloned)
{
this.Id = poolToBeCloned.Id;
this.IsHosted = poolToBeCloned.IsHosted;
this.Name = poolToBeCloned.Name;
this.IsDefault = poolToBeCloned.IsDefault;
}
[DataMember(EmitDefaultValue = false)]
[JsonProperty("id")]
public Int32 Id
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
[JsonProperty("name")]
public String Name
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether or not this pool is internal and can't be modified by users
/// </summary>
[DataMember]
[JsonProperty("default")]
public bool IsDefault
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether or not this pool is managed by the service.
/// </summary>
[DataMember]
[JsonProperty("is_hosted")]
public Boolean IsHosted
{
get;
set;
}
}
public class RunnerGroupList
{
public RunnerGroupList()
{
this.RunnerGroups = new List<RunnerGroup>();
}
public List<TaskAgentPool> ToAgentPoolList()
{
var agentPools = this.RunnerGroups.Select(x => new TaskAgentPool(x.Name)
{
Id = x.Id,
IsHosted = x.IsHosted,
IsInternal = x.IsDefault
}).ToList();
return agentPools;
}
[JsonProperty("runner_groups")]
public List<RunnerGroup> RunnerGroups { get; set; }
[JsonProperty("total_count")]
public int Count { get; set; }
}
}

View File

@@ -1,4 +1,4 @@
using GitHub.Services.Common;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using System;
using System.Runtime.Serialization;
@@ -100,7 +100,6 @@ namespace GitHub.DistributedTask.WebApi
public static readonly String Summary = "DistributedTask.Core.Summary";
public static readonly String FileAttachment = "DistributedTask.Core.FileAttachment";
public static readonly String DiagnosticLog = "DistributedTask.Core.DiagnosticLog";
public static readonly String ResultsLog = "Results.Core.Log";
}
[GenerateAllConstants]

View File

@@ -1,13 +0,0 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Actions.RunService.WebApi
{
[DataContract]
public class AcquireJobRequest
{
[DataMember(Name = "streamId", EmitDefaultValue = false)]
public string StreamID { get; set; }
}
}

View File

@@ -1,26 +0,0 @@
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Actions.RunService.WebApi
{
[DataContract]
public class CompleteJobRequest
{
[DataMember(Name = "planId", EmitDefaultValue = false)]
public Guid PlanID { get; set; }
[DataMember(Name = "jobId", EmitDefaultValue = false)]
public Guid JobID { get; set; }
[DataMember(Name = "conclusion")]
public TaskResult Conclusion { get; set; }
[DataMember(Name = "outputs", EmitDefaultValue = false)]
public Dictionary<string, VariableValue> Outputs { get; set; }
[DataMember(Name = "stepResults", EmitDefaultValue = false)]
public IList<StepResult> StepResults { get; set; }
}
}

View File

@@ -1,15 +0,0 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.Actions.RunService.WebApi
{
[DataContract]
public class RenewJobRequest
{
[DataMember(Name = "planId", EmitDefaultValue = false)]
public Guid PlanID { get; set; }
[DataMember(Name = "jobId", EmitDefaultValue = false)]
public Guid JobID { get; set; }
}
}

View File

@@ -1,16 +0,0 @@
using System;
using System.Runtime.Serialization;
namespace Sdk.RSWebApi.Contracts
{
[DataContract]
public class RenewJobResponse
{
[DataMember]
public DateTime LockedUntil
{
get;
internal set;
}
}
}

View File

@@ -1,38 +0,0 @@
using System;
using System.Runtime.Serialization;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Actions.RunService.WebApi
{
[DataContract]
public class StepResult
{
[DataMember(Name = "external_id", EmitDefaultValue = false)]
public Guid ExternalID { get; set; }
[DataMember(Name = "number", EmitDefaultValue = false)]
public int? Number { get; set; }
[DataMember(Name = "name", EmitDefaultValue = false)]
public string Name { get; set; }
[DataMember(Name = "status")]
public TimelineRecordState? Status { get; set; }
[DataMember(Name = "conclusion")]
public TaskResult? Conclusion { get; set; }
[DataMember(Name = "started_at", EmitDefaultValue = false)]
public DateTime? StartedAt { get; set; }
[DataMember(Name = "completed_at", EmitDefaultValue = false)]
public DateTime? CompletedAt { get; set; }
[DataMember(Name = "completed_log_url", EmitDefaultValue = false)]
public string CompletedLogURL { get; set; }
[DataMember(Name = "completed_log_lines", EmitDefaultValue = false)]
public long? CompletedLogLines { get; set; }
}
}

View File

@@ -1,131 +0,0 @@
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi;
namespace GitHub.Actions.RunService.WebApi
{
public class RunServiceHttpClient : RawHttpClientBase
{
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials)
: base(baseUrl, credentials)
{
}
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
RawClientHttpRequestSettings settings)
: base(baseUrl, credentials, settings)
{
}
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
params DelegatingHandler[] handlers)
: base(baseUrl, credentials, handlers)
{
}
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
RawClientHttpRequestSettings settings,
params DelegatingHandler[] handlers)
: base(baseUrl, credentials, settings, handlers)
{
}
public RunServiceHttpClient(
Uri baseUrl,
HttpMessageHandler pipeline,
Boolean disposeHandler)
: base(baseUrl, pipeline, disposeHandler)
{
}
public Task<AgentJobRequestMessage> GetJobMessageAsync(
Uri requestUri,
string messageId,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
var payload = new AcquireJobRequest
{
StreamID = messageId
};
requestUri = new Uri(requestUri, "acquirejob");
var requestContent = new ObjectContent<AcquireJobRequest>(payload, new VssJsonMediaTypeFormatter(true));
return SendAsync<AgentJobRequestMessage>(
httpMethod,
requestUri: requestUri,
content: requestContent,
cancellationToken: cancellationToken);
}
public Task CompleteJobAsync(
Uri requestUri,
Guid planId,
Guid jobId,
TaskResult result,
Dictionary<String, VariableValue> outputs,
IList<StepResult> stepResults,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
var payload = new CompleteJobRequest()
{
PlanID = planId,
JobID = jobId,
Conclusion = result,
Outputs = outputs,
StepResults = stepResults
};
requestUri = new Uri(requestUri, "completejob");
var requestContent = new ObjectContent<CompleteJobRequest>(payload, new VssJsonMediaTypeFormatter(true));
return SendAsync(
httpMethod,
requestUri,
content: requestContent,
cancellationToken: cancellationToken);
}
public Task<RenewJobResponse> RenewJobAsync(
Uri requestUri,
Guid planId,
Guid jobId,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
var payload = new RenewJobRequest()
{
PlanID = planId,
JobID = jobId
};
requestUri = new Uri(requestUri, "renewjob");
var requestContent = new ObjectContent<RenewJobRequest>(payload, new VssJsonMediaTypeFormatter(true));
return SendAsync<RenewJobResponse>(
httpMethod,
requestUri,
content: requestContent,
cancellationToken: cancellationToken);
}
}
}
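The RunServiceHttpClient in the hunk above exposes the three job endpoints acquirejob, completejob, and renewjob, each a POST with a typed payload. A minimal caller sketch against those signatures (the service URL, credential lookup, and message id are placeholders; usings as in the file above):

// Hypothetical caller of the RunServiceHttpClient shown above; values are placeholders.
var serviceUri = new Uri("https://run-service.example/");
VssOAuthCredential credentials = GetRunnerOAuthCredential();   // hypothetical helper
var client = new RunServiceHttpClient(serviceUri, credentials);

// 1. Acquire the queued job's message.
AgentJobRequestMessage job = await client.GetJobMessageAsync(serviceUri, "some-stream-id");

// 2. Renew the job lock periodically while the job runs.
RenewJobResponse renewal = await client.RenewJobAsync(serviceUri, job.Plan.PlanId, job.JobId);

// 3. Report the conclusion, outputs, and per-step results when done.
await client.CompleteJobAsync(
    serviceUri,
    job.Plan.PlanId,
    job.JobId,
    TaskResult.Succeeded,
    outputs: new Dictionary<string, VariableValue>(),
    stepResults: new List<StepResult>());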

View File

@@ -1,4 +1,3 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
@@ -47,126 +46,12 @@ namespace GitHub.Services.Results.Contracts
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedJobLogsURLRequest
{
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedJobLogsURLResponse
{
[DataMember]
public string LogsUrl;
[DataMember]
public string BlobStorageType;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedStepLogsURLRequest
{
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string StepBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedStepLogsURLResponse
{
[DataMember]
public string LogsUrl;
[DataMember]
public string BlobStorageType;
[DataMember]
public long SoftSizeLimit;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class JobLogsMetadataCreate
{
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string UploadedAt;
[DataMember]
public long LineCount;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class StepLogsMetadataCreate
{
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string StepBackendId;
[DataMember]
public string UploadedAt;
[DataMember]
public long LineCount;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class CreateMetadataResponse
public class CreateStepSummaryMetadataResponse
{
[DataMember]
public bool Ok;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class StepsUpdateRequest
{
[DataMember]
public IEnumerable<Step> Steps;
[DataMember]
public long ChangeOrder;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class Step
{
[DataMember]
public string ExternalId;
[DataMember]
public int Number;
[DataMember]
public string Name;
[DataMember]
public Status Status;
[DataMember]
public string StartedAt;
[DataMember]
public string CompletedAt;
}
public enum Status
{
StatusUnknown = 0,
StatusInProgress = 3,
StatusPending = 5,
StatusCompleted = 6
}
public static class BlobStorageTypes
{
public static readonly string AzureBlobStorage = "BLOB_STORAGE_TYPE_AZURE";

View File

@@ -101,17 +101,6 @@ namespace Sdk.WebApi.WebApi
}
}
protected Task<T> SendAsync<T>(
HttpMethod method,
Uri requestUri,
HttpContent content = null,
IEnumerable<KeyValuePair<String, String>> queryParameters = null,
Object userState = null,
CancellationToken cancellationToken = default(CancellationToken))
{
return SendAsync<T>(method, null, requestUri, content, queryParameters, userState, cancellationToken);
}
protected async Task<T> SendAsync<T>(
HttpMethod method,
IEnumerable<KeyValuePair<String, String>> additionalHeaders,

View File

@@ -1,16 +1,11 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using System.Net.Http.Formatting;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.Results.Contracts;
using System.Net.Http.Formatting;
using Sdk.WebApi.WebApi;
namespace GitHub.Services.Results.Client
@@ -27,141 +22,70 @@ namespace GitHub.Services.Results.Client
m_token = token;
m_resultsServiceUrl = baseUrl;
m_formatter = new JsonMediaTypeFormatter();
m_changeIdCounter = 1;
}
// Get Sas URL calls
private async Task<T> GetResultsSignedURLResponse<R, T>(Uri uri, CancellationToken cancellationToken, R request)
public async Task<GetSignedStepSummaryURLResponse> GetStepSummaryUploadUrlAsync(string planId, string jobId, string stepId, CancellationToken cancellationToken)
{
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri))
var request = new GetSignedStepSummaryURLRequest()
{
WorkflowJobRunBackendId= jobId,
WorkflowRunBackendId= planId,
StepBackendId= stepId
};
var stepSummaryUploadRequest = new Uri(m_resultsServiceUrl, "twirp/results.services.receiver.Receiver/GetStepSummarySignedBlobURL");
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, stepSummaryUploadRequest))
{
requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
requestMessage.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));
using (HttpContent content = new ObjectContent<R>(request, m_formatter))
using (HttpContent content = new ObjectContent<GetSignedStepSummaryURLRequest>(request, m_formatter))
{
requestMessage.Content = content;
using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
{
return await ReadJsonContentAsync<T>(response, cancellationToken);
return await ReadJsonContentAsync<GetSignedStepSummaryURLResponse>(response, cancellationToken);
}
}
}
}
private async Task<GetSignedStepSummaryURLResponse> GetStepSummaryUploadUrlAsync(string planId, string jobId, Guid stepId, CancellationToken cancellationToken)
private async Task StepSummaryUploadCompleteAsync(string planId, string jobId, string stepId, long size, CancellationToken cancellationToken)
{
var request = new GetSignedStepSummaryURLRequest()
var timestamp = DateTime.UtcNow.ToString("yyyy-MM-dd'T'HH:mm:ss.fffK");
var request = new StepSummaryMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString()
WorkflowJobRunBackendId= jobId,
WorkflowRunBackendId= planId,
StepBackendId = stepId,
Size = size,
UploadedAt = timestamp
};
var getStepSummarySignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetStepSummarySignedBlobURL);
var stepSummaryUploadCompleteRequest = new Uri(m_resultsServiceUrl, "twirp/results.services.receiver.Receiver/CreateStepSummaryMetadata");
return await GetResultsSignedURLResponse<GetSignedStepSummaryURLRequest, GetSignedStepSummaryURLResponse>(getStepSummarySignedBlobURLEndpoint, cancellationToken, request);
}
private async Task<GetSignedStepLogsURLResponse> GetStepLogUploadUrlAsync(string planId, string jobId, Guid stepId, CancellationToken cancellationToken)
{
var request = new GetSignedStepLogsURLRequest()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString(),
};
var getStepLogsSignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetStepLogsSignedBlobURL);
return await GetResultsSignedURLResponse<GetSignedStepLogsURLRequest, GetSignedStepLogsURLResponse>(getStepLogsSignedBlobURLEndpoint, cancellationToken, request);
}
private async Task<GetSignedJobLogsURLResponse> GetJobLogUploadUrlAsync(string planId, string jobId, CancellationToken cancellationToken)
{
var request = new GetSignedJobLogsURLRequest()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
};
var getJobLogsSignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetJobLogsSignedBlobURL);
return await GetResultsSignedURLResponse<GetSignedJobLogsURLRequest, GetSignedJobLogsURLResponse>(getJobLogsSignedBlobURLEndpoint, cancellationToken, request);
}
// Create metadata calls
private async Task SendRequest<R>(Uri uri, CancellationToken cancellationToken, R request, string timestamp)
{
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri))
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, stepSummaryUploadCompleteRequest))
{
requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
requestMessage.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));
using (HttpContent content = new ObjectContent<R>(request, m_formatter))
using (HttpContent content = new ObjectContent<StepSummaryMetadataCreate>(request, m_formatter))
{
requestMessage.Content = content;
using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
{
var jsonResponse = await ReadJsonContentAsync<CreateMetadataResponse>(response, cancellationToken);
var jsonResponse = await ReadJsonContentAsync<CreateStepSummaryMetadataResponse>(response, cancellationToken);
if (!jsonResponse.Ok)
{
throw new Exception($"Failed to mark {typeof(R).Name} upload as complete, status code: {response.StatusCode}, ok: {jsonResponse.Ok}, timestamp: {timestamp}");
throw new Exception($"Failed to mark step summary upload as complete, status code: {response.StatusCode}, ok: {jsonResponse.Ok}, size: {size}, timestamp: {timestamp}");
}
}
}
}
}
private async Task StepSummaryUploadCompleteAsync(string planId, string jobId, Guid stepId, long size, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new StepSummaryMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString(),
Size = size,
UploadedAt = timestamp
};
var createStepSummaryMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateStepSummaryMetadata);
await SendRequest<StepSummaryMetadataCreate>(createStepSummaryMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task StepLogUploadCompleteAsync(string planId, string jobId, Guid stepId, long lineCount, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new StepLogsMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString(),
UploadedAt = timestamp,
LineCount = lineCount,
};
var createStepLogsMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateStepLogsMetadata);
await SendRequest<StepLogsMetadataCreate>(createStepLogsMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task JobLogUploadCompleteAsync(string planId, string jobId, long lineCount, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new JobLogsMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
UploadedAt = timestamp,
LineCount = lineCount,
};
var createJobLogsMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateJobLogsMetadata);
await SendRequest<JobLogsMetadataCreate>(createJobLogsMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task<HttpResponseMessage> UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
private async Task<HttpResponseMessage> UploadFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
{
// Upload the file to the url
var request = new HttpRequestMessage(HttpMethod.Put, url)
@@ -171,7 +95,7 @@ namespace GitHub.Services.Results.Client
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob);
request.Content.Headers.Add("x-ms-blob-type", "BlockBlob");
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
@@ -184,55 +108,8 @@ namespace GitHub.Services.Results.Client
}
}
private async Task<HttpResponseMessage> CreateAppendFileAsync(string url, string blobStorageType, CancellationToken cancellationToken)
{
var request = new HttpRequestMessage(HttpMethod.Put, url)
{
Content = new StringContent("")
};
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob);
request.Content.Headers.Add("Content-Length", "0");
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
throw new Exception($"Failed to create append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
}
return response;
}
}
private async Task<HttpResponseMessage> UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken)
{
var comp = finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock";
// Upload the file to the url
var request = new HttpRequestMessage(HttpMethod.Put, url + comp)
{
Content = new StreamContent(file)
};
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add("Content-Length", fileSize.ToString());
request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString());
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
throw new Exception($"Failed to upload append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}, object: {response}, fileSize: {fileSize}");
}
return response;
}
}
// Handle file upload for step summary
public async Task UploadStepSummaryAsync(string planId, string jobId, Guid stepId, string file, CancellationToken cancellationToken)
public async Task UploadStepSummaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken)
{
// Get the upload url
var uploadUrlResponse = await GetStepSummaryUploadUrlAsync(planId, jobId, stepId, cancellationToken);
@@ -251,147 +128,15 @@ namespace GitHub.Services.Results.Client
// Upload the file
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
var response = await UploadFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
}
// Send step summary upload complete message
await StepSummaryUploadCompleteAsync(planId, jobId, stepId, fileSize, cancellationToken);
}
// Handle file upload for step log
public async Task UploadResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
// Get the upload url
var uploadUrlResponse = await GetStepLogUploadUrlAsync(planId, jobId, stepId, cancellationToken);
if (uploadUrlResponse == null || uploadUrlResponse.LogsUrl == null)
{
throw new Exception("Failed to get step log upload url");
}
// Create the Append blob
if (firstBlock)
{
await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
}
// Upload content
var fileSize = new FileInfo(file).Length;
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, fileStream, finalize, fileSize, cancellationToken);
}
// Update metadata
if (finalize)
{
// Send step log upload complete message
await StepLogUploadCompleteAsync(planId, jobId, stepId, lineCount, cancellationToken);
}
}
// Handle file upload for job log
public async Task UploadResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
// Get the upload url
var uploadUrlResponse = await GetJobLogUploadUrlAsync(planId, jobId, cancellationToken);
if (uploadUrlResponse == null || uploadUrlResponse.LogsUrl == null)
{
throw new Exception("Failed to get job log upload url");
}
// Create the Append blob
if (firstBlock)
{
await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
}
// Upload content
var fileSize = new FileInfo(file).Length;
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, fileStream, finalize, fileSize, cancellationToken);
}
// Update metadata
if (finalize)
{
// Send job log upload complete message
await JobLogUploadCompleteAsync(planId, jobId, lineCount, cancellationToken);
}
}
private Step ConvertTimelineRecordToStep(TimelineRecord r)
{
return new Step()
{
ExternalId = r.Id.ToString(),
Number = r.Order.GetValueOrDefault(),
Name = r.Name,
Status = ConvertStateToStatus(r.State.GetValueOrDefault()),
StartedAt = r.StartTime?.ToString(Constants.TimestampFormat),
CompletedAt = r.FinishTime?.ToString(Constants.TimestampFormat)
};
}
private Status ConvertStateToStatus(TimelineRecordState s)
{
switch (s)
{
case TimelineRecordState.Completed:
return Status.StatusCompleted;
case TimelineRecordState.Pending:
return Status.StatusPending;
case TimelineRecordState.InProgress:
return Status.StatusInProgress;
default:
return Status.StatusUnknown;
}
}
public async Task UpdateWorkflowStepsAsync(Guid planId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var stepRecords = records.Where(r => String.Equals(r.RecordType, "Task", StringComparison.Ordinal));
var stepUpdateRequests = stepRecords.GroupBy(r => r.ParentId).Select(sg => new StepsUpdateRequest()
{
WorkflowRunBackendId = planId.ToString(),
WorkflowJobRunBackendId = sg.Key.ToString(),
ChangeOrder = m_changeIdCounter++,
Steps = sg.Select(ConvertTimelineRecordToStep)
});
var stepUpdateEndpoint = new Uri(m_resultsServiceUrl, Constants.WorkflowStepsUpdate);
foreach (var request in stepUpdateRequests)
{
await SendRequest<StepsUpdateRequest>(stepUpdateEndpoint, cancellationToken, request, timestamp);
}
}
private MediaTypeFormatter m_formatter;
private Uri m_resultsServiceUrl;
private string m_token;
private int m_changeIdCounter;
}
// Constants specific to results
public static class Constants
{
public static readonly string TimestampFormat = "yyyy-MM-dd'T'HH:mm:ss.fffK";
public static readonly string ResultsReceiverTwirpEndpoint = "twirp/results.services.receiver.Receiver/";
public static readonly string GetStepSummarySignedBlobURL = ResultsReceiverTwirpEndpoint + "GetStepSummarySignedBlobURL";
public static readonly string CreateStepSummaryMetadata = ResultsReceiverTwirpEndpoint + "CreateStepSummaryMetadata";
public static readonly string GetStepLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetStepLogsSignedBlobURL";
public static readonly string CreateStepLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateStepLogsMetadata";
public static readonly string GetJobLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetJobLogsSignedBlobURL";
public static readonly string CreateJobLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateJobLogsMetadata";
public static readonly string ResultsProtoApiV1Endpoint = "twirp/github.actions.results.api.v1.WorkflowStepUpdateService/";
public static readonly string WorkflowStepsUpdate = ResultsProtoApiV1Endpoint + "WorkflowStepsUpdate";
public static readonly string AzureBlobSealedHeader = "x-ms-blob-sealed";
public static readonly string AzureBlobTypeHeader = "x-ms-blob-type";
public static readonly string AzureBlockBlob = "BlockBlob";
public static readonly string AzureAppendBlob = "AppendBlob";
}
}
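The results client above uses two Azure Blob upload modes: a single block-blob PUT (x-ms-blob-type: BlockBlob) for step summaries, and an append blob for step and job logs that is created once (x-ms-blob-type: AppendBlob), appended to with ?comp=appendblock, and sealed on the final block, after which metadata is reported back over the Twirp receiver endpoints. A condensed sketch of the append-blob path with a plain HttpClient (the signed URL and file name are placeholders):

using System.IO;
using System.Net.Http;

// Condensed sketch of the append-blob log upload flow implemented above.
var http = new HttpClient();
var signedUrl = "https://blob.example/logs?sig=placeholder";  // stands in for the SAS URL from the results service

// 1. Create the empty append blob (first block only).
var create = new HttpRequestMessage(HttpMethod.Put, signedUrl) { Content = new StringContent("") };
create.Content.Headers.Add("x-ms-blob-type", "AppendBlob");
(await http.SendAsync(create)).EnsureSuccessStatusCode();

// 2. Append a chunk of log content; seal the blob when this is the final chunk.
var finalize = true;
var append = new HttpRequestMessage(HttpMethod.Put, signedUrl + "&comp=appendblock" + (finalize ? "&seal=true" : ""))
{
    Content = new StreamContent(File.OpenRead("step.log"))
};
append.Content.Headers.Add("x-ms-blob-sealed", finalize.ToString());
(await http.SendAsync(append)).EnsureSuccessStatusCode();

// 3. Finally, POST the line count and timestamp to the CreateStepLogsMetadata / CreateJobLogsMetadata
//    Twirp endpoint so the results service knows the upload is complete.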

View File

@@ -1,3 +1,4 @@
using GitHub.Runner.Common.Util;
using System;
using System.IO;
using System.Reflection;
@@ -12,7 +13,6 @@ namespace GitHub.Runner.Common.Tests
{
private HostContext _hc;
private CancellationTokenSource _tokenSource;
private const string EXPECTED_SECRET_MASK = "***";
[Fact]
[Trait("Level", "L0")]
@@ -95,11 +95,11 @@ namespace GitHub.Runner.Common.Tests
Assert.Equal("123***123", _hc.SecretMasker.MaskSecrets("123Pass%20word%20123%21123"));
Assert.Equal("123***123", _hc.SecretMasker.MaskSecrets("123Pass&lt;word&gt;123!123"));
Assert.Equal("123***123", _hc.SecretMasker.MaskSecrets("123Pass''word''123!123"));
Assert.Equal("OlBh***==", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($":Password123!"))));
Assert.Equal("YTpQ***=", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"a:Password123!"))));
Assert.Equal("OlBh***", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($":Password123!"))));
Assert.Equal("YTpQ***", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"a:Password123!"))));
Assert.Equal("YWI6***", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"ab:Password123!"))));
Assert.Equal("YWJjOlBh***==", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"abc:Password123!"))));
Assert.Equal("YWJjZDpQ***=", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"abcd:Password123!"))));
Assert.Equal("YWJjOlBh***", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"abc:Password123!"))));
Assert.Equal("YWJjZDpQ***", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"abcd:Password123!"))));
Assert.Equal("YWJjZGU6***", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"abcde:Password123!"))));
Assert.Equal("123***123", _hc.SecretMasker.MaskSecrets("123Password123!!123"));
Assert.Equal("123short123", _hc.SecretMasker.MaskSecrets("123short123"));
@@ -112,116 +112,6 @@ namespace GitHub.Runner.Common.Tests
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void Base64SecretMaskers()
{
// The following are good candidate strings for Base64 encoding because they include
// both standard and RFC 4648 Base64 digits in all offset variations.
// TeLL? noboDy~ SEcreT?
// tElL~ NEVER~ neveR?
// TIGht? Tight~ guard~
// pRIVAte~ guARd? TIghT~
// KeeP~ TIgHT? tIgHT~
// LoCk? TiGhT~ TIght~
// DIvULGe~ nObODY~ noBOdy?
// foreVER~ Tight~ GUaRd?
try
{
// Arrange.
Setup();
// Act.
_hc.SecretMasker.AddValue("TeLL? noboDy~ SEcreT?");
// The above string has the following Base64 variations based on the chop leading byte(s) method of Base64 aliasing:
var base64Variations = new[]
{
"VGVMTD8gbm9ib0R5fiBTRWNyZVQ/",
"ZUxMPyBub2JvRHl+IFNFY3JlVD8",
"TEw/IG5vYm9EeX4gU0VjcmVUPw",
// RFC 4648 (URL-safe Base64)
"VGVMTD8gbm9ib0R5fiBTRWNyZVQ_",
"ZUxMPyBub2JvRHl-IFNFY3JlVD8",
"TEw_IG5vYm9EeX4gU0VjcmVUPw"
};
var bookends = new[]
{
(string.Empty, string.Empty),
(string.Empty, "="),
(string.Empty, "=="),
(string.Empty, "==="),
("a", "z"),
("A", "Z"),
("abc", "abc"),
("ABC", "ABC"),
("0", "0"),
("00", "00"),
("000", "000"),
("123", "789"),
("`", "`"),
("'", "'"),
("\"", "\""),
("[", "]"),
("(", ")"),
("$(", ")"),
("{", "}"),
("${", "}"),
("!", "!"),
("!!", "!!"),
("%", "%"),
("%%", "%%"),
("_", "_"),
("__", "__"),
(":", ":"),
("::", "::"),
(";", ";"),
(";;", ";;"),
(":", string.Empty),
(";", string.Empty),
(string.Empty, ":"),
(string.Empty, ";"),
("VGVMTD8gbm9ib", "ZUxMPy"),
("VGVMTD8gbm9ib", "TEw/IG5vYm9EeX4"),
("ZUxMPy", "TEw/IG5vYm9EeX4"),
("VGVMTD8gbm9ib", string.Empty),
("TEw/IG5vYm9EeX4", string.Empty),
("ZUxMPy", string.Empty),
(string.Empty, "VGVMTD8gbm9ib"),
(string.Empty, "TEw/IG5vYm9EeX4"),
(string.Empty, "ZUxMPy"),
};
foreach (var variation in base64Variations)
{
foreach (var pair in bookends)
{
var (prefix, suffix) = pair;
var expected = string.Format("{0}{1}{2}", prefix, EXPECTED_SECRET_MASK, suffix);
var payload = string.Format("{0}{1}{2}", prefix, variation, suffix);
Assert.Equal(expected, _hc.SecretMasker.MaskSecrets(payload));
}
// Verify no masking is performed on a partial match.
for (int i = 1; i < variation.Length - 1; i++)
{
var fragment = variation[..i];
Assert.Equal(fragment, _hc.SecretMasker.MaskSecrets(fragment));
}
}
}
finally
{
// Cleanup.
Teardown();
}
}
[Theory]
[InlineData("secret&secret&secret", "secret&secret&\x0033[96msecret\x0033[0m", "***\x0033[96m***\x0033[0m")]
[InlineData("secret&secret+secret", "secret&\x0033[96msecret+secret\x0033[0m", "***\x0033[96m***\x0033[0m")]

View File

@@ -19,7 +19,6 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
public class ConfigurationManagerL0
{
private Mock<IRunnerServer> _runnerServer;
private Mock<IRunnerDotcomServer> _dotcomServer;
private Mock<ILocationServer> _locationServer;
private Mock<ICredentialManager> _credMgr;
private Mock<IPromptManager> _promptManager;
@@ -56,7 +55,6 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
_store = new Mock<IConfigurationStore>();
_extnMgr = new Mock<IExtensionManager>();
_rsaKeyManager = new Mock<IRSAKeyManager>();
_dotcomServer = new Mock<IRunnerDotcomServer>();
#if OS_WINDOWS
_serviceControlManager = new Mock<IWindowsServiceControlManager>();
@@ -73,13 +71,6 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
AuthorizationUrl = new Uri("http://localhost:8080/pipelines"),
};
var expectedRunner = new GitHub.DistributedTask.WebApi.Runner() { Name = expectedAgent.Name, Id = 1 };
expectedRunner.RunnerAuthorization = new GitHub.DistributedTask.WebApi.Runner.Authorization
{
ClientId = expectedAgent.Authorization.ClientId.ToString(),
AuthorizationUrl = new Uri("http://localhost:8080/pipelines"),
};
var connectionData = new ConnectionData()
{
InstanceId = Guid.NewGuid(),
@@ -115,10 +106,6 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
_runnerServer.Setup(x => x.AddAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent));
_runnerServer.Setup(x => x.ReplaceAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent));
_dotcomServer.Setup(x => x.GetRunnersAsync(It.IsAny<int>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedAgents));
_dotcomServer.Setup(x => x.GetRunnerGroupsAsync(It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedPools));
_dotcomServer.Setup(x => x.AddRunnerAsync(It.IsAny<int>(), It.IsAny<TaskAgent>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedRunner));
rsa = new RSACryptoServiceProvider(2048);
_rsaKeyManager.Setup(x => x.CreateKey()).Returns(rsa);
@@ -132,7 +119,6 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
tc.SetSingleton<IConfigurationStore>(_store.Object);
tc.SetSingleton<IExtensionManager>(_extnMgr.Object);
tc.SetSingleton<IRunnerServer>(_runnerServer.Object);
tc.SetSingleton<IRunnerDotcomServer>(_dotcomServer.Object);
tc.SetSingleton<ILocationServer>(_locationServer.Object);
#if OS_WINDOWS

View File

@@ -1,17 +1,12 @@
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Listener;
using GitHub.Runner.Listener.Configuration;
using GitHub.Services.WebApi;
using Moq;
using Sdk.RSWebApi.Contracts;
using Xunit;
using Pipelines = GitHub.DistributedTask.Pipelines;
@@ -23,8 +18,6 @@ namespace GitHub.Runner.Common.Tests.Listener
private Mock<IProcessChannel> _processChannel;
private Mock<IProcessInvoker> _processInvoker;
private Mock<IRunnerServer> _runnerServer;
private Mock<IRunServer> _runServer;
private Mock<IConfigurationStore> _configurationStore;
public JobDispatcherL0()
@@ -32,7 +25,6 @@ namespace GitHub.Runner.Common.Tests.Listener
_processChannel = new Mock<IProcessChannel>();
_processInvoker = new Mock<IProcessInvoker>();
_runnerServer = new Mock<IRunnerServer>();
_runServer = new Mock<IRunServer>();
_configurationStore = new Mock<IConfigurationStore>();
}
@@ -147,7 +139,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully);
_runnerServer.Verify(x => x.RenewAgentRequestAsync(It.IsAny<int>(), It.IsAny<long>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Exactly(5));
@@ -205,7 +197,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -213,75 +205,6 @@ namespace GitHub.Runner.Common.Tests.Listener
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void DispatcherRenewJobOnRunServiceStopOnJobNotFoundExceptions()
{
//Arrange
using (var hc = new TestHostContext(this))
{
int poolId = 1;
Int64 requestId = 1000;
int count = 0;
var trace = hc.GetTrace(nameof(DispatcherRenewJobOnRunServiceStopOnJobNotFoundExceptions));
TaskCompletionSource<int> firstJobRequestRenewed = new();
CancellationTokenSource cancellationTokenSource = new();
TaskAgentJobRequest request = new();
PropertyInfo lockUntilProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
Assert.NotNull(lockUntilProperty);
lockUntilProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5));
hc.SetSingleton<IRunServer>(_runServer.Object);
hc.SetSingleton<IConfigurationStore>(_configurationStore.Object);
_configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 });
_ = _runServer.Setup(x => x.RenewJobAsync(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<CancellationToken>()))
.Returns(() =>
{
count++;
if (!firstJobRequestRenewed.Task.IsCompletedSuccessfully)
{
trace.Info("First renew happens.");
}
if (count < 5)
{
var response = new RenewJobResponse()
{
LockedUntil = request.LockedUntil.Value
};
return Task.FromResult<RenewJobResponse>(response);
}
else if (count == 5)
{
cancellationTokenSource.CancelAfter(10000);
throw new TaskOrchestrationJobNotFoundException("");
}
else
{
throw new InvalidOperationException("Should not reach here.");
}
});
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
EnableRunServiceJobForJobDispatcher(jobDispatcher);
// Set the value of the _isRunServiceJob field to true
var isRunServiceJobField = typeof(JobDispatcher).GetField("_isRunServiceJob", BindingFlags.NonPublic | BindingFlags.Instance);
isRunServiceJobField.SetValue(jobDispatcher, true);
await jobDispatcher.RenewJobRequestAsync(GetAgentJobRequestMessage(), GetServiceEndpoint(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
_runServer.Verify(x => x.RenewJobAsync(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<CancellationToken>()), Times.Exactly(5));
}
}
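The removed test above exercises the run-service renew path: the mocked server renews the job lock four times and reports "job not found" on the fifth call, and the dispatcher is expected to stop renewing promptly rather than retry until cancellation. A rough sketch of that loop shape, using hypothetical names (renewAsync, JobNotFoundException) rather than the dispatcher's real types:
using System;
using System.Threading;
using System.Threading.Tasks;
class JobNotFoundException : Exception { }
static class RenewLoopSketch
{
    public static async Task RenewAsync(
        Func<CancellationToken, Task<DateTime>> renewAsync, // returns the new lock expiry
        TimeSpan interval,
        CancellationToken token)
    {
        while (!token.IsCancellationRequested)
        {
            try
            {
                var lockedUntil = await renewAsync(token); // new expiry for the job lock
            }
            catch (JobNotFoundException)
            {
                // The service no longer knows the job: stop renewing right away.
                return;
            }
            await Task.Delay(interval, token);
        }
    }
}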
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
@@ -333,7 +256,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -389,9 +312,8 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
// Act
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
// Assert
_configurationStore.Verify(x => x.SaveSettings(It.Is<RunnerSettings>(settings => settings.AgentName == newName)), Times.Once);
@@ -446,7 +368,7 @@ namespace GitHub.Runner.Common.Tests.Listener
jobDispatcher.Initialize(hc);
// Act
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
// Assert
_configurationStore.Verify(x => x.SaveSettings(It.IsAny<RunnerSettings>()), Times.Never);
@@ -499,7 +421,7 @@ namespace GitHub.Runner.Common.Tests.Listener
jobDispatcher.Initialize(hc);
// Act
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
// Assert
_configurationStore.Verify(x => x.SaveSettings(It.IsAny<RunnerSettings>()), Times.Never);
@@ -557,7 +479,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.True(cancellationTokenSource.IsCancellationRequested);
@@ -614,7 +536,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.False(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should failed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -678,7 +600,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -737,78 +659,5 @@ namespace GitHub.Runner.Common.Tests.Listener
Assert.True(jobDispatcher.RunOnceJobCompleted.Task.Result, "JobDispatcher should set task complete token to 'TRUE' for one time agent.");
}
}
private static void EnableRunServiceJobForJobDispatcher(JobDispatcher jobDispatcher)
{
// Set the value of the _isRunServiceJob field to true
var isRunServiceJobField = typeof(JobDispatcher).GetField("_isRunServiceJob", BindingFlags.NonPublic | BindingFlags.Instance);
isRunServiceJobField.SetValue(jobDispatcher, true);
}
private static ServiceEndpoint GetServiceEndpoint()
{
var serviceEndpoint = new ServiceEndpoint
{
Authorization = new EndpointAuthorization
{
Scheme = EndpointAuthorizationSchemes.OAuth
}
};
serviceEndpoint.Authorization.Parameters.Add("AccessToken", "token");
return serviceEndpoint;
}
private static AgentJobRequestMessage GetAgentJobRequestMessage()
{
var message = new AgentJobRequestMessage(
new TaskOrchestrationPlanReference()
{
PlanType = "Build",
PlanId = Guid.NewGuid(),
Version = 1
},
new TimelineReference()
{
Id = Guid.NewGuid()
},
Guid.NewGuid(),
"jobDisplayName",
"jobName",
null,
null,
new List<TemplateToken>(),
new Dictionary<string, VariableValue>()
{
{
"variables",
new VariableValue()
{
IsSecret = false,
Value = "variables"
}
}
},
new List<MaskHint>()
{
new MaskHint()
{
Type = MaskType.Variable,
Value = "maskHints"
}
},
new JobResources(),
new DictionaryContextData(),
new WorkspaceOptions(),
new List<JobStep>(),
new List<string>()
{
"fileTable"
},
null,
new List<TemplateToken>(),
new ActionsEnvironmentReference("env")
);
return message;
}
}
}

View File

@@ -351,7 +351,7 @@ namespace GitHub.Runner.Common.Tests
Assert.False(proxy.IsBypassed(new Uri("https://actions.com")));
Assert.False(proxy.IsBypassed(new Uri("https://ggithub.com")));
Assert.False(proxy.IsBypassed(new Uri("https://github.comm")));
Assert.False(proxy.IsBypassed(new Uri("https://google.com"))); // no_proxy has '.google.com', specifying only subdomains bypass
Assert.False(proxy.IsBypassed(new Uri("https://google.com")));
Assert.False(proxy.IsBypassed(new Uri("https://example.com")));
Assert.False(proxy.IsBypassed(new Uri("http://example.com:333")));
Assert.False(proxy.IsBypassed(new Uri("http://192.168.0.123:123")));
@@ -374,76 +374,6 @@ namespace GitHub.Runner.Common.Tests
CleanProxyEnv();
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void BypassAllOnWildcardNoProxy()
{
try
{
Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
Environment.SetEnvironmentVariable("no_proxy", "example.com, * , example2.com");
var proxy = new RunnerWebProxy();
Assert.True(proxy.IsBypassed(new Uri("http://actions.com")));
Assert.True(proxy.IsBypassed(new Uri("http://localhost")));
Assert.True(proxy.IsBypassed(new Uri("http://127.0.0.1:8080")));
Assert.True(proxy.IsBypassed(new Uri("https://actions.com")));
Assert.True(proxy.IsBypassed(new Uri("https://localhost")));
Assert.True(proxy.IsBypassed(new Uri("https://127.0.0.1:8080")));
}
finally
{
CleanProxyEnv();
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void IgnoreWildcardInNoProxySubdomain()
{
try
{
Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
Environment.SetEnvironmentVariable("no_proxy", "*.example.com");
var proxy = new RunnerWebProxy();
Assert.False(proxy.IsBypassed(new Uri("http://sub.example.com")));
Assert.False(proxy.IsBypassed(new Uri("http://example.com")));
}
finally
{
CleanProxyEnv();
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void WildcardNoProxyWorksWhenOtherNoProxyAreAround()
{
try
{
Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
Environment.SetEnvironmentVariable("no_proxy", "example.com,*,example2.com");
var proxy = new RunnerWebProxy();
Assert.True(proxy.IsBypassed(new Uri("http://actions.com")));
Assert.True(proxy.IsBypassed(new Uri("http://localhost")));
Assert.True(proxy.IsBypassed(new Uri("http://127.0.0.1:8080")));
Assert.True(proxy.IsBypassed(new Uri("https://actions.com")));
Assert.True(proxy.IsBypassed(new Uri("https://localhost")));
Assert.True(proxy.IsBypassed(new Uri("https://127.0.0.1:8080")));
}
finally
{
CleanProxyEnv();
}
}
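Taken together, the proxy tests in this file pin down the no_proxy semantics asserted above: an entry like ".google.com" bypasses only subdomains, a bare "*" bypasses every host, and a wildcard pattern such as "*.example.com" is ignored. The following simplified matcher reflects just those rules; it is a sketch, not RunnerWebProxy (which also handles ports, credentials, and IP literals), and the plain-host branch is an assumption not asserted by these tests.
using System;
using System.Linq;
static class NoProxySketch
{
    public static bool IsBypassed(string noProxy, Uri uri)
    {
        var entries = (noProxy ?? string.Empty)
            .Split(',')
            .Select(e => e.Trim())
            .Where(e => e.Length > 0)
            .ToList();
        // A bare "*" bypasses the proxy for every host.
        if (entries.Contains("*"))
        {
            return true;
        }
        foreach (var entry in entries)
        {
            // Wildcard patterns such as "*.example.com" are ignored outright.
            if (entry.Contains("*"))
            {
                continue;
            }
            var host = uri.Host;
            if (entry.StartsWith("."))
            {
                // ".google.com" matches sub.google.com but not google.com itself.
                if (host.EndsWith(entry, StringComparison.OrdinalIgnoreCase))
                {
                    return true;
                }
            }
            else if (host.Equals(entry, StringComparison.OrdinalIgnoreCase) ||
                     host.EndsWith("." + entry, StringComparison.OrdinalIgnoreCase))
            {
                // Assumption for illustration: a plain host entry covers the host
                // and its subdomains; the tests above do not assert this case.
                return true;
            }
        }
        return false;
    }
}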
[Fact]
[Trait("Level", "L0")]

View File

@@ -1,4 +1,4 @@
using GitHub.Runner.Common.Util;
using System;
using System.Collections.Concurrent;
using System.Globalization;
@@ -56,12 +56,9 @@ namespace GitHub.Runner.Common.Tests
}
var traceListener = new HostTraceListener(TraceFileName);
var encoders = new List<ValueEncoder>()
{
ValueEncoders.JsonStringEscape,
ValueEncoders.UriDataEscape
};
_secretMasker = new SecretMasker(encoders);
_secretMasker = new SecretMasker();
_secretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape);
_secretMasker.AddValueEncoder(ValueEncoders.UriDataEscape);
_traceManager = new TraceManager(traceListener, null, _secretMasker);
_trace = GetTrace(nameof(TestHostContext));
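Both sides of this hunk register the same two value encoders, either through the SecretMasker constructor or via AddValueEncoder; the point of an encoder is that a registered secret is also masked in its encoded forms. A quick BCL-only illustration of the two transformations involved (the runner's ValueEncoders.JsonStringEscape and UriDataEscape perform similar escaping; details may differ):
using System;
using System.Text.Json;
class EncoderDemo
{
    static void Main()
    {
        string secret = "p@ss\"word/with spaces";
        // How the secret would appear inside a JSON document.
        string jsonEscaped = JsonSerializer.Serialize(secret).Trim('"');
        // How the secret would appear inside a URL query or path segment.
        string uriEscaped = Uri.EscapeDataString(secret);
        Console.WriteLine(jsonEscaped); // exact escaping depends on serializer settings
        Console.WriteLine(uriEscaped);  // p%40ss%22word%2Fwith%20spaces
    }
}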

View File

@@ -1,3 +1,4 @@
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using System;
using System.IO;
@@ -930,36 +931,6 @@ namespace GitHub.Runner.Common.Tests.Util
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void LoadObject_ThrowsOnRequiredLoadObject()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
// Arrange: Create a directory with a file.
string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName());
string file = Path.Combine(directory, "empty file");
Directory.CreateDirectory(directory);
File.WriteAllText(path: file, contents: "");
Assert.Throws<ArgumentNullException>(() => IOUtil.LoadObject<RunnerSettings>(file, true));
file = Path.Combine(directory, "invalid type file");
File.WriteAllText(path: file, contents: " ");
Assert.Throws<ArgumentException>(() => IOUtil.LoadObject<RunnerSettings>(file, true));
// Cleanup.
if (Directory.Exists(directory))
{
Directory.Delete(directory, recursive: true);
}
}
}
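The removed test above documents the contract of IOUtil.LoadObject<T>(path, required: true): an empty file raises ArgumentNullException, and a file whose content cannot be deserialized into the requested type raises ArgumentException. A hedged sketch of a loader with that contract, using System.Text.Json purely for illustration (the runner's helper is implemented differently):
using System;
using System.IO;
using System.Text.Json;
static class IoUtilSketch
{
    public static T LoadObject<T>(string path, bool required) where T : class
    {
        string json = File.ReadAllText(path);
        if (string.IsNullOrEmpty(json))
        {
            if (required) throw new ArgumentNullException(nameof(json), $"File is empty: {path}");
            return null;
        }
        T obj;
        try
        {
            obj = JsonSerializer.Deserialize<T>(json);
        }
        catch (JsonException)
        {
            obj = null;
        }
        if (obj == null && required)
        {
            throw new ArgumentException($"File does not contain a valid {typeof(T).Name}: {path}");
        }
        return obj;
    }
}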
private static async Task CreateDirectoryReparsePoint(IHostContext context, string link, string target)
{
#if OS_WINDOWS

View File

@@ -1,4 +1,5 @@
using GitHub.Runner.Sdk;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using System.Globalization;
using Xunit;
@@ -185,19 +186,5 @@ namespace GitHub.Runner.Common.Tests.Util
Assert.False(result9, $"'{undefineString3}' should convert to false.");
}
}
[Theory]
[InlineData("", "")]
[InlineData("(())", "[[]]")]
[InlineData("()()", "[][]")]
[InlineData(" Liquorix kernel OS Description is poorly formatted (linux version ", "Liquorix kernel OS Description is poorly formatted [linux version")]
[InlineData("Liquorix kernel OS Description is poorly formatted (linux version", "Liquorix kernel OS Description is poorly formatted [linux version")]
[InlineData("()((.", "[][[.")]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void SanitizeUserAgentHeader(string input, string expected)
{
Assert.Equal(expected, StringUtil.SanitizeUserAgentHeader(input));
}
}
}
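The removed theory above pins down StringUtil.SanitizeUserAgentHeader: parentheses become brackets (parentheses delimit comments in a User-Agent header) and surrounding whitespace is trimmed. A minimal reconstruction consistent with those cases; the actual implementation may do more:
using System;
static class UserAgentSketch
{
    public static string Sanitize(string value)
    {
        return (value ?? string.Empty)
            .Replace('(', '[')
            .Replace(')', ']')
            .Trim();
    }
    static void Main()
    {
        Console.WriteLine(Sanitize("(())"));                  // [[]]
        Console.WriteLine(Sanitize(" poorly formatted (x ")); // poorly formatted [x
    }
}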

View File

@@ -3,14 +3,20 @@ using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Container;
using GitHub.Runner.Worker.Handlers;
using Moq;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
using Pipelines = GitHub.DistributedTask.Pipelines;
@@ -143,12 +149,11 @@ namespace GitHub.Runner.Common.Tests.Worker
_context.Add("matrix", matrixData);
// Act
// Should report success with no updated required if there's already a valid display name.
var validDisplayName = _actionRunner.EvaluateDisplayName(_context, _actionRunner.ExecutionContext, out bool updated);
// Should not do anything if we don't have a displayNameToken to expand
var didUpdateDisplayName = _actionRunner.TryEvaluateDisplayName(_context, _actionRunner.ExecutionContext);
// Assert
Assert.True(validDisplayName);
Assert.False(updated);
Assert.False(didUpdateDisplayName);
Assert.Equal(actionDisplayName, _actionRunner.DisplayName);
}
@@ -178,51 +183,13 @@ namespace GitHub.Runner.Common.Tests.Worker
// Act
// Should expand the displaynameToken and set the display name to that
var validDisplayName = _actionRunner.EvaluateDisplayName(_context, _actionRunner.ExecutionContext, out bool updated);
var didUpdateDisplayName = _actionRunner.TryEvaluateDisplayName(_context, _actionRunner.ExecutionContext);
// Assert
Assert.True(validDisplayName);
Assert.True(updated);
Assert.True(didUpdateDisplayName);
Assert.Equal(expectedString, _actionRunner.DisplayName);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void IgnoreDisplayNameTokenWhenDisplayNameIsExplicitlySet()
{
var explicitDisplayName = "Explcitly Set Name";
// Arrange
Setup();
var actionId = Guid.NewGuid();
var action = new Pipelines.ActionStep()
{
Name = "action",
Id = actionId,
DisplayName = explicitDisplayName,
DisplayNameToken = new BasicExpressionToken(null, null, null, "matrix.node"),
};
_actionRunner.Action = action;
var matrixData = new DictionaryContextData
{
["node"] = new StringContextData("8")
};
_context.Add("matrix", matrixData);
// Act
// Should ignore the displayNameToken since there's already an explicit value for DisplayName
var validDisplayName = _actionRunner.EvaluateDisplayName(_context, _actionRunner.ExecutionContext, out bool updated);
// Assert
Assert.True(validDisplayName);
Assert.False(updated);
Assert.Equal(explicitDisplayName, _actionRunner.DisplayName);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
@@ -251,11 +218,10 @@ namespace GitHub.Runner.Common.Tests.Worker
// Act
// Should expand the displaynameToken and set the display name to that
var validDisplayName = _actionRunner.EvaluateDisplayName(_context, _actionRunner.ExecutionContext, out bool updated);
var didUpdateDisplayName = _actionRunner.TryEvaluateDisplayName(_context, _actionRunner.ExecutionContext);
// Assert
Assert.True(validDisplayName);
Assert.True(updated);
Assert.True(didUpdateDisplayName);
Assert.Equal("Run 8", _actionRunner.DisplayName);
}
@@ -280,11 +246,10 @@ namespace GitHub.Runner.Common.Tests.Worker
// Act
// Should expand the displaynameToken and set the display name to that
var validDisplayName = _actionRunner.EvaluateDisplayName(_context, _actionRunner.ExecutionContext, out bool updated);
var didUpdateDisplayName = _actionRunner.TryEvaluateDisplayName(_context, _actionRunner.ExecutionContext);
// Assert
Assert.True(validDisplayName);
Assert.True(updated);
Assert.True(didUpdateDisplayName);
Assert.Equal("Run TestImageName:latest", _actionRunner.DisplayName);
}
@@ -307,11 +272,10 @@ namespace GitHub.Runner.Common.Tests.Worker
// Act
// Should not do anything if we don't have context on the display name
var validDisplayName = _actionRunner.EvaluateDisplayName(_context, _actionRunner.ExecutionContext, out bool updated);
var didUpdateDisplayName = _actionRunner.TryEvaluateDisplayName(_context, _actionRunner.ExecutionContext);
// Assert
Assert.False(validDisplayName);
Assert.False(updated);
Assert.False(didUpdateDisplayName);
// Should use the pretty display name until we can eval
Assert.Equal("${{ matrix.node }}", _actionRunner.DisplayName);
}

View File

@@ -711,63 +711,6 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void PublishStepResult_EmbeddedStep()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange: Create a job request message.
TaskOrchestrationPlanReference plan = new();
TimelineReference timeline = new();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
// Arrange: Setup the paging logger.
var pagingLogger = new Mock<IPagingLogger>();
var pagingLogger2 = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
hc.EnqueueInstance(pagingLogger.Object);
hc.EnqueueInstance(pagingLogger2.Object);
hc.SetSingleton(jobServerQueue.Object);
var ec = new Runner.Worker.ExecutionContext();
ec.Initialize(hc);
// Act.
ec.InitializeJob(jobRequest, CancellationToken.None);
ec.Start();
var embeddedStep = ec.CreateChild(Guid.NewGuid(), "action_1_pre", "action_1_pre", null, null, ActionRunStage.Main, isEmbedded: true);
embeddedStep.Start();
embeddedStep.StepTelemetry.Type = "node16";
embeddedStep.StepTelemetry.Action = "actions/checkout";
embeddedStep.StepTelemetry.Ref = "v2";
embeddedStep.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
embeddedStep.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice" });
ec.Complete();
// Assert.
Assert.Equal(1, ec.Global.StepsResult.Count);
Assert.Equal(TaskResult.Succeeded, ec.Global.StepsResult.Single().Conclusion);
}
}
private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
{
var hc = new TestHostContext(this, testName);

View File

@@ -1,6 +1,5 @@
using System;
using System.Runtime.CompilerServices;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;

View File

@@ -16,10 +16,9 @@ namespace GitHub.Runner.Common.Tests.Worker
private IExecutionContext _jobEc;
private JobRunner _jobRunner;
private List<IStep> _initResult = new();
private Pipelines.AgentJobRequestMessage _message;
private CancellationTokenSource _tokenSource;
private Mock<IJobServer> _jobServer;
private Mock<IRunServer> _runServer;
private Mock<IJobServerQueue> _jobServerQueue;
private Mock<IConfigurationStore> _config;
private Mock<IExtensionManager> _extensions;
@@ -39,7 +38,6 @@ namespace GitHub.Runner.Common.Tests.Worker
_extensions = new Mock<IExtensionManager>();
_jobExtension = new Mock<IJobExtension>();
_jobServer = new Mock<IJobServer>();
_runServer = new Mock<IRunServer>();
_jobServerQueue = new Mock<IJobServerQueue>();
_stepRunner = new Mock<IStepsRunner>();
_logger = new Mock<IPagingLogger>();
@@ -57,6 +55,33 @@ namespace GitHub.Runner.Common.Tests.Worker
_jobRunner = new JobRunner();
_jobRunner.Initialize(hc);
TaskOrchestrationPlanReference plan = new();
TimelineReference timeline = new Timeline(Guid.NewGuid());
Guid jobId = Guid.NewGuid();
_message = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, testName, testName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
_message.Variables[Constants.Variables.System.Culture] = "en-US";
_message.Resources.Endpoints.Add(new ServiceEndpoint()
{
Name = WellKnownServiceEndpointNames.SystemVssConnection,
Url = new Uri("https://pipelines.actions.githubusercontent.com"),
Authorization = new EndpointAuthorization()
{
Scheme = "Test",
Parameters = {
{"AccessToken", "token"}
}
},
});
_message.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
_message.ContextData.Add("github", new Pipelines.ContextData.DictionaryContextData());
_initResult.Clear();
_jobExtension.Setup(x => x.InitializeJob(It.IsAny<IExecutionContext>(), It.IsAny<Pipelines.AgentJobRequestMessage>())).
@@ -77,7 +102,6 @@ namespace GitHub.Runner.Common.Tests.Worker
hc.SetSingleton(_config.Object);
hc.SetSingleton(_jobServer.Object);
hc.SetSingleton(_runServer.Object);
hc.SetSingleton(_jobServerQueue.Object);
hc.SetSingleton(_stepRunner.Object);
hc.SetSingleton(_extensions.Object);
@@ -89,43 +113,6 @@ namespace GitHub.Runner.Common.Tests.Worker
return hc;
}
private Pipelines.AgentJobRequestMessage GetMessage(String messageType = JobRequestMessageTypes.PipelineAgentJobRequest, [CallerMemberName] String testName = "")
{
TaskOrchestrationPlanReference plan = new();
TimelineReference timeline = new Timeline(Guid.NewGuid());
Guid jobId = Guid.NewGuid();
var message = new Pipelines.AgentJobRequestMessage(
plan,
timeline,
jobId,
testName,
testName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null,
messageType: messageType);
message.Variables[Constants.Variables.System.Culture] = "en-US";
message.Resources.Endpoints.Add(new ServiceEndpoint()
{
Name = WellKnownServiceEndpointNames.SystemVssConnection,
Url = new Uri("https://pipelines.actions.githubusercontent.com"),
Authorization = new EndpointAuthorization()
{
Scheme = "Test",
Parameters = {
{"AccessToken", "token"}
}
},
});
message.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
message.ContextData.Add("github", new Pipelines.ContextData.DictionaryContextData());
return message;
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
@@ -136,7 +123,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_jobExtension.Setup(x => x.InitializeJob(It.IsAny<IExecutionContext>(), It.IsAny<Pipelines.AgentJobRequestMessage>()))
.Throws(new Exception());
await _jobRunner.RunAsync(GetMessage(), _tokenSource.Token);
await _jobRunner.RunAsync(_message, _tokenSource.Token);
Assert.Equal(TaskResult.Failed, _jobEc.Result);
_stepRunner.Verify(x => x.RunAsync(It.IsAny<IExecutionContext>()), Times.Never);
@@ -154,24 +141,11 @@ namespace GitHub.Runner.Common.Tests.Worker
.Throws(new OperationCanceledException());
_tokenSource.Cancel();
await _jobRunner.RunAsync(GetMessage(), _tokenSource.Token);
await _jobRunner.RunAsync(_message, _tokenSource.Token);
Assert.Equal(TaskResult.Canceled, _jobEc.Result);
_stepRunner.Verify(x => x.RunAsync(It.IsAny<IExecutionContext>()), Times.Never);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task WorksWithRunnerJobRequestMessageType()
{
using (TestHostContext hc = CreateTestContext())
{
var message = GetMessage(JobRequestMessageTypes.RunnerJobRequest);
await _jobRunner.RunAsync(message, _tokenSource.Token);
Assert.Equal(TaskResult.Succeeded, _jobEc.Result);
}
}
}
}

View File

@@ -5,7 +5,6 @@ using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Container;

View File

@@ -25,25 +25,25 @@ runs:
- run: exit ${{ inputs.exit-code }}
shell: bash
- run: echo "default=true" >> $GITHUB_OUTPUT
- run: echo "::set-output name=default::true"
id: default-conditional
shell: bash
- run: echo "success=true" >> $GITHUB_OUTPUT
- run: echo "::set-output name=success::true"
id: success-conditional
shell: bash
if: success()
- run: echo "failure=true" >> $GITHUB_OUTPUT
- run: echo "::set-output name=failure::true"
id: failure-conditional
shell: bash
if: failure()
- run: echo "always=true" >> $GITHUB_OUTPUT
- run: echo "::set-output name=always::true"
id: always-conditional
shell: bash
if: always()
- run: echo "failed"
shell: bash
if: ${{ inputs.exit-code == 1 && failure() }}
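The two sides of this composite-action fixture use the two step-output mechanisms: appending "name=value" lines to the file named by $GITHUB_OUTPUT versus the deprecated ::set-output workflow command. A step written in C# (hypothetical here; the fixture uses bash) would emit an output the same way:
using System;
using System.IO;
class SetOutputDemo
{
    static void Main()
    {
        string outputFile = Environment.GetEnvironmentVariable("GITHUB_OUTPUT");
        if (!string.IsNullOrEmpty(outputFile))
        {
            // Current mechanism: one "name=value" line per output.
            File.AppendAllText(outputFile, "default=true" + Environment.NewLine);
        }
        else
        {
            // Older runners without GITHUB_OUTPUT: deprecated workflow command.
            Console.WriteLine("::set-output name=default::true");
        }
    }
}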

View File

@@ -22,7 +22,7 @@ DOWNLOAD_DIR="$SCRIPT_DIR/../_downloads/netcore2x"
PACKAGE_DIR="$SCRIPT_DIR/../_package"
PACKAGE_TRIMS_DIR="$SCRIPT_DIR/../_package_trims"
DOTNETSDK_ROOT="$SCRIPT_DIR/../_dotnetsdk"
DOTNETSDK_VERSION="6.0.405"
DOTNETSDK_VERSION="6.0.300"
DOTNETSDK_INSTALLDIR="$DOTNETSDK_ROOT/$DOTNETSDK_VERSION"
RUNNER_VERSION=$(cat runnerversion)
@@ -203,13 +203,6 @@ function runtest ()
dotnet msbuild -t:test -p:PackageRuntime="${RUNTIME_ID}" -p:BUILDCONFIG="${BUILD_CONFIG}" -p:RunnerVersion="${RUNNER_VERSION}" ./dir.proj || failed "failed tests"
}
function format()
{
heading "Formatting..."
files="$(git status -s "*.cs" | awk '{print $2}' | tr '\n' ' ')"
dotnet format ${SCRIPT_DIR}/ActionsRunner.sln --exclude / --include $files || failed "failed formatting"
}
function package ()
{
if [ ! -d "${LAYOUT_DIR}/bin" ]; then
@@ -367,9 +360,7 @@ case $DEV_CMD in
"l") layout;;
"package") package;;
"p") package;;
"format") format;;
"f") format;;
*) echo "Invalid cmd. Use build(b), test(t), layout(l), package(p), or format(f)";;
*) echo "Invalid cmd. Use build(b), test(t), layout(l) or package(p)";;
esac
popd

View File

@@ -1,5 +1,5 @@
{
"sdk": {
"version": "6.0.405"
"version": "6.0.300"
}
}

View File

@@ -1 +1 @@
2.303.0
2.301.1