Compare commits

...

33 Commits

Author SHA1 Message Date
Tingluo Huang
9ef4121b5b Create 2.304.0 runner release 2023-04-26 16:04:26 -04:00
Tingluo Huang
58f3ff55aa Prepare 2.304.0 runner release. (#2569) 2023-04-26 15:48:13 -04:00
Tingluo Huang
6353ac84d7 Add orchestrationId to useragent for better correlation. (#2568)
* Add orchestrationId to useragent for better correlation.

* .
2023-04-26 19:15:19 +00:00
Yang Cao
ad9a4a45d1 Bypass job server when run service is enabled (#2516)
* Only upload to Results with new job message type

* No need to have separate websocketFeedServer

* Linting fix

* Update src/Runner.Common/JobServerQueue.cs

Co-authored-by: Tingluo Huang <tingluohuang@github.com>

* add connection timeout

* Consolidate initializing webclient to result client

* Retry websocket delivery for console logs

* Linter fix

* Do not give up for good, reconnect again in 10 minutes

* Has to reset delivered

* Only first time retry 3 times to connect to websocket

---------

Co-authored-by: Tingluo Huang <tingluohuang@github.com>
2023-04-26 12:51:56 -04:00
John Wesley Walker III
a41397ae93 Rename AcquireJobRequest::StreamID to AcquireJobRequest::JobMessageID (#2547)
* Rename AcquireJobRequest::StreamID to AcquireJobRequest::JobMessageID to match corresponding server-side update.
* appeased the linter
* Added unit tests to prove AcquireJobRequest serialization/deserialization is as-expected.
* Distinguish unit test variations.
* Incorporated PR Feedback.
2023-04-26 18:31:41 +02:00
Erez Testiler
c4d41e95cb Add *.ghe.localhost domains to hosted server check (#2536) 2023-04-24 12:52:28 -04:00
Maggie Spletzer
af6ed41bcb Adding curl --retry-all-errors for external tool downloads (#2557)
* adding retry-all-errors based on curl version >= 7.71.0

* adding logging

* fixing conditional logic
2023-04-20 21:01:11 +00:00
Tingluo Huang
e8b2380a20 Limit the time we wait for waiting websocket to connect. (#2554) 2023-04-19 10:20:00 -04:00
Maggie Spletzer
38ab9dedf4 Adding curl retry for external tool downloads (#2552)
* adding retry for flaky downloads

* adding comment

* Update src/Misc/externals.sh

Co-authored-by: Tingluo Huang <tingluohuang@github.com>

---------

Co-authored-by: Tingluo Huang <tingluohuang@github.com>
2023-04-18 18:31:46 +00:00
Yashwanth Anantharaju
c7629700ad Handle non success raw http responses (#2541)
* handle non success responses

* fix format

* nits

Co-authored-by: Tingluo Huang <tingluohuang@github.com>

---------

Co-authored-by: Tingluo Huang <tingluohuang@github.com>
2023-04-14 16:17:20 -04:00
Yang Cao
b9a0b5dba9 If conclusion is not set, we cannot get default value (#2535) 2023-04-13 13:03:57 -04:00
Yang Cao
766cefe599 Also send in conclusion for steps (#2531) 2023-04-13 15:19:00 +02:00
Eli Entelis
2ecd7d2fc6 which handles broken symlink & unit test added (#2150) (#2196)
* which handles broken symlink & unit test added (#2150)

* Update src/Runner.Sdk/Util/WhichUtil.cs

Co-authored-by: Ferenc Hammerl <31069338+fhammerl@users.noreply.github.com>

* fix pr comments

* trace log added,  in case we find a symlink that is broken

* add check in case the target of the symlink is a relative path; added test that check symlink that targets a full path; added test that check symlink that targets a relative path;

* fix tests

* fix tests for linux

---------

Co-authored-by: Eli Entelis <eli.entelis@hexagon.com>
Co-authored-by: Eli Entelis <42981948+Eliminator1999@users.noreply.github.com>
Co-authored-by: Ferenc Hammerl <31069338+fhammerl@users.noreply.github.com>
2023-04-06 22:15:11 -04:00
Yang Cao
0484afeec7 Update Runner to send step updates to Results (#2510)
* Also send Steps update to Results service

* Refactor to separate results server from current job server

* If hit any error while uploading to Results, skip Results upload

* Add proxy authentication and buffer request for WinHttpHandler

* Remove unnecessary null guard

* Also send Results telemetry when step update fails

* IResultsServer is not disposable
2023-04-03 16:31:13 -04:00
Nikola Jokic
1ceb1a67f2 Guard against NullReference while creating HostContext (#2343)
* Guard against NullReference while creating HostContext

* Throw on IOUtil loading object if content is empty or object is null

* removed unused dependency

* Changed exceptions to ArgumentNullException and ArgumentException

* fixed my mistake on LoadObject

* fixed tests

* trigger workflows

* trigger workflows

* encode files using utf8
2023-03-30 15:39:09 +02:00
Luke Tomlinson
9f778b814d Register Runners against V2 servers (#2505)
* Parse runners and send publicKey

* wip

* Fix conflicts

* Cleanup

* Cleanup

* fix test

* fix test

* Add trace for broker message listener

* Feedback

* refactor

* remove dead code

* Remove old comment
2023-03-28 15:45:00 -04:00
Luke Tomlinson
92258f9ea1 Listen directly to Broker for messages (#2500) 2023-03-24 14:00:34 -04:00
Bassem Dghaidi
74eeb82684 ADR: Runner Image Tags (#2494)
* WIP

* WIP

* Add context

* Add 2494-runner-image-tags ADR draft

* Fix ADR title

* Add more information to option 2

* Add decision

* Fix status
2023-03-23 12:09:10 -04:00
Matisse Hack
0e7ca9aedb Fix JIT configurations on Windows (#2497)
* Fix JIT configurations on Windows

* Update src/Runner.Listener/Runner.cs
2023-03-21 15:04:50 -04:00
Tatyana Kostromskaya
bb7b1e8259 Update runner to handle Dotcom/runner-admin based registration flow (#2487) 2023-03-21 16:05:32 +01:00
Tingluo Huang
440c81b770 Bump container hooks version to 0.3.1 (#2496) 2023-03-20 14:53:47 +00:00
Bassem Dghaidi
9958fc0374 Update container hooks version to 0.3.0 (#2492) 2023-03-17 06:00:55 -04:00
Tingluo Huang
81b07eb1c4 Prepare runner release 2.303.0 (#2482) 2023-03-10 10:45:56 +00:00
Jongwoo Han
514ecec5a3 Replace deprecated command with environment file (#2429)
Signed-off-by: jongwooo <jongwooo.han@gmail.com>
2023-03-09 22:09:05 -05:00
Ajay
128b212b13 Support matrix context in output keys (#2477) 2023-03-09 21:30:28 -05:00
Nikola Jokic
2dfa28e6e0 Add update certificates to ./run.sh if RUNNER_UPDATE_CA_CERTS env is set (#2471)
* Included entrypoint that will update certs and run ./run.sh

* update ca if RUNNER_UPDATE_CA env is set

* changed env variable to RUNNER_UPDATE_TRUST_STORE

* moved entrypoint to be run.sh, removed Dockerfile entrypoint, added envvar that will update certs

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Bassem Dghaidi <568794+Link-@users.noreply.github.com>

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Bassem Dghaidi <568794+Link-@users.noreply.github.com>

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Bassem Dghaidi <568794+Link-@users.noreply.github.com>

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Tingluo Huang <tingluohuang@github.com>

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Tingluo Huang <tingluohuang@github.com>

* removed doc comment on func

---------

Co-authored-by: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Co-authored-by: Tingluo Huang <tingluohuang@github.com>
2023-03-08 12:29:55 -05:00
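For context, the change above lets a containerized runner refresh its CA trust store at startup when `RUNNER_UPDATE_CA_CERTS` is set. A minimal, hypothetical Kubernetes-style sketch of wiring that up is shown below; the container name, image tag, mount path, and ConfigMap name are illustrative assumptions, and only the environment variable name comes from this changeset:

```yaml
# Hypothetical pod spec fragment: mount a custom CA and let run.sh refresh the trust store at startup.
spec:
  containers:
    - name: runner
      image: ghcr.io/actions/actions-runner:latest            # illustrative image tag
      command: ["/home/runner/run.sh"]
      env:
        - name: RUNNER_UPDATE_CA_CERTS                         # from this changeset: triggers updateCerts() in run.sh
          value: "1"
      volumeMounts:
        - name: custom-ca
          mountPath: /usr/local/share/ca-certificates/corp-root.crt   # path scanned by update-ca-certificates on Debian-based images
          subPath: corp-root.crt
  volumes:
    - name: custom-ca
      configMap:
        name: corp-root-ca                                     # illustrative ConfigMap holding the certificate
```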
Nikola Jokic
fd96246580 Exit on deprecation error (#2299)
* terminate the runner on deprecation message

* added TaskAgentVersion exception to catch deprecation

* AccessDenied exception with inner exception InvalidTaskAgent

* Access denied exception in program and in message listener

* Fixed copy

* remove trace message from message listener
2023-03-07 14:09:10 +01:00
Ferenc Hammerl
8ef48200b4 Bypass all proxies for all hosts if no_proxy='*' is set (#2395)
* Bypass top level domain even if no_proxy specified it with leading '.'

E.g. no_proxy='.github.com' will now bypass github.com.

* Bypass proxy on all hosts when no_proxy is * (wildcard)

* Undo '.' stripping

* Simplify unit tests

* Respect wildcard even if it's one of many no_proxy items
2023-03-06 11:01:45 +01:00
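As a quick illustration of the proxy bypass behavior described in this change, the runner reads the standard lowercase proxy environment variables; the proxy address and hostnames below are placeholders:

```yaml
# Illustrative environment for a runner container; values are placeholders.
env:
  - name: https_proxy
    value: "http://proxy.internal.example:8080"
  - name: no_proxy
    # '*' bypasses the proxy for every host, and is honored even when listed
    # alongside other entries; a leading-dot entry such as '.github.com' also
    # matches github.com itself.
    value: "*"
```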
Tingluo Huang
d61b27b839 Change runner image to make user/folder align with ubuntu-latest hosted runner. (#2469) 2023-03-02 13:42:23 -05:00
Nikola Jokic
542e8a3c98 Runner service exit after consecutive re-try exits (#2426)
* Runner service exit after consecutive re-try exits

* Rename failure counts and include reset count on runner listening for jobs

* Changed from graceful shutdown to stopping=true
2023-02-28 16:00:36 +01:00
Yashwanth Anantharaju
e8975514fd call run service renewjob (#2461)
* call run service renewjob

* format

* formatting

* make it private and expose internals

* lint

* fix exception class

* lint

* fix test as well
2023-02-27 16:50:28 +00:00
Yang Cao
0befa62f64 Add job log upload support (#2447)
* Refactor and add job log upload support

* Rename method to be consistent
2023-02-21 09:55:47 -05:00
Yang Cao
aaf02ab34c Properly guard upload (#2439)
* Revert "Revert "Uploading step logs to Results as well  (#2422)" (#2437)"

This reverts commit 8c096baf49.

* Properly guard the upload to results feature

* Delete skipped file if deletesource is true
2023-02-17 10:31:41 -05:00
59 changed files with 3135 additions and 393 deletions

View File

@@ -131,7 +131,7 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha
name: Compute SHA256
@@ -140,8 +140,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha_noexternals
name: Compute SHA256
@@ -150,8 +150,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha_noruntime
name: Compute SHA256
@@ -160,8 +160,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha_noruntime_noexternals
name: Compute SHA256
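The hunks above migrate the release workflow from the deprecated `::set-output` workflow command to the `$GITHUB_OUTPUT` environment file. As a general pattern (the step id and output name below are illustrative, mirroring the workflow's own SHA256 steps), the migration looks like this:

```yaml
steps:
  - id: sha                          # illustrative step id
    name: Compute SHA256
    shell: bash
    run: |
      file=$(ls)
      sha=$(sha256sum "$file" | awk '{ print $1 }')
      # Deprecated workflow command:
      #   echo "::set-output name=sha256::$sha"
      # Environment-file replacement:
      echo "sha256=$sha" >> "$GITHUB_OUTPUT"
  - name: Use the output
    run: echo "Digest is ${{ steps.sha.outputs.sha256 }}"
```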

View File

@@ -0,0 +1,65 @@
# ADR 2494: Runner Image Tags
**Date**: 2023-03-17
**Status**: Accepted<!-- |Accepted|Rejected|Superceded|Deprecated -->
## Context
Following the [adoption of actions-runner-controller by GitHub](https://github.com/actions/actions-runner-controller/discussions/2072) and the introduction of the new runner scale set autoscaling mode, we needed to provide a basic runner image that could be used off the shelf without much friction.
The [current runner image](https://github.com/actions/runner/pkgs/container/actions-runner) is published to GHCR. Each release of this image is tagged with the runner version and the most recent release is also tagged with `latest`.
While the use of `latest` is common practice, we recommend that users pin a specific version of the runner image for a predictable runtime and an improved security posture. However, we still notice that a large number of end users rely on the `latest` tag and raise issues when they encounter problems.
In addition, the community actions-runner-controller maintainers have issued a [deprecation notice](https://github.com/actions/actions-runner-controller/issues/2056) for the `latest` tag of the existing runner images (https://github.com/orgs/actions-runner-controller/packages).
## Decision
Proceed with Option 2, keeping the `latest` tag and adding the `NOTES.txt` file to our helm charts with the notice.
### Option 1: Remove the `latest` tag
By removing the `latest` tag, we have to proceed with either of these options:
1. Remove the runner image reference in the `values.yaml` provided with the `gha-runner-scale-set` helm chart and mark these fields as required so that users have to explicitly specify a runner image and a specific tag. This will obviously introduce more friction for users who want to start using actions-runner-controller for the first time.
```yaml
spec:
containers:
- name: runner
image: ""
tag: ""
command: ["/home/runner/run.sh"]
```
1. Pin a specific runner image tag in the `values.yaml` provided with the `gha-runner-scale-set` helm chart. This will reduce friction for users who want to start using actions-runner-controller for the first time but will require us to update the `values.yaml` with every new runner release.
```yaml
spec:
containers:
- name: runner
image: "ghcr.io/actions/actions-runner"
tag: "v2.300.0"
command: ["/home/runner/run.sh"]
```
### Option 2: Keep the `latest` tag
Keeping the `latest` tag is also a reasonable option especially if we don't expect to make any breaking changes to the runner image. We could enhance this by adding a [NOTES.txt](https://helm.sh/docs/chart_template_guide/notes_files/) to the helm chart which will be displayed to the user after a successful helm install/upgrade. This will help users understand the implications of using the `latest` tag and how to pin a specific version of the runner image.
The runner image release workflow will need to be updated so that the image is pushed to GHCR and tagged only when the runner rollout has reached all scale units.
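For illustration, the kind of pinning guidance such a `NOTES.txt` could surface might point users at a values fragment like the one below; the tag value is a placeholder and the field layout simply mirrors the Option 1 examples above:

```yaml
spec:
  containers:
    - name: runner
      image: "ghcr.io/actions/actions-runner"
      tag: "v2.304.0"   # pin an explicit runner release instead of relying on the floating latest tag
      command: ["/home/runner/run.sh"]
```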
## Consequences
Proceeding with **option 1** means:
1. We will enhance the runtime predictability and security posture of our end users
1. We will have to update the `values.yaml` with every new runner release (that can be automated)
1. We will introduce friction for users who want to start using actions-runner-controller for the first time
Proceeding with **option 2** means:
1. We will have to continue to maintain the `latest` tag
1. We will assume that end users will be able to handle the implications of using the `latest` tag
1. Runner image release workflow needs to be updated

View File

@@ -2,7 +2,7 @@ FROM mcr.microsoft.com/dotnet/runtime-deps:6.0 as build
ARG RUNNER_VERSION
ARG RUNNER_ARCH="x64"
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.2.0
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.3.1
ARG DOCKER_VERSION=20.10.23
RUN apt update -y && apt install curl unzip -y
@@ -24,11 +24,26 @@ RUN export DOCKER_ARCH=x86_64 \
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0
ENV RUNNER_ALLOW_RUNASROOT=1
ENV DEBIAN_FRONTEND=noninteractive
ENV RUNNER_MANUALLY_TRAP_SIG=1
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
WORKDIR /actions-runner
COPY --from=build /actions-runner .
RUN apt-get update -y \
&& apt-get install -y --no-install-recommends \
sudo \
&& rm -rf /var/lib/apt/lists/*
RUN adduser --disabled-password --gecos "" --uid 1001 runner \
&& groupadd docker --gid 123 \
&& usermod -aG sudo runner \
&& usermod -aG docker runner \
&& echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers
WORKDIR /home/runner
COPY --chown=runner:docker --from=build /actions-runner .
RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker
USER runner

View File

@@ -1,15 +1,18 @@
## Features
- Add support for ghe.com domain (#2420)
- Add docker cli to the runner image. (#2425)
- Runner changes for communication with Results service (#2510, #2531, #2535, #2516)
- Add `*.ghe.localhost` domains to hosted server check (#2536)
- Add `OrchestrationId` to user-agent for better telemetry correlation. (#2568)
## Bugs
- Fix URL construction bug for RunService (#2396)
- Defer evaluation of a step's DisplayName until its condition is evaluated. (#2313)
- Replace '(' and ')' with '[' and ']' in OS.Description to fix User-Agent header validation (#2288)
- Fix JIT configurations on Windows (#2497)
- Guard against NullReference while creating HostContext (#2343)
- Handles broken symlink in `Which` (#2150, #2196)
- Adding curl retry for external tool downloads (#2552, #2557)
- Limit the time we wait for waiting websocket to connect. (#2554)
## Misc
- Bump dotnet sdk to latest version. (#2392)
- Start calling run service for job completion (#2412, #2423)
- Bump container hooks version to 0.3.1 in runner image (#2496)
- Runner changes to communicate with vNext services (#2487, #2500, #2505, #2541, #2547)
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository._

View File

@@ -1 +1 @@
<Update to ./src/runnerversion when creating release>
2.304.0

View File

@@ -55,12 +55,23 @@ function acquireExternalTool() {
# Download from source to the partial file.
echo "Downloading $download_source"
mkdir -p "$(dirname "$download_target")" || checkRC 'mkdir'
CURL_VERSION=$(curl --version | awk 'NR==1{print $2}')
echo "Curl version: $CURL_VERSION"
# curl -f Fail silently (no output at all) on HTTP errors (H)
# -k Allow connections to SSL sites without certs (H)
# -S Show error. With -s, make curl show errors when they occur
# -L Follow redirects (H)
# -o FILE Write to FILE instead of stdout
curl -fkSL -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
# --retry 3 Retries transient errors 3 times (timeouts, 5xx)
if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then
# Curl version is less than 7.71.0, running curl without the --retry-all-errors flag
curl -fkSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
else
# Curl version is 7.71.0 or newer, running curl with the --retry-all-errors flag
curl -fkSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
fi
# Move the partial file to the download target.
mv "$partial_target" "$download_target" || checkRC 'mv'

View File

@@ -24,7 +24,8 @@ if (exitServiceAfterNFailures <= 0) {
exitServiceAfterNFailures = NaN;
}
var consecutiveFailureCount = 0;
var unknownFailureRetryCount = 0;
var retriableFailureRetryCount = 0;
var gracefulShutdown = function () {
console.log("Shutting down runner listener");
@@ -62,7 +63,8 @@ var runService = function () {
listener.stdout.on("data", (data) => {
if (data.toString("utf8").includes("Listening for Jobs")) {
consecutiveFailureCount = 0;
unknownFailureRetryCount = 0;
retriableFailureRetryCount = 0;
}
process.stdout.write(data.toString("utf8"));
});
@@ -92,24 +94,38 @@ var runService = function () {
console.log(
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
unknownFailureRetryCount = 0;
retriableFailureRetryCount++;
if (retriableFailureRetryCount >= 10) {
console.error(
"Stopping the runner after 10 consecutive re-tryable failures"
);
stopping = true;
}
} else if (code === 3 || code === 4) {
console.log(
"Runner listener exit because of updating, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
unknownFailureRetryCount = 0;
retriableFailureRetryCount++;
if (retriableFailureRetryCount >= 10) {
console.error(
"Stopping the runner after 10 consecutive re-tryable failures"
);
stopping = true;
}
} else {
var messagePrefix = "Runner listener exit with undefined return code";
consecutiveFailureCount++;
unknownFailureRetryCount++;
retriableFailureRetryCount = 0;
if (
!isNaN(exitServiceAfterNFailures) &&
consecutiveFailureCount >= exitServiceAfterNFailures
unknownFailureRetryCount >= exitServiceAfterNFailures
) {
console.error(
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
`${messagePrefix}, exiting service after ${unknownFailureRetryCount} consecutive failures`
);
gracefulShutdown();
return;
stopping = true
} else {
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
}

View File

@@ -53,6 +53,33 @@ runWithManualTrap() {
done
}
function updateCerts() {
local sudo_prefix=""
local user_id=`id -u`
if [ $user_id -ne 0 ]; then
if [[ ! -x "$(command -v sudo)" ]]; then
echo "Warning: failed to update certificate store: sudo is required but not found"
return 1
else
sudo_prefix="sudo"
fi
fi
if [[ -x "$(command -v update-ca-certificates)" ]]; then
eval $sudo_prefix "update-ca-certificates"
elif [[ -x "$(command -v update-ca-trust)" ]]; then
eval $sudo_prefix "update-ca-trust"
else
echo "Warning: failed to update certificate store: update-ca-certificates or update-ca-trust not found. This can happen if you're using a different runner base image."
return 1
fi
}
if [[ ! -z "$RUNNER_UPDATE_CA_CERTS" ]]; then
updateCerts
fi
if [[ -z "$RUNNER_MANUALLY_TRAP_SIG" ]]; then
run $*
else

View File

@@ -0,0 +1,56 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi.RawClient;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(BrokerServer))]
public interface IBrokerServer : IRunnerService
{
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version);
}
public sealed class BrokerServer : RunnerService, IBrokerServer
{
private bool _hasConnection;
private Uri _brokerUri;
private RawConnection _connection;
private BrokerHttpClient _brokerHttpClient;
public async Task ConnectAsync(Uri serverUri, VssCredentials credentials)
{
_brokerUri = serverUri;
_connection = VssUtil.CreateRawConnection(serverUri, credentials);
_brokerHttpClient = await _connection.GetClientAsync<BrokerHttpClient>();
_hasConnection = true;
}
private void CheckConnection()
{
if (!_hasConnection)
{
throw new InvalidOperationException($"SetConnection");
}
}
public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version)
{
CheckConnection();
var jobMessage = RetryRequest<TaskAgentMessage>(
async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, cancellationToken), cancellationToken);
return jobMessage;
}
}
}

View File

@@ -50,6 +50,12 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)]
public string MonitorSocketAddress { get; set; }
[DataMember(EmitDefaultValue = false)]
public bool UseV2Flow { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ServerUrlV2 { get; set; }
[IgnoreDataMember]
public bool IsHostedServer
{

View File

@@ -1,4 +1,4 @@
using System;
using System;
namespace GitHub.Runner.Common
{
@@ -261,6 +261,7 @@ namespace GitHub.Runner.Common
public static readonly string AccessToken = "system.accessToken";
public static readonly string Culture = "system.culture";
public static readonly string PhaseDisplayName = "system.phaseDisplayName";
public static readonly string OrchestrationId = "system.orchestrationId";
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
@@ -220,7 +220,7 @@ namespace GitHub.Runner.Common
var runnerFile = GetConfigFile(WellKnownConfigFile.Runner);
if (File.Exists(runnerFile))
{
var runnerSettings = IOUtil.LoadObject<RunnerSettings>(runnerFile);
var runnerSettings = IOUtil.LoadObject<RunnerSettings>(runnerFile, true);
_userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture)));
_userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture)));
}

View File

@@ -24,13 +24,11 @@ namespace GitHub.Runner.Common
Task ConnectAsync(VssConnection jobConnection);
void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
void InitializeResultsClient(Uri uri, string token);
// logging and console
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
Task CreateStepSymmaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken);
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
@@ -44,7 +42,6 @@ namespace GitHub.Runner.Common
private bool _hasConnection;
private VssConnection _connection;
private TaskHttpClient _taskClient;
private ResultsHttpClient _resultsClient;
private ClientWebSocket _websocketClient;
private ServiceEndpoint _serviceEndpoint;
@@ -148,12 +145,6 @@ namespace GitHub.Runner.Common
InitializeWebsocketClient(TimeSpan.Zero);
}
public void InitializeResultsClient(Uri uri, string token)
{
var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
}
public ValueTask DisposeAsync()
{
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
@@ -208,13 +199,15 @@ namespace GitHub.Runner.Common
{
Trace.Info($"Attempting to start websocket client with delay {delay}.");
await Task.Delay(delay);
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), default(CancellationToken));
using var connectTimeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30));
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), connectTimeoutTokenSource.Token);
Trace.Info($"Successfully started websocket client.");
}
catch (Exception ex)
{
Trace.Info("Exception caught during websocket client connect, fallback of HTTP would be used now instead of websocket.");
Trace.Error(ex);
this._websocketClient = null;
}
}
@@ -316,15 +309,6 @@ namespace GitHub.Runner.Common
return _taskClient.CreateAttachmentAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, type, name, uploadStream, cancellationToken: cancellationToken);
}
public Task CreateStepSymmaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadStepSummaryAsync(planId, jobId, stepId, file, cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken)
{

View File

@@ -17,10 +17,10 @@ namespace GitHub.Runner.Common
TaskCompletionSource<int> JobRecordUpdated { get; }
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
Task ShutdownAsync();
void Start(Pipelines.AgentJobRequestMessage jobRequest);
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false);
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
void QueueSummaryUpload(Guid stepRecordId, string name, string path, bool deleteSource);
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord);
}
@@ -31,7 +31,7 @@ namespace GitHub.Runner.Common
private static readonly TimeSpan _delayForWebConsoleLineDequeue = TimeSpan.FromMilliseconds(500);
private static readonly TimeSpan _delayForTimelineUpdateDequeue = TimeSpan.FromMilliseconds(500);
private static readonly TimeSpan _delayForFileUploadDequeue = TimeSpan.FromMilliseconds(1000);
private static readonly TimeSpan _delayForSummaryUploadDequeue = TimeSpan.FromMilliseconds(1000);
private static readonly TimeSpan _delayForResultsUploadDequeue = TimeSpan.FromMilliseconds(1000);
// Job message information
private Guid _scopeIdentifier;
@@ -46,7 +46,7 @@ namespace GitHub.Runner.Common
// queue for file upload (log file or attachment)
private readonly ConcurrentQueue<UploadFileInfo> _fileUploadQueue = new();
private readonly ConcurrentQueue<SummaryUploadFileInfo> _summaryFileUploadQueue = new();
private readonly ConcurrentQueue<ResultsUploadFileInfo> _resultsFileUploadQueue = new();
// queue for timeline or timeline record update (one queue per timeline)
private readonly ConcurrentDictionary<Guid, ConcurrentQueue<TimelineRecord>> _timelineUpdateQueue = new();
@@ -60,15 +60,17 @@ namespace GitHub.Runner.Common
// Task for each queue's dequeue process
private Task _webConsoleLineDequeueTask;
private Task _fileUploadDequeueTask;
private Task _summaryUploadDequeueTask;
private Task _resultsUploadDequeueTask;
private Task _timelineUpdateDequeueTask;
// common
private IJobServer _jobServer;
private IResultsServer _resultsServer;
private Task[] _allDequeueTasks;
private readonly TaskCompletionSource<int> _jobCompletionSource = new();
private readonly TaskCompletionSource<int> _jobRecordUpdated = new();
private bool _queueInProcess = false;
private bool _resultsServiceOnly = false;
public TaskCompletionSource<int> JobRecordUpdated => _jobRecordUpdated;
@@ -84,19 +86,27 @@ namespace GitHub.Runner.Common
private bool _webConsoleLineAggressiveDequeue = true;
private bool _firstConsoleOutputs = true;
private bool _resultsClientInitiated = false;
private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_jobServer = hostContext.GetService<IJobServer>();
_resultsServer = hostContext.GetService<IResultsServer>();
}
public void Start(Pipelines.AgentJobRequestMessage jobRequest)
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false)
{
Trace.Entering();
_resultsServiceOnly = resultServiceOnly;
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
_jobServer.InitializeWebsocketClient(serviceEndPoint);
if (!resultServiceOnly)
{
_jobServer.InitializeWebsocketClient(serviceEndPoint);
}
// This code is usually wrapped by an instance of IExecutionContext which isn't available here.
jobRequest.Variables.TryGetValue("system.github.results_endpoint", out VariableValue resultsEndpointVariable);
@@ -107,10 +117,18 @@ namespace GitHub.Runner.Common
!string.IsNullOrEmpty(accessToken) &&
!string.IsNullOrEmpty(resultsReceiverEndpoint))
{
string liveConsoleFeedUrl = null;
Trace.Info("Initializing results client");
_jobServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), accessToken);
}
if (resultServiceOnly
&& serviceEndPoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl)
&& !string.IsNullOrEmpty(feedStreamUrl))
{
liveConsoleFeedUrl = feedStreamUrl;
}
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken);
_resultsClientInitiated = true;
}
if (_queueInProcess)
{
@@ -140,12 +158,12 @@ namespace GitHub.Runner.Common
_fileUploadDequeueTask = ProcessFilesUploadQueueAsync();
Trace.Info("Start results file upload queue.");
_summaryUploadDequeueTask = ProcessSummaryUploadQueueAsync();
_resultsUploadDequeueTask = ProcessResultsUploadQueueAsync();
Trace.Info("Start process timeline update queue.");
_timelineUpdateDequeueTask = ProcessTimelinesUpdateQueueAsync();
_allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask, _summaryUploadDequeueTask };
_allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask, _resultsUploadDequeueTask };
_queueInProcess = true;
}
@@ -176,9 +194,9 @@ namespace GitHub.Runner.Common
await ProcessFilesUploadQueueAsync(runOnce: true);
Trace.Info("File upload queue drained.");
Trace.Verbose("Draining results summary upload queue.");
await ProcessSummaryUploadQueueAsync(runOnce: true);
Trace.Info("Results summary upload queue drained.");
Trace.Verbose("Draining results upload queue.");
await ProcessResultsUploadQueueAsync(runOnce: true);
Trace.Info("Results upload queue drained.");
// ProcessTimelinesUpdateQueueAsync() will throw exception during shutdown
// if there are any timeline records that failed to update and contain output variables.
@@ -189,6 +207,9 @@ namespace GitHub.Runner.Common
Trace.Info($"Disposing job server ...");
await _jobServer.DisposeAsync();
Trace.Info($"Disposing results server ...");
await _resultsServer.DisposeAsync();
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
}
@@ -230,21 +251,43 @@ namespace GitHub.Runner.Common
_fileUploadQueue.Enqueue(newFile);
}
public void QueueSummaryUpload(Guid stepRecordId, string name, string path, bool deleteSource)
public void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines)
{
if (!_resultsClientInitiated)
{
Trace.Verbose("Skipping results upload");
try
{
if (deleteSource)
{
File.Delete(path);
}
}
catch (Exception ex)
{
Trace.Info("Catch exception during delete skipped results upload file.");
Trace.Error(ex);
}
return;
}
// all parameter not null, file path exist.
var newFile = new SummaryUploadFileInfo()
var newFile = new ResultsUploadFileInfo()
{
Name = name,
Path = path,
Type = type,
PlanId = _planId.ToString(),
JobId = _jobTimelineRecordId.ToString(),
StepId = stepRecordId.ToString(),
DeleteSource = deleteSource
RecordId = timelineRecordId,
DeleteSource = deleteSource,
Finalize = finalize,
FirstBlock = firstBlock,
TotalLines = totalLines,
};
Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to job {1} step {2}", newFile.Path, _jobTimelineRecordId, stepRecordId);
_summaryFileUploadQueue.Enqueue(newFile);
Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to job {1} step {2}", newFile.Path, _jobTimelineRecordId, timelineRecordId);
_resultsFileUploadQueue.Enqueue(newFile);
}
public void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord)
@@ -345,7 +388,14 @@ namespace GitHub.Runner.Common
// Give at most 60s for each request.
using (var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(60)))
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, timeoutTokenSource.Token);
if (_resultsServiceOnly)
{
await _resultsServer.AppendLiveConsoleFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, timeoutTokenSource.Token);
}
else
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, timeoutTokenSource.Token);
}
}
if (_firstConsoleOutputs)
@@ -437,18 +487,18 @@ namespace GitHub.Runner.Common
}
}
private async Task ProcessSummaryUploadQueueAsync(bool runOnce = false)
private async Task ProcessResultsUploadQueueAsync(bool runOnce = false)
{
Trace.Info("Starting results-based upload queue...");
while (!_jobCompletionSource.Task.IsCompleted || runOnce)
{
List<SummaryUploadFileInfo> filesToUpload = new();
SummaryUploadFileInfo dequeueFile;
while (_summaryFileUploadQueue.TryDequeue(out dequeueFile))
List<ResultsUploadFileInfo> filesToUpload = new();
ResultsUploadFileInfo dequeueFile;
while (_resultsFileUploadQueue.TryDequeue(out dequeueFile))
{
filesToUpload.Add(dequeueFile);
// process at most 10 file upload.
// process at most 10 file uploads.
if (!runOnce && filesToUpload.Count > 10)
{
break;
@@ -459,7 +509,7 @@ namespace GitHub.Runner.Common
{
if (runOnce)
{
Trace.Info($"Uploading {filesToUpload.Count} summary files in one shot through results service.");
Trace.Info($"Uploading {filesToUpload.Count} file(s) in one shot through results service.");
}
int errorCount = 0;
@@ -467,30 +517,38 @@ namespace GitHub.Runner.Common
{
try
{
await UploadSummaryFile(file);
if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
{
await UploadSummaryFile(file);
}
else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase))
{
if (file.RecordId != _jobTimelineRecordId)
{
Trace.Info($"Got a step log file to send to results service.");
await UploadResultsStepLogFile(file);
}
else if (file.RecordId == _jobTimelineRecordId)
{
Trace.Info($"Got a job log file to send to results service.");
await UploadResultsJobLogFile(file);
}
}
}
catch (Exception ex)
{
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception during summary file upload to results. {ex.Message}" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure;
var telemetryRecord = new TimelineRecord()
{
Id = Constants.Runner.TelemetryRecordId,
};
telemetryRecord.Issues.Add(issue);
QueueTimelineRecordUpdate(_jobTimelineId, telemetryRecord);
Trace.Info("Catch exception during summary file upload to results, keep going since the process is best effort.");
Trace.Info("Catch exception during file upload to results, keep going since the process is best effort.");
Trace.Error(ex);
}
finally
{
errorCount++;
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results
_resultsClientInitiated = false;
SendResultsTelemetry(ex);
}
}
Trace.Info("Tried to upload {0} summary files to results, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
Trace.Info("Tried to upload {0} file(s) to results, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
}
if (runOnce)
@@ -499,11 +557,24 @@ namespace GitHub.Runner.Common
}
else
{
await Task.Delay(_delayForSummaryUploadDequeue);
await Task.Delay(_delayForResultsUploadDequeue);
}
}
}
private void SendResultsTelemetry(Exception ex)
{
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception with results. {ex.Message}" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure;
var telemetryRecord = new TimelineRecord()
{
Id = Constants.Runner.TelemetryRecordId,
};
telemetryRecord.Issues.Add(issue);
QueueTimelineRecordUpdate(_jobTimelineId, telemetryRecord);
}
private async Task ProcessTimelinesUpdateQueueAsync(bool runOnce = false)
{
while (!_jobCompletionSource.Task.IsCompleted || runOnce)
@@ -551,7 +622,7 @@ namespace GitHub.Runner.Common
foreach (var detailTimeline in update.PendingRecords.Where(r => r.Details != null))
{
if (!_allTimelines.Contains(detailTimeline.Details.Id))
if (!_resultsServiceOnly && !_allTimelines.Contains(detailTimeline.Details.Id))
{
try
{
@@ -573,7 +644,27 @@ namespace GitHub.Runner.Common
try
{
await _jobServer.UpdateTimelineRecordsAsync(_scopeIdentifier, _hubName, _planId, update.TimelineId, update.PendingRecords, default(CancellationToken));
if (!_resultsServiceOnly)
{
await _jobServer.UpdateTimelineRecordsAsync(_scopeIdentifier, _hubName, _planId, update.TimelineId, update.PendingRecords, default(CancellationToken));
}
try
{
if (_resultsClientInitiated)
{
await _resultsServer.UpdateResultsWorkflowStepsAsync(_scopeIdentifier, _hubName, _planId, update.TimelineId, update.PendingRecords, default(CancellationToken));
}
}
catch (Exception e)
{
Trace.Info("Catch exception during update steps, skip update Results.");
Trace.Error(e);
_resultsClientInitiated = false;
SendResultsTelemetry(e);
}
if (_bufferedRetryRecords.Remove(update.TimelineId))
{
Trace.Verbose("Cleanup buffered timeline record for timeline: {0}.", update.TimelineId);
@@ -733,27 +824,30 @@ namespace GitHub.Runner.Common
bool uploadSucceed = false;
try
{
if (String.Equals(file.Type, CoreAttachmentType.Log, StringComparison.OrdinalIgnoreCase))
if (!_resultsServiceOnly)
{
// Create the log
var taskLog = await _jobServer.CreateLogAsync(_scopeIdentifier, _hubName, _planId, new TaskLog(String.Format(@"logs\{0:D}", file.TimelineRecordId)), default(CancellationToken));
// Upload the contents
using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
if (String.Equals(file.Type, CoreAttachmentType.Log, StringComparison.OrdinalIgnoreCase))
{
var logUploaded = await _jobServer.AppendLogContentAsync(_scopeIdentifier, _hubName, _planId, taskLog.Id, fs, default(CancellationToken));
// Create the log
var taskLog = await _jobServer.CreateLogAsync(_scopeIdentifier, _hubName, _planId, new TaskLog(String.Format(@"logs\{0:D}", file.TimelineRecordId)), default(CancellationToken));
// Upload the contents
using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await _jobServer.AppendLogContentAsync(_scopeIdentifier, _hubName, _planId, taskLog.Id, fs, default(CancellationToken));
}
// Create a new record and only set the Log field
var attachmentUpdataRecord = new TimelineRecord() { Id = file.TimelineRecordId, Log = taskLog };
QueueTimelineRecordUpdate(file.TimelineId, attachmentUpdataRecord);
}
// Create a new record and only set the Log field
var attachmentUpdataRecord = new TimelineRecord() { Id = file.TimelineRecordId, Log = taskLog };
QueueTimelineRecordUpdate(file.TimelineId, attachmentUpdataRecord);
}
else
{
// Create attachment
using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
else
{
var result = await _jobServer.CreateAttachmentAsync(_scopeIdentifier, _hubName, _planId, file.TimelineId, file.TimelineRecordId, file.Type, file.Name, fs, default(CancellationToken));
// Create attachment
using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var result = await _jobServer.CreateAttachmentAsync(_scopeIdentifier, _hubName, _planId, file.TimelineId, file.TimelineRecordId, file.Type, file.Name, fs, default(CancellationToken));
}
}
}
@@ -776,16 +870,50 @@ namespace GitHub.Runner.Common
}
}
private async Task UploadSummaryFile(SummaryUploadFileInfo file)
private async Task UploadSummaryFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler summaryHandler = async (file) =>
{
await _resultsServer.CreateResultsStepSummaryAsync(file.PlanId, file.JobId, file.RecordId, file.Path, CancellationToken.None);
};
await UploadResultsFile(file, summaryHandler);
}
private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler stepLogHandler = async (file) =>
{
await _resultsServer.CreateResultsStepLogAsync(file.PlanId, file.JobId, file.RecordId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
};
await UploadResultsFile(file, stepLogHandler);
}
private async Task UploadResultsJobLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of job log file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler jobLogHandler = async (file) =>
{
await _resultsServer.CreateResultsJobLogAsync(file.PlanId, file.JobId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
};
await UploadResultsFile(file, jobLogHandler);
}
private async Task UploadResultsFile(ResultsUploadFileInfo file, ResultsFileUploadHandler uploadHandler)
{
if (!_resultsClientInitiated)
{
return;
}
bool uploadSucceed = false;
try
{
// Upload the step summary
Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
var cancellationTokenSource = new CancellationTokenSource();
await _jobServer.CreateStepSymmaryAsync(file.PlanId, file.JobId, file.StepId, file.Path, cancellationTokenSource.Token);
await uploadHandler(file);
uploadSucceed = true;
}
finally
@@ -798,7 +926,7 @@ namespace GitHub.Runner.Common
}
catch (Exception ex)
{
Trace.Info("Catch exception during delete success results uploaded summary file.");
Trace.Info("Exception encountered during deletion of a temporary file that was already successfully uploaded to results.");
Trace.Error(ex);
}
}
@@ -822,18 +950,20 @@ namespace GitHub.Runner.Common
public bool DeleteSource { get; set; }
}
internal class SummaryUploadFileInfo
internal class ResultsUploadFileInfo
{
public string Name { get; set; }
public string Type { get; set; }
public string Path { get; set; }
public string PlanId { get; set; }
public string JobId { get; set; }
public string StepId { get; set; }
public Guid RecordId { get; set; }
public bool DeleteSource { get; set; }
public bool Finalize { get; set; }
public bool FirstBlock { get; set; }
public long TotalLines { get; set; }
}
internal class ConsoleLineInfo
{
public ConsoleLineInfo(Guid recordId, string line, long? lineNumber)

View File

@@ -21,6 +21,12 @@ namespace GitHub.Runner.Common
// 8 MB
public const int PageSize = 8 * 1024 * 1024;
// For Results
public static string BlocksFolder = "blocks";
// 2 MB
public const int BlockSize = 2 * 1024 * 1024;
private Guid _timelineId;
private Guid _timelineRecordId;
private FileStream _pageData;
@@ -32,6 +38,13 @@ namespace GitHub.Runner.Common
private string _pagesFolder;
private IJobServerQueue _jobServerQueue;
private string _resultsDataFileName;
private FileStream _resultsBlockData;
private StreamWriter _resultsBlockWriter;
private string _resultsBlockFolder;
private int _blockByteCount;
private int _blockCount;
public long TotalLines => _totalLines;
public override void Initialize(IHostContext hostContext)
@@ -39,8 +52,10 @@ namespace GitHub.Runner.Common
base.Initialize(hostContext);
_totalLines = 0;
_pagesFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), PagingFolder);
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
Directory.CreateDirectory(_pagesFolder);
_resultsBlockFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), BlocksFolder);
Directory.CreateDirectory(_resultsBlockFolder);
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
}
public void Setup(Guid timelineId, Guid timelineRecordId)
@@ -60,11 +75,17 @@ namespace GitHub.Runner.Common
// lazy creation on write
if (_pageWriter == null)
{
Create();
NewPage();
}
if (_resultsBlockWriter == null)
{
NewBlock();
}
string line = $"{DateTime.UtcNow.ToString("O")} {message}";
_pageWriter.WriteLine(line);
_resultsBlockWriter.WriteLine(line);
_totalLines++;
if (line.IndexOf('\n') != -1)
@@ -78,21 +99,25 @@ namespace GitHub.Runner.Common
}
}
_byteCount += System.Text.Encoding.UTF8.GetByteCount(line);
var bytes = System.Text.Encoding.UTF8.GetByteCount(line);
_byteCount += bytes;
_blockByteCount += bytes;
if (_byteCount >= PageSize)
{
NewPage();
}
if (_blockByteCount >= BlockSize)
{
NewBlock();
}
}
public void End()
{
EndPage();
}
private void Create()
{
NewPage();
EndBlock(true);
}
private void NewPage()
@@ -117,5 +142,27 @@ namespace GitHub.Runner.Common
_jobServerQueue.QueueFileUpload(_timelineId, _timelineRecordId, "DistributedTask.Core.Log", "CustomToolLog", _dataFileName, true);
}
}
private void NewBlock()
{
EndBlock(false);
_blockByteCount = 0;
_resultsDataFileName = Path.Combine(_resultsBlockFolder, $"{_timelineId}_{_timelineRecordId}.{++_blockCount}");
_resultsBlockData = new FileStream(_resultsDataFileName, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite);
_resultsBlockWriter = new StreamWriter(_resultsBlockData, System.Text.Encoding.UTF8);
}
private void EndBlock(bool finalize)
{
if (_resultsBlockWriter != null)
{
_resultsBlockWriter.Flush();
_resultsBlockData.Flush();
_resultsBlockWriter.Dispose();
_resultsBlockWriter = null;
_resultsBlockData = null;
_jobServerQueue.QueueResultsUpload(_timelineRecordId, "ResultsLog", _resultsDataFileName, "Results.Core.Log", deleteSource: true, finalize, firstBlock: _resultsDataFileName.EndsWith(".1"), totalLines: _totalLines);
}
}
}
}

View File

@@ -0,0 +1,262 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http.Headers;
using System.Net.WebSockets;
using System.Security;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.Results.Client;
using GitHub.Services.WebApi.Utilities.Internal;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(ResultServer))]
public interface IResultsServer : IRunnerService, IAsyncDisposable
{
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token);
Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
// logging and console
Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
CancellationToken cancellationToken);
Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize,
bool firstBlock, long lineCount, CancellationToken cancellationToken);
Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock,
long lineCount, CancellationToken cancellationToken);
Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
}
public sealed class ResultServer : RunnerService, IResultsServer
{
private ResultsHttpClient _resultsClient;
private ClientWebSocket _websocketClient;
private DateTime? _lastConnectionFailure;
private static readonly TimeSpan MinDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(100);
private static readonly TimeSpan MaxDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(500);
private Task _websocketConnectTask;
private String _liveConsoleFeedUrl;
private string _token;
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token)
{
var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
_token = token;
if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
{
_liveConsoleFeedUrl = liveConsoleFeedUrl;
InitializeWebsocketClient(liveConsoleFeedUrl, token, TimeSpan.Zero, retryConnection: true);
}
}
public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadStepSummaryAsync(planId, jobId, stepId, file,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize,
bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsStepLogAsync(planId, jobId, stepId, file, finalize, firstBlock,
lineCount, cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock,
long lineCount, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsJobLogAsync(planId, jobId, file, finalize, firstBlock, lineCount,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
try
{
var timelineRecords = records.ToList();
return _resultsClient.UpdateWorkflowStepsAsync(planId, new List<TimelineRecord>(timelineRecords),
cancellationToken: cancellationToken);
}
catch (Exception ex)
{
// Log error, but continue as this call is best-effort
Trace.Info($"Failed to update steps status due to {ex.GetType().Name}");
Trace.Error(ex);
}
}
throw new InvalidOperationException("Results client is not initialized.");
}
public ValueTask DisposeAsync()
{
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
GC.SuppressFinalize(this);
return ValueTask.CompletedTask;
}
private void InitializeWebsocketClient(string liveConsoleFeedUrl, string accessToken, TimeSpan delay, bool retryConnection = false)
{
if (string.IsNullOrEmpty(accessToken))
{
Trace.Info($"No access token from server");
return;
}
if (string.IsNullOrEmpty(liveConsoleFeedUrl))
{
Trace.Info($"No live console feed url from server");
return;
}
Trace.Info($"Creating websocket client ..." + liveConsoleFeedUrl);
this._websocketClient = new ClientWebSocket();
this._websocketClient.Options.SetRequestHeader("Authorization", $"Bearer {accessToken}");
var userAgentValues = new List<ProductInfoHeaderValue>();
userAgentValues.AddRange(UserAgentUtility.GetDefaultRestUserAgent());
userAgentValues.AddRange(HostContext.UserAgents);
this._websocketClient.Options.SetRequestHeader("User-Agent", string.Join(" ", userAgentValues.Select(x => x.ToString())));
// during initialization, retry up to 3 times to set up the connection
this._websocketConnectTask = ConnectWebSocketClient(liveConsoleFeedUrl, delay, retryConnection);
}
private async Task ConnectWebSocketClient(string feedStreamUrl, TimeSpan delay, bool retryConnection = false)
{
bool connected = false;
int retries = 0;
do
{
try
{
Trace.Info($"Attempting to start websocket client with delay {delay}.");
await Task.Delay(delay);
using var connectTimeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30));
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), connectTimeoutTokenSource.Token);
Trace.Info($"Successfully started websocket client.");
connected = true;
}
catch (Exception ex)
{
Trace.Info("Exception caught during websocket client connect, retry connection.");
Trace.Error(ex);
retries++;
this._websocketClient = null;
_lastConnectionFailure = DateTime.Now;
}
} while (retryConnection && !connected && retries < 3);
}
public async Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken)
{
if (_websocketConnectTask != null)
{
await _websocketConnectTask;
}
bool delivered = false;
int retries = 0;
// "_websocketClient != null" implies either: We have a successful connection OR we have to attempt sending again and then reconnect
// ...in other words, if websocket client is null, we will skip sending to websocket
if (_websocketClient != null)
{
var linesWrapper = startLine.HasValue
? new TimelineRecordFeedLinesWrapper(stepId, lines, startLine.Value)
: new TimelineRecordFeedLinesWrapper(stepId, lines);
var jsonData = StringUtil.ConvertToJson(linesWrapper);
var jsonDataBytes = Encoding.UTF8.GetBytes(jsonData);
// break the message into chunks of 1024 bytes
for (var i = 0; i < jsonDataBytes.Length; i += 1 * 1024)
{
var lastChunk = i + (1 * 1024) >= jsonDataBytes.Length;
var chunk = new ArraySegment<byte>(jsonDataBytes, i, Math.Min(1 * 1024, jsonDataBytes.Length - i));
delivered = false;
while (!delivered && retries < 3)
{
try
{
if (_websocketClient != null)
{
await _websocketClient.SendAsync(chunk, WebSocketMessageType.Text, endOfMessage: lastChunk, cancellationToken);
delivered = true;
}
}
catch (Exception ex)
{
var delay = BackoffTimerHelper.GetRandomBackoff(MinDelayForWebsocketReconnect, MaxDelayForWebsocketReconnect);
Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms.");
Trace.Error(ex);
retries++;
InitializeWebsocketClient(_liveConsoleFeedUrl, _token, delay);
}
}
}
}
if (!delivered)
{
// Giving up for now, so next invocation of this method won't attempt to reconnect
_websocketClient = null;
// however if 10 minutes have already passed, let's try to reestablish the connection again
if (_lastConnectionFailure.HasValue && DateTime.Now > _lastConnectionFailure.Value.AddMinutes(10))
{
// More than 10 minutes have passed since the last failure, try to connect again
InitializeWebsocketClient(_liveConsoleFeedUrl, _token, TimeSpan.Zero);
}
}
return delivered;
}
private void CloseWebSocket(WebSocketCloseStatus closeStatus, CancellationToken cancellationToken)
{
try
{
_websocketClient?.CloseOutputAsync(closeStatus, "Closing websocket", cancellationToken);
}
catch (Exception websocketEx)
{
// In some cases this might be okay since the websocket might not be open yet, so just close and don't trace exceptions
Trace.Info($"Failed to close websocket gracefully {websocketEx.GetType().Name}");
}
}
}
}
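
AppendLiveConsoleFeedAsync above slices each serialized payload into 1 KB text frames, marks only the final frame as end-of-message, and retries a failed send up to three times before dropping the connection for ten minutes. A minimal standalone sketch of just the slicing and send loop, using only System.Net.WebSockets types; the wss:// URL and the payload are placeholders, not values the runner actually uses:

```csharp
using System;
using System.Net.WebSockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

class ChunkedFeedSenderSketch
{
    // Sends a UTF-8 payload over an open websocket in 1 KB text frames,
    // setting endOfMessage only on the last chunk (mirrors the feed logic above).
    static async Task SendChunkedAsync(ClientWebSocket socket, string json, CancellationToken ct)
    {
        const int chunkSize = 1024;
        var bytes = Encoding.UTF8.GetBytes(json);
        for (var offset = 0; offset < bytes.Length; offset += chunkSize)
        {
            var count = Math.Min(chunkSize, bytes.Length - offset);
            var lastChunk = offset + chunkSize >= bytes.Length;
            await socket.SendAsync(new ArraySegment<byte>(bytes, offset, count),
                                   WebSocketMessageType.Text,
                                   endOfMessage: lastChunk,
                                   cancellationToken: ct);
        }
    }

    static async Task Main()
    {
        using var socket = new ClientWebSocket();
        // Placeholder endpoint; replace with a reachable feed URL before running.
        await socket.ConnectAsync(new Uri("wss://example.invalid/feed"), CancellationToken.None);
        await SendChunkedAsync(socket, "{\"lines\":[\"hello\"]}", CancellationToken.None);
    }
}
```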

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
@@ -7,6 +7,7 @@ using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi.RawClient;
namespace GitHub.Runner.Common
@@ -19,6 +20,8 @@ namespace GitHub.Runner.Common
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);
Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken token);
Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
}
public sealed class RunServer : RunnerService, IRunServer
@@ -48,14 +51,8 @@ namespace GitHub.Runner.Common
public Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken cancellationToken)
{
CheckConnection();
var jobMessage = RetryRequest<AgentJobRequestMessage>(
return RetryRequest<AgentJobRequestMessage>(
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken);
if (jobMessage == null)
{
throw new TaskOrchestrationJobNotFoundException(id);
}
return jobMessage;
}
public Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken cancellationToken)
@@ -64,5 +61,12 @@ namespace GitHub.Runner.Common
return RetryRequest(
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, cancellationToken), cancellationToken);
}
public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
{
CheckConnection();
return RetryRequest<RenewJobResponse>(
async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken);
}
}
}
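
The run-service client above routes GetJobMessageAsync, CompleteJobAsync, and the new RenewJobAsync through a shared RetryRequest helper instead of calling the HTTP client directly. The helper's real signature isn't shown in this hunk, so the sketch below is a hypothetical stand-in that captures the general shape: run the async call, swallow a transient failure, wait, and try again up to a fixed attempt count.

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

static class RetrySketch
{
    // Illustrative generic retry wrapper: run an async operation up to maxAttempts
    // times, waiting between attempts, and let the last failure propagate.
    public static async Task<T> RetryRequestAsync<T>(
        Func<Task<T>> operation,
        CancellationToken cancellationToken,
        int maxAttempts = 3,
        TimeSpan? delay = null)
    {
        var wait = delay ?? TimeSpan.FromSeconds(5);
        for (var attempt = 1; ; attempt++)
        {
            try
            {
                return await operation();
            }
            catch (Exception) when (attempt < maxAttempts && !cancellationToken.IsCancellationRequested)
            {
                await Task.Delay(wait, cancellationToken);
            }
        }
    }
}
```

A caller would wrap each request, for example: await RetrySketch.RetryRequestAsync(() => httpClient.GetAsync(url), ct).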

View File

@@ -0,0 +1,237 @@
using GitHub.DistributedTask.WebApi;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.WebApi;
using GitHub.Services.Common;
using GitHub.Runner.Sdk;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Linq;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(RunnerDotcomServer))]
public interface IRunnerDotcomServer : IRunnerService
{
Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName);
Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);
string GetGitHubRequestId(HttpResponseHeaders headers);
}
public enum RequestType
{
Get,
Post,
Patch,
Delete
}
public class RunnerDotcomServer : RunnerService, IRunnerDotcomServer
{
private ITerminal _term;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = hostContext.GetService<ITerminal>();
}
public async Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName = null)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
}
}
else if (path.Length == 2)
{
// repo or enterprise runner.
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}
var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
var agents = runnersList.ToTaskAgents();
if (string.IsNullOrEmpty(agentName))
{
return agents;
}
return agents.Where(x => string.Equals(x.Name, agentName, StringComparison.OrdinalIgnoreCase)).ToList();
}
public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups";
}
}
else if (path.Length == 2)
{
// repo or enterprise runner.
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}
var agentPools = await RetryRequest<RunnerGroupList>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
return agentPools?.ToAgentPoolList();
}
public async Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
{
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
string githubApiUrl;
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runners/register";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runners/register";
}
var bodyObject = new Dictionary<string, Object>()
{
{"url", githubUrl},
{"group_id", runnerGroupId},
{"name", agent.Name},
{"version", agent.Version},
{"updates_disabled", agent.DisableUpdate},
{"ephemeral", agent.Ephemeral},
{"labels", agent.Labels},
{"public_key", publicKey}
};
var body = new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json");
return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
}
private async Task<T> RetryRequest<T>(string githubApiUrl, string githubToken, RequestType requestType, int maxRetryAttemptsCount = 5, string errorMessage = null, StringContent body = null)
{
int retry = 0;
while (true)
{
retry++;
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
var responseStatus = System.Net.HttpStatusCode.OK;
try
{
HttpResponseMessage response = null;
if (requestType == RequestType.Get)
{
response = await httpClient.GetAsync(githubApiUrl);
}
else
{
response = await httpClient.PostAsync(githubApiUrl, body);
}
if (response != null)
{
responseStatus = response.StatusCode;
var githubRequestId = GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from '{requestType.ToString()} {githubApiUrl}' ({githubRequestId})");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<T>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from '{requestType.ToString()} {githubApiUrl}' (Request Id: {githubRequestId})");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
}
catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound)
{
Trace.Error($"{errorMessage} -- Atempt: {retry}");
Trace.Error(ex);
}
}
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
Trace.Info($"Retrying in {backOff.Seconds} seconds");
await Task.Delay(backOff);
}
}
public string GetGitHubRequestId(HttpResponseHeaders headers)
{
if (headers.TryGetValues("x-github-request-id", out var headerValues))
{
return headerValues.FirstOrDefault();
}
return string.Empty;
}
}
}
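
GetRunnersAsync and GetRunnerGroupsAsync above root their REST calls at api.{host} on hosted GitHub and at {host}/api/v3 on GitHub Enterprise Server, depending on whether the configured URL points at an org or an enterprise. A small sketch of that URL selection for the org-level runner-groups call; the org name and hosts are placeholders, and the hosted check is simplified to github.com only (the real check also covers ghe.com and ghe.localhost domains):

```csharp
using System;

static class RunnerGroupsUrlSketch
{
    // Simplified host check; the runner's UrlUtil.IsHostedServer accepts more hosts.
    static bool IsHostedServer(UriBuilder url) =>
        string.Equals(url.Host, "github.com", StringComparison.OrdinalIgnoreCase);

    // Builds the org-level runner-groups URL for either hosted GitHub or GHES.
    static string BuildOrgRunnerGroupsUrl(string githubUrl)
    {
        var builder = new UriBuilder(githubUrl);
        var path = builder.Path.Split(new[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries);
        if (path.Length != 1)
        {
            throw new ArgumentException($"'{githubUrl}' should point to an org.");
        }

        return IsHostedServer(builder)
            ? $"{builder.Scheme}://api.{builder.Host}/orgs/{path[0]}/actions/runner-groups"
            : $"{builder.Scheme}://{builder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups";
    }

    static void Main()
    {
        Console.WriteLine(BuildOrgRunnerGroupsUrl("https://github.com/my-org"));       // api.github.com form
        Console.WriteLine(BuildOrgRunnerGroupsUrl("https://ghes.example.com/my-org")); // /api/v3 form
    }
}
```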

View File

@@ -0,0 +1,14 @@
namespace GitHub.Runner.Common.Util
{
using System;
using GitHub.DistributedTask.WebApi;
public static class MessageUtil
{
public static bool IsRunServiceJob(string messageType)
{
return string.Equals(messageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase);
}
}
}

View File

@@ -0,0 +1,209 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Runner.Common.Util;
using GitHub.Services.OAuth;
namespace GitHub.Runner.Listener
{
public sealed class BrokerMessageListener : RunnerService, IMessageListener
{
private RunnerSettings _settings;
private ITerminal _term;
private TimeSpan _getNextMessageRetryInterval;
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
private IBrokerServer _brokerServer;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = HostContext.GetService<ITerminal>();
_brokerServer = HostContext.GetService<IBrokerServer>();
}
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
{
await RefreshBrokerConnection();
return await Task.FromResult(true);
}
public async Task DeleteSessionAsync()
{
await Task.CompletedTask;
}
public void OnJobStatus(object sender, JobStatusEventArgs e)
{
Trace.Info("Received job status event. JobState: {0}", e.Status);
runnerStatus = e.Status;
try
{
_getMessagesTokenSource?.Cancel();
}
catch (ObjectDisposedException)
{
Trace.Info("_getMessagesTokenSource is already disposed.");
}
}
public async Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token)
{
bool encounteringError = false;
int continuousError = 0;
Stopwatch heartbeat = new();
heartbeat.Restart();
var maxRetryCount = 10;
while (true)
{
TaskAgentMessage message = null;
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
try
{
message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token, runnerStatus, BuildConstants.RunnerPackage.Version);
if (message == null)
{
continue;
}
return message;
}
catch (OperationCanceledException) when (_getMessagesTokenSource.Token.IsCancellationRequested && !token.IsCancellationRequested)
{
Trace.Info("Get messages has been cancelled using local token source. Continue to get messages with new status.");
continue;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info("Get next message has been cancelled.");
throw;
}
catch (TaskAgentAccessTokenExpiredException)
{
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
throw;
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during get next message.");
Trace.Error(ex);
if (!IsGetNextMessageExceptionRetriable(ex))
{
throw;
}
else
{
continuousError++;
//retry after a random backoff to avoid service throttling
//in case a service error kicks all agents off the long poll and they all try to reconnect at the same time.
if (continuousError <= 5)
{
// random backoff [15, 30]
_getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30), _getNextMessageRetryInterval);
}
else if (continuousError >= maxRetryCount)
{
throw;
}
else
{
// more aggressive backoff [30, 60]
_getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(30), TimeSpan.FromSeconds(60), _getNextMessageRetryInterval);
}
if (!encounteringError)
{
//print error only on the first consecutive error
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
encounteringError = true;
}
// re-create VssConnection before next retry
await RefreshBrokerConnection();
Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
await HostContext.Delay(_getNextMessageRetryInterval, token);
}
}
finally
{
_getMessagesTokenSource.Dispose();
}
if (message == null)
{
if (heartbeat.Elapsed > TimeSpan.FromMinutes(30))
{
Trace.Info($"No message retrieved within last 30 minutes.");
heartbeat.Restart();
}
else
{
Trace.Verbose($"No message retrieved.");
}
continue;
}
Trace.Info($"Message '{message.MessageId}' received.");
}
}
public async Task DeleteMessageAsync(TaskAgentMessage message)
{
await Task.CompletedTask;
}
private bool IsGetNextMessageExceptionRetriable(Exception ex)
{
if (ex is TaskAgentNotFoundException ||
ex is TaskAgentPoolNotFoundException ||
ex is TaskAgentSessionExpiredException ||
ex is AccessDeniedException ||
ex is VssUnauthorizedException)
{
Trace.Info($"Non-retriable exception: {ex.Message}");
return false;
}
else
{
Trace.Info($"Retriable exception: {ex.Message}");
return true;
}
}
private async Task RefreshBrokerConnection()
{
var configManager = HostContext.GetService<IConfigurationManager>();
_settings = configManager.LoadSettings();
if (_settings.ServerUrlV2 == null)
{
throw new InvalidOperationException("ServerUrlV2 is not set");
}
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials creds = credMgr.LoadCredentials();
await _brokerServer.ConnectAsync(new Uri(_settings.ServerUrlV2), creds);
}
}
}
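
GetNextMessageAsync above escalates its retry delay as consecutive long-poll failures accumulate: the first five errors back off randomly in the 15–30 second range, later errors in the 30–60 second range, and the loop rethrows after ten consecutive failures. A small sketch of that escalation policy, with System.Random standing in for the runner's BackoffTimerHelper:

```csharp
using System;

static class BrokerBackoffSketch
{
    static readonly Random Rng = new Random();

    // Returns the delay before the next long-poll retry, or null when the
    // caller should stop retrying and surface the error (after 10 failures).
    public static TimeSpan? NextRetryDelay(int consecutiveErrors, int maxRetryCount = 10)
    {
        if (consecutiveErrors >= maxRetryCount)
        {
            return null; // give up, rethrow the last exception
        }

        // first few failures: gentle [15, 30]s backoff; afterwards: [30, 60]s
        var (min, max) = consecutiveErrors <= 5 ? (15, 30) : (30, 60);
        return TimeSpan.FromSeconds(Rng.Next(min, max + 1));
    }

    static void Main()
    {
        for (var errors = 1; errors <= 11; errors++)
        {
            Console.WriteLine($"errors={errors}: {NextRetryDelay(errors)?.ToString() ?? "stop"}");
        }
    }
}
```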

View File

@@ -31,12 +31,14 @@ namespace GitHub.Runner.Listener.Configuration
{
private IConfigurationStore _store;
private IRunnerServer _runnerServer;
private IRunnerDotcomServer _dotcomServer;
private ITerminal _term;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_runnerServer = HostContext.GetService<IRunnerServer>();
_dotcomServer = HostContext.GetService<IRunnerDotcomServer>();
Trace.Verbose("Creating _store");
_store = hostContext.GetService<IConfigurationStore>();
Trace.Verbose("store created");
@@ -113,6 +115,7 @@ namespace GitHub.Runner.Listener.Configuration
ICredentialProvider credProvider = null;
VssCredentials creds = null;
_term.WriteSection("Authentication");
string registerToken = string.Empty;
while (true)
{
// When testing against a dev deployment of Actions Service, set this environment variable
@@ -130,9 +133,11 @@ namespace GitHub.Runner.Listener.Configuration
else
{
runnerSettings.GitHubUrl = inputUrl;
var registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
runnerSettings.ServerUrl = authResult.TenantUrl;
runnerSettings.UseV2Flow = authResult.UseV2Flow;
_term.WriteLine($"Using V2 flow: {runnerSettings.UseV2Flow}");
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
}
@@ -176,9 +181,11 @@ namespace GitHub.Runner.Listener.Configuration
// We want to use the native CSP of the platform for storage, so we use the RSACSP directly
RSAParameters publicKey;
var keyManager = HostContext.GetService<IRSAKeyManager>();
string publicKeyXML;
using (var rsa = keyManager.CreateKey())
{
publicKey = rsa.ExportParameters(false);
publicKeyXML = rsa.ToXmlString(includePrivateParameters: false);
}
_term.WriteSection("Runner Registration");
@@ -186,9 +193,17 @@ namespace GitHub.Runner.Listener.Configuration
// If we have more than one runner group available, allow the user to specify which one to be added into
string poolName = null;
TaskAgentPool agentPool = null;
List<TaskAgentPool> agentPools = await _runnerServer.GetAgentPoolsAsync();
TaskAgentPool defaultPool = agentPools?.Where(x => x.IsInternal).FirstOrDefault();
List<TaskAgentPool> agentPools;
if (runnerSettings.UseV2Flow)
{
agentPools = await _dotcomServer.GetRunnerGroupsAsync(runnerSettings.GitHubUrl, registerToken);
}
else
{
agentPools = await _runnerServer.GetAgentPoolsAsync();
}
TaskAgentPool defaultPool = agentPools?.Where(x => x.IsInternal).FirstOrDefault();
if (agentPools?.Where(x => !x.IsHosted).Count() > 0)
{
poolName = command.GetRunnerGroupName(defaultPool?.Name);
@@ -226,8 +241,16 @@ namespace GitHub.Runner.Listener.Configuration
var userLabels = command.GetLabels();
_term.WriteLine();
List<TaskAgent> agents;
if (runnerSettings.UseV2Flow)
{
agents = await _dotcomServer.GetRunnersAsync(runnerSettings.PoolId, runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
}
else
{
agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
}
var agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
Trace.Verbose("Returns {0} agents", agents.Count);
agent = agents.FirstOrDefault();
if (agent != null)
@@ -274,7 +297,23 @@ namespace GitHub.Runner.Listener.Configuration
try
{
agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
if (runnerSettings.UseV2Flow)
{
var runner = await _dotcomServer.AddRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
agent.Id = runner.Id;
agent.Authorization = new TaskAgentAuthorization()
{
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
};
}
else
{
agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
}
if (command.DisableUpdate &&
command.DisableUpdate != agent.DisableUpdate)
{
@@ -325,24 +364,28 @@ namespace GitHub.Runner.Listener.Configuration
}
// Testing agent connection, detect any potential connection issue, like local clock skew that causes the OAuth token to expire.
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials credential = credMgr.LoadCredentials();
try
if (!runnerSettings.UseV2Flow)
{
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
// ConnectAsync() hits _apis/connectionData which is an anonymous endpoint
// Need to hit an authenticate endpoint to trigger OAuth token exchange.
await _runnerServer.GetAgentPoolsAsync();
_term.WriteSuccessMessage("Runner connection is good");
}
catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is"))
{
// there are two exception messages server send that indicate clock skew.
// 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
Trace.Error("Catch exception during test agent connection.");
Trace.Error(ex);
throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials credential = credMgr.LoadCredentials();
try
{
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
// ConnectAsync() hits _apis/connectionData which is an anonymous endpoint
// Need to hit an authenticate endpoint to trigger OAuth token exchange.
await _runnerServer.GetAgentPoolsAsync();
_term.WriteSuccessMessage("Runner connection is good");
}
catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is"))
{
// there are two exception messages server send that indicate clock skew.
// 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
Trace.Error("Catch exception during test agent connection.");
Trace.Error(ex);
throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
}
}
_term.WriteSection("Runner settings");
@@ -652,7 +695,7 @@ namespace GitHub.Runner.Listener.Configuration
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
responseStatus = response.StatusCode;
var githubRequestId = GetGitHubRequestId(response.Headers);
var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
@@ -715,7 +758,7 @@ namespace GitHub.Runner.Listener.Configuration
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
responseStatus = response.StatusCode;
var githubRequestId = GetGitHubRequestId(response.Headers);
var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
@@ -744,14 +787,5 @@ namespace GitHub.Runner.Listener.Configuration
}
return null;
}
private string GetGitHubRequestId(HttpResponseHeaders headers)
{
if (headers.TryGetValues("x-github-request-id", out var headerValues))
{
return headerValues.FirstOrDefault();
}
return string.Empty;
}
}
}
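
In the V2 registration path above, the runner exports its RSA public key as XML alongside the usual RSAParameters and sends it with the register call, while the private half stays in the local key store. A minimal sketch of that export using the BCL RSA type directly; the 2048-bit throwaway key stands in for the key that IRSAKeyManager would create and persist:

```csharp
using System;
using System.Security.Cryptography;

class PublicKeyExportSketch
{
    static void Main()
    {
        // Generate a throwaway 2048-bit key; the real runner persists its key
        // via IRSAKeyManager (DPAPI-protected file or platform key store).
        using var rsa = RSA.Create(2048);

        // Public parameters only; the private key never leaves the key store.
        RSAParameters publicKey = rsa.ExportParameters(false);

        // XML form of the public key, as passed to the register endpoint above.
        string publicKeyXml = rsa.ToXmlString(includePrivateParameters: false);

        Console.WriteLine($"Modulus length: {publicKey.Modulus.Length} bytes");
        Console.WriteLine(publicKeyXml);
    }
}
```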

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Runner.Common;
@@ -20,8 +20,8 @@ namespace GitHub.Runner.Listener.Configuration
{
public static readonly Dictionary<string, Type> CredentialTypes = new(StringComparer.OrdinalIgnoreCase)
{
{ Constants.Configuration.OAuth, typeof(OAuthCredential)},
{ Constants.Configuration.OAuthAccessToken, typeof(OAuthAccessTokenCredential)},
{ Constants.Configuration.OAuth, typeof(OAuthCredential) },
{ Constants.Configuration.OAuthAccessToken, typeof(OAuthAccessTokenCredential) },
};
public ICredentialProvider GetCredentialProvider(string credType)
@@ -93,6 +93,9 @@ namespace GitHub.Runner.Listener.Configuration
[DataMember(Name = "token")]
public string Token { get; set; }
[DataMember(Name = "use_v2_flow")]
public bool UseV2Flow { get; set; }
public VssCredentials ToVssCredentials()
{
ArgUtil.NotNullOrEmpty(TokenSchema, nameof(TokenSchema));

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]

View File

@@ -7,6 +7,7 @@ using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
@@ -58,6 +59,8 @@ namespace GitHub.Runner.Listener
public event EventHandler<JobStatusEventArgs> JobStatus;
private bool _isRunServiceJob;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -86,6 +89,8 @@ namespace GitHub.Runner.Listener
{
Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
_isRunServiceJob = MessageUtil.IsRunServiceJob(jobRequestMessage.MessageType);
WorkerDispatcher currentDispatch = null;
if (_jobDispatchedQueue.Count > 0)
{
@@ -239,6 +244,13 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
Trace.Error($"We are not yet checking the state of jobrequest {jobDispatch.JobId} status. Cancel running worker right away.");
jobDispatch.WorkerCancellationTokenSource.Cancel();
return;
}
// based on the current design, server will only send one job for a given runner at a time.
// if the runner received a new job request while a previous job request is still running, this typically indicates two situations
// 1. a runner bug caused a server and runner mismatch on the state of the job request, e.g. the runner didn't renew the jobrequest
@@ -367,9 +379,11 @@ namespace GitHub.Runner.Listener
long requestId = message.RequestId;
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here for compat
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
Task renewJobRequest = RenewJobRequestAsync(message, systemConnection, _poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// wait till first renew succeed or job request is cancelled
// not even start worker if the first renew fail
@@ -508,7 +522,6 @@ namespace GitHub.Runner.Listener
// we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
@@ -531,11 +544,8 @@ namespace GitHub.Runner.Listener
detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
var jobServer = await InitializeJobServerAsync(systemConnection);
await LogWorkerProcessUnhandledException(jobServer, message, detailInfo);
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
@@ -675,9 +685,128 @@ namespace GitHub.Runner.Listener
}
}
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
internal async Task RenewJobRequestAsync(Pipelines.AgentJobRequestMessage message, ServiceEndpoint systemConnection, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
if (this._isRunServiceJob)
{
var runServer = await GetRunServerAsync(systemConnection);
await RenewJobRequestAsync(runServer, message.Plan.PlanId, message.JobId, firstJobRequestRenewed, token);
}
else
{
var runnerServer = HostContext.GetService<IRunnerServer>();
await RenewJobRequestAsync(runnerServer, poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, token);
}
}
private async Task RenewJobRequestAsync(IRunServer runServer, Guid planId, Guid jobId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
TaskAgentJobRequest request = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
// renew lock during job running.
// stop renew only if cancellation token for lock renew task been signal or exception still happen after retry.
while (!token.IsCancellationRequested)
{
try
{
var renewResponse = await runServer.RenewJobAsync(planId, jobId, token);
Trace.Info($"Successfully renew job {jobId}, job is valid till {renewResponse.LockedUntil}");
if (!firstJobRequestRenewed.Task.IsCompleted)
{
// fire first renew succeed event.
firstJobRequestRenewed.TrySetResult(0);
}
if (encounteringError > 0)
{
encounteringError = 0;
HostContext.WritePerfCounter("JobRenewRecovered");
}
// renew again after 60 sec delay
await HostContext.Delay(TimeSpan.FromSeconds(60), token);
}
catch (TaskOrchestrationJobNotFoundException)
{
// no need for retry. the job is not valid anymore.
Trace.Info($"TaskAgentJobNotFoundException received when renew job {jobId}, job is no longer valid, stop renew job request.");
return;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
// OperationCanceledException may be caused by an http timeout or _lockRenewalTokenSource.Cancel();
// Stop renew only on cancellation token fired.
Trace.Info($"job renew has been cancelled, stop renew job {jobId}.");
return;
}
catch (Exception ex)
{
Trace.Error($"Catch exception during renew runner job {jobId}.");
Trace.Error(ex);
encounteringError++;
// retry
TimeSpan remainingTime = TimeSpan.Zero;
if (!firstJobRequestRenewed.Task.IsCompleted)
{
// retry 5 times every 10 sec for the first renew
if (firstRenewRetryLimit-- > 0)
{
remainingTime = TimeSpan.FromSeconds(10);
}
}
else
{
// retry till reach lockeduntil + 5 mins extra buffer.
remainingTime = request.LockedUntil.Value + TimeSpan.FromMinutes(5) - DateTime.UtcNow;
}
if (remainingTime > TimeSpan.Zero)
{
TimeSpan delayTime;
if (!firstJobRequestRenewed.Task.IsCompleted)
{
Trace.Info($"Retrying lock renewal for job {jobId}. The first job renew request has failed.");
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10));
}
else
{
Trace.Info($"Retrying lock renewal for job {jobId}. Job is valid until {request.LockedUntil.Value}.");
if (encounteringError > 5)
{
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30));
}
else
{
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
}
}
try
{
// back-off before next retry.
await HostContext.Delay(delayTime, token);
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info($"job renew has been cancelled, stop renew job {jobId}.");
}
}
else
{
Trace.Info($"Lock renewal has run out of retry, stop renew lock for job {jobId}.");
HostContext.WritePerfCounter("JobRenewReachLimit");
return;
}
}
}
}
private async Task RenewJobRequestAsync(IRunnerServer runnerServer, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
var runnerServer = HostContext.GetService<IRunnerServer>();
TaskAgentJobRequest request = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
@@ -840,90 +969,93 @@ namespace GitHub.Runner.Listener
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection));
ArgUtil.NotNull(systemConnection, nameof(systemConnection));
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
var server = await InitializeJobServerAsync(systemConnection);
await jobServer.ConnectAsync(jobConnection);
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
var updatedRecords = new List<TimelineRecord>();
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
var logRecords = new Dictionary<Guid, TimelineRecord>();
foreach (var log in logs)
if (server is IJobServer jobServer)
{
var logName = Path.GetFileNameWithoutExtension(log);
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
if (logNameParts.Length != 3)
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var logPageSeperator = logName.IndexOf('_');
var logRecordId = Guid.Empty;
var pageNumber = 0;
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
var updatedRecords = new List<TimelineRecord>();
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
var logRecords = new Dictionary<Guid, TimelineRecord>();
foreach (var log in logs)
{
Trace.Warning($"log file '{log}' is not belongs to current job");
continue;
}
if (!Guid.TryParse(logNameParts[1], out logRecordId))
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
if (!int.TryParse(logNameParts[2], out pageNumber))
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
if (record != null)
{
if (!logPages.ContainsKey(record.Id))
var logName = Path.GetFileNameWithoutExtension(log);
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
if (logNameParts.Length != 3)
{
logPages[record.Id] = new Dictionary<int, string>();
logRecords[record.Id] = record;
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var logPageSeperator = logName.IndexOf('_');
var logRecordId = Guid.Empty;
var pageNumber = 0;
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
{
Trace.Warning($"log file '{log}' is not belongs to current job");
continue;
}
logPages[record.Id][pageNumber] = log;
}
}
foreach (var pages in logPages)
{
var record = logRecords[pages.Key];
if (record.Log == null)
{
// Create the log
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
// Need to post timeline record updates to reflect the log creation
updatedRecords.Add(record.Clone());
}
for (var i = 1; i <= pages.Value.Count; i++)
{
var logFile = pages.Value[i];
// Upload the contents
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
if (!Guid.TryParse(logNameParts[1], out logRecordId))
{
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
IOUtil.DeleteFile(logFile);
if (!int.TryParse(logNameParts[2], out pageNumber))
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
if (record != null)
{
if (!logPages.ContainsKey(record.Id))
{
logPages[record.Id] = new Dictionary<int, string>();
logRecords[record.Id] = record;
}
logPages[record.Id][pageNumber] = log;
}
}
foreach (var pages in logPages)
{
var record = logRecords[pages.Key];
if (record.Log == null)
{
// Create the log
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
// Need to post timeline record updates to reflect the log creation
updatedRecords.Add(record.Clone());
}
for (var i = 1; i <= pages.Value.Count; i++)
{
var logFile = pages.Value[i];
// Upload the contents
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
}
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
IOUtil.DeleteFile(logFile);
}
}
if (updatedRecords.Count > 0)
{
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
}
}
if (updatedRecords.Count > 0)
else
{
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
Trace.Info("Job server does not support log upload yet.");
}
}
catch (Exception ex)
@@ -943,6 +1075,12 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
Trace.Verbose($"Skip FinishAgentRequest call from Listener because MessageType is {message.MessageType}");
return;
}
var runnerServer = HostContext.GetService<IRunnerServer>();
int completeJobRequestRetryLimit = 5;
List<Exception> exceptions = new();
@@ -979,66 +1117,117 @@ namespace GitHub.Runner.Listener
}
// log an error issue to job level timeline record
private async Task LogWorkerProcessUnhandledException(IJobServer jobServer, Pipelines.AgentJobRequestMessage message, string errorMessage)
private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string errorMessage)
{
try
if (server is IJobServer jobServer)
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
try
{
if (!string.IsNullOrEmpty(errorMessage) &&
message.Variables.TryGetValue("DistributedTask.EnableRunnerIPCDebug", out var enableRunnerIPCDebug) &&
StringUtil.ConvertToBoolean(enableRunnerIPCDebug.Value))
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
try
{
// the trace should be best effort and not affect any job result
var match = _invalidJsonRegex.Match(errorMessage);
if (match.Success &&
match.Groups.Count == 2)
if (!string.IsNullOrEmpty(errorMessage) &&
message.Variables.TryGetValue("DistributedTask.EnableRunnerIPCDebug", out var enableRunnerIPCDebug) &&
StringUtil.ConvertToBoolean(enableRunnerIPCDebug.Value))
{
var jsonPosition = int.Parse(match.Groups[1].Value);
var serializedJobMessage = JsonUtility.ToString(message);
var originalJson = serializedJobMessage.Substring(jsonPosition - 10, 20);
errorMessage = $"Runner sent Json at position '{jsonPosition}': {originalJson} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(originalJson))})\n{errorMessage}";
// the trace should be best effort and not affect any job result
var match = _invalidJsonRegex.Match(errorMessage);
if (match.Success &&
match.Groups.Count == 2)
{
var jsonPosition = int.Parse(match.Groups[1].Value);
var serializedJobMessage = JsonUtility.ToString(message);
var originalJson = serializedJobMessage.Substring(jsonPosition - 10, 20);
errorMessage = $"Runner sent Json at position '{jsonPosition}': {originalJson} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(originalJson))})\n{errorMessage}";
}
}
}
catch (Exception ex)
{
Trace.Error(ex);
errorMessage = $"Fail to check json IPC error: {ex.Message}\n{errorMessage}";
}
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
errorMessage = $"Fail to check json IPC error: {ex.Message}\n{errorMessage}";
}
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
catch (Exception ex)
else
{
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", errorMessage);
return;
}
}
// raise job completed event to fail the job.
private async Task ForceFailJob(IJobServer jobServer, Pipelines.AgentJobRequestMessage message)
private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message)
{
try
if (server is IJobServer jobServer)
{
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
try
{
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
}
catch (Exception ex)
else if (server is IRunServer runServer)
{
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
try
{
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
}
}
else
{
throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported.");
}
}
private async Task<IRunnerService> InitializeJobServerAsync(ServiceEndpoint systemConnection)
{
if (this._isRunServiceJob)
{
return await GetRunServerAsync(systemConnection);
}
else
{
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
return jobServer;
}
}
private async Task<IRunServer> GetRunServerAsync(ServiceEndpoint systemConnection)
{
var runServer = HostContext.GetService<IRunServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
await runServer.ConnectAsync(systemConnection.Url, jobServerCredential);
return runServer;
}
private class WorkerDispatcher : IDisposable

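For run-service jobs, the dispatcher above renews the job lease on a fixed 60-second cadence rather than renewing the pool's job request, and only starts the worker once the first renewal succeeds. A stripped-down sketch of that cadence against a hypothetical renew callback; the IRunServer client and the full backoff/give-up policy are not reproduced here:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

static class RenewLoopSketch
{
    // Calls renew every 60 seconds until the token is cancelled, completing
    // firstRenewed after the first success so the caller can start the worker.
    public static async Task RenewJobAsync(
        Func<CancellationToken, Task> renew,   // hypothetical stand-in for runServer.RenewJobAsync
        TaskCompletionSource<int> firstRenewed,
        CancellationToken token)
    {
        while (!token.IsCancellationRequested)
        {
            try
            {
                await renew(token);
                firstRenewed.TrySetResult(0);   // unblock the worker start on first success
                await Task.Delay(TimeSpan.FromSeconds(60), token);
            }
            catch (OperationCanceledException) when (token.IsCancellationRequested)
            {
                return;                         // job finished or listener shutting down
            }
            catch (Exception)
            {
                // The real loop backs off with jitter and eventually gives up; keep it simple here.
                await Task.Delay(TimeSpan.FromSeconds(10), CancellationToken.None);
            }
        }
    }
}
```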
View File

@@ -245,6 +245,10 @@ namespace GitHub.Runner.Listener
_accessTokenRevoked = true;
throw;
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during get next message.");

View File

@@ -6,6 +6,7 @@ using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Listener
{
@@ -58,7 +59,7 @@ namespace GitHub.Runner.Listener
terminal.WriteLine("This runner version is built for Windows. Please install a correct build for your OS.");
return Constants.Runner.ReturnCode.TerminatedError;
}
#if ARM64
// A little hacky, but windows gives no way to differentiate between windows 10 and 11.
// By default only 11 supports native x64 app emulation on arm, so we only want to support windows 11
// https://docs.microsoft.com/en-us/windows/arm/overview#build-windows-apps-that-run-on-arm
@@ -69,7 +70,7 @@ namespace GitHub.Runner.Listener
terminal.WriteLine("Win-arm64 runners require windows 11 or later. Please upgrade your operating system.");
return Constants.Runner.ReturnCode.TerminatedError;
}
#endif
break;
default:
terminal.WriteLine($"Running the runner on this platform is not supported. The current platform is {RuntimeInformation.OSDescription} and it was built for {Constants.Runner.Platform.ToString()}.");
@@ -137,6 +138,12 @@ namespace GitHub.Runner.Listener
}
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
terminal.WriteError($"An error occured: {e.Message}");
trace.Error(e);
return Constants.Runner.ReturnCode.TerminatedError;
}
catch (Exception e)
{
terminal.WriteError($"An error occurred: {e.Message}");

View File

@@ -4,11 +4,13 @@ using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Listener.Check;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
@@ -136,7 +138,7 @@ namespace GitHub.Runner.Listener
if (command.Remove)
{
// only remove local config files and exit
if(command.RemoveLocalConfig)
if (command.RemoveLocalConfig)
{
configManager.DeleteLocalRunnerConfig();
return Constants.Runner.ReturnCode.Success;
@@ -209,10 +211,16 @@ namespace GitHub.Runner.Listener
foreach (var config in jitConfig)
{
var configFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), config.Key);
var configContent = Encoding.UTF8.GetString(Convert.FromBase64String(config.Value));
File.WriteAllText(configFile, configContent, Encoding.UTF8);
var configContent = Convert.FromBase64String(config.Value);
#if OS_WINDOWS
if (configFile == HostContext.GetConfigFile(WellKnownConfigFile.RSACredentials))
{
configContent = ProtectedData.Protect(configContent, null, DataProtectionScope.LocalMachine);
}
#endif
File.WriteAllBytes(configFile, configContent);
File.SetAttributes(configFile, File.GetAttributes(configFile) | FileAttributes.Hidden);
Trace.Info($"Save {configContent.Length} chars to '{configFile}'.");
Trace.Info($"Saved {configContent.Length} bytes to '{configFile}'.");
}
}
catch (Exception ex)
@@ -331,13 +339,26 @@ namespace GitHub.Runner.Listener
}
}
private IMessageListener GetMesageListener(RunnerSettings settings)
{
if (settings.UseV2Flow)
{
Trace.Info($"Using BrokerMessageListener");
var brokerListener = new BrokerMessageListener();
brokerListener.Initialize(HostContext);
return brokerListener;
}
return HostContext.GetService<IMessageListener>();
}
//create worker manager, create message listener and start listening to the queue
private async Task<int> RunAsync(RunnerSettings settings, bool runOnce = false)
{
try
{
Trace.Info(nameof(RunAsync));
_listener = HostContext.GetService<IMessageListener>();
_listener = GetMesageListener(settings);
if (!await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken))
{
return Constants.Runner.ReturnCode.TerminatedError;
@@ -502,7 +523,7 @@ namespace GitHub.Runner.Listener
}
}
// Broker flow
else if (string.Equals(message.MessageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase))
else if (MessageUtil.IsRunServiceJob(message.MessageType))
{
if (autoUpdateInProgress || runOnceJobReceived)
{

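The --jitconfig handling above now writes each configuration value as raw bytes decoded from base64 and, on Windows, wraps the RSA credentials file with DPAPI before it reaches disk. A small sketch of that decode-and-protect step; the payload and file name are placeholders, OS_WINDOWS is the runner's own compilation symbol, and ProtectedData requires Windows (on .NET 6+ it ships in the System.Security.Cryptography.ProtectedData package):

```csharp
using System;
using System.IO;
using System.Security.Cryptography;

class JitConfigWriteSketch
{
    static void Main()
    {
        // Hypothetical base64 payload as it would arrive in the JIT config blob.
        string base64Value = Convert.ToBase64String(new byte[] { 1, 2, 3, 4 });
        byte[] content = Convert.FromBase64String(base64Value);

#if OS_WINDOWS
        // Machine-scoped DPAPI protection, mirroring the RSA credentials case above.
        content = ProtectedData.Protect(content, null, DataProtectionScope.LocalMachine);
#endif

        string configFile = Path.Combine(Path.GetTempPath(), ".credentials_rsaparams.example");
        File.WriteAllBytes(configFile, content);
        File.SetAttributes(configFile, File.GetAttributes(configFile) | FileAttributes.Hidden);
        Console.WriteLine($"Saved {content.Length} bytes to '{configFile}'.");
    }
}
```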
View File

@@ -164,7 +164,6 @@ namespace GitHub.Runner.Sdk
{
continue;
}
_noProxyList.Add(noProxyInfo);
}
}
@@ -207,6 +206,11 @@ namespace GitHub.Runner.Sdk
{
foreach (var noProxy in _noProxyList)
{
// bypass on wildcard no_proxy
if (string.Equals(noProxy.Host, "*", StringComparison.OrdinalIgnoreCase))
{
return true;
}
var matchHost = false;
var matchPort = false;

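The proxy hunk above teaches the bypass check to honor a bare * entry in no_proxy, meaning "bypass the proxy for every host". A minimal sketch of that check in isolation, with a simplified exact-or-suffix host match standing in for the runner's full bypass-list handling:

```csharp
using System;
using System.Linq;

static class NoProxySketch
{
    // Returns true when the proxy should be bypassed for the given host.
    public static bool IsBypassed(string host, string[] noProxyEntries)
    {
        foreach (var entry in noProxyEntries.Select(e => e.Trim()).Where(e => e.Length > 0))
        {
            // a bare wildcard bypasses everything
            if (entry == "*")
            {
                return true;
            }

            // simplified match: exact host or domain-suffix match (e.g. ".example.com")
            var suffix = entry.StartsWith(".") ? entry : "." + entry;
            if (string.Equals(host, entry, StringComparison.OrdinalIgnoreCase) ||
                host.EndsWith(suffix, StringComparison.OrdinalIgnoreCase))
            {
                return true;
            }
        }

        return false;
    }

    static void Main()
    {
        Console.WriteLine(IsBypassed("github.com", new[] { "*" }));                // True
        Console.WriteLine(IsBypassed("api.example.com", new[] { "example.com" })); // True
        Console.WriteLine(IsBypassed("github.com", new[] { "example.com" }));      // False
    }
}
```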
View File

@@ -40,10 +40,19 @@ namespace GitHub.Runner.Sdk
File.WriteAllText(path, StringUtil.ConvertToJson(obj), Encoding.UTF8);
}
public static T LoadObject<T>(string path)
public static T LoadObject<T>(string path, bool required = false)
{
string json = File.ReadAllText(path, Encoding.UTF8);
return StringUtil.ConvertFromJson<T>(json);
if (required && string.IsNullOrEmpty(json))
{
throw new ArgumentNullException($"File {path} is empty");
}
T result = StringUtil.ConvertFromJson<T>(json);
if (required && result == null)
{
throw new ArgumentException("Converting json to object resulted in a null value");
}
return result;
}
public static string GetSha256Hash(string path)

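LoadObject above gains an optional required flag so callers can fail fast when a settings file is empty or deserializes to null instead of silently passing a null along. A sketch of the same guard pattern, with System.Text.Json standing in for the runner's StringUtil JSON helper and InvalidDataException as an illustrative exception choice:

```csharp
using System;
using System.IO;
using System.Text;
using System.Text.Json;

static class LoadObjectSketch
{
    // Loads a JSON file into T; when required is true, an empty file or a
    // null deserialization result is treated as an error rather than returned.
    public static T LoadObject<T>(string path, bool required = false)
    {
        string json = File.ReadAllText(path, Encoding.UTF8);
        if (required && string.IsNullOrEmpty(json))
        {
            throw new InvalidDataException($"File '{path}' is empty.");
        }

        T result = JsonSerializer.Deserialize<T>(json);
        if (required && result == null)
        {
            throw new InvalidDataException($"File '{path}' did not deserialize to a value.");
        }

        return result;
    }
}
```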
View File

@@ -1,4 +1,4 @@
using System;
using System;
namespace GitHub.Runner.Sdk
{
@@ -15,6 +15,7 @@ namespace GitHub.Runner.Sdk
string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase) ||
gitHubUrl.Host.EndsWith(".ghe.localhost", StringComparison.OrdinalIgnoreCase) ||
gitHubUrl.Host.EndsWith(".ghe.com", StringComparison.OrdinalIgnoreCase);
}

View File

@@ -60,7 +60,7 @@ namespace GitHub.Runner.Sdk
trace?.Verbose(ex.ToString());
}
if (matches != null && matches.Length > 0)
if (matches != null && matches.Length > 0 && IsPathValid(matches.First(), trace))
{
trace?.Info($"Location: '{matches.First()}'");
return matches.First();
@@ -86,7 +86,7 @@ namespace GitHub.Runner.Sdk
for (int i = 0; i < pathExtSegments.Length; i++)
{
string fullPath = Path.Combine(pathSegment, $"{command}{pathExtSegments[i]}");
if (matches.Any(p => p.Equals(fullPath, StringComparison.OrdinalIgnoreCase)))
if (matches.Any(p => p.Equals(fullPath, StringComparison.OrdinalIgnoreCase)) && IsPathValid(fullPath, trace))
{
trace?.Info($"Location: '{fullPath}'");
return fullPath;
@@ -105,7 +105,7 @@ namespace GitHub.Runner.Sdk
trace?.Verbose(ex.ToString());
}
if (matches != null && matches.Length > 0)
if (matches != null && matches.Length > 0 && IsPathValid(matches.First(), trace))
{
trace?.Info($"Location: '{matches.First()}'");
return matches.First();
@@ -128,5 +128,15 @@ namespace GitHub.Runner.Sdk
return null;
}
// checks if the file is a symlink and if the symlink's target exists.
private static bool IsPathValid(string path, ITraceWriter trace = null)
{
var fileInfo = new FileInfo(path);
var linkTargetFullPath = fileInfo.Directory?.FullName + Path.DirectorySeparatorChar + fileInfo.LinkTarget;
if (fileInfo.LinkTarget == null || File.Exists(linkTargetFullPath) || File.Exists(fileInfo.LinkTarget)) return true;
trace?.Info($"The target '{fileInfo.LinkTarget}' of the symbolic link '{path}' does not exist");
return false;
}
}
}
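
IsPathValid above filters out candidates that are broken symlinks: a match is kept if it is not a link at all, or if its target resolves either relative to the link's directory or as an absolute path. A standalone sketch of that check plus a tiny round-trip demo; FileInfo.LinkTarget and File.CreateSymbolicLink require .NET 6 or later, and creating symlinks may need elevated rights on Windows:

```csharp
using System;
using System.IO;

static class SymlinkCheckSketch
{
    // Returns false only when the file is a symlink whose target does not exist.
    public static bool IsPathValid(string path)
    {
        var fileInfo = new FileInfo(path);
        if (fileInfo.LinkTarget == null)
        {
            return true; // regular file, not a symlink
        }

        // The link target may be relative to the link's directory or absolute.
        string relativeTarget = Path.Combine(fileInfo.DirectoryName ?? "", fileInfo.LinkTarget);
        return File.Exists(relativeTarget) || File.Exists(fileInfo.LinkTarget);
    }

    static void Main()
    {
        string dir = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
        Directory.CreateDirectory(dir);
        string target = Path.Combine(dir, "target.txt");
        string link = Path.Combine(dir, "link.txt");

        File.WriteAllText(target, "hello");
        File.CreateSymbolicLink(link, target);
        Console.WriteLine(IsPathValid(link)); // True: target exists

        File.Delete(target);
        Console.WriteLine(IsPathValid(link)); // False: dangling symlink
    }
}
```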

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
@@ -81,7 +81,7 @@ namespace GitHub.Runner.Worker
// logging
long Write(string tag, string message);
void QueueAttachFile(string type, string name, string filePath);
void QueueSummaryFile(string name, string filePath, Guid stepRecordId);
// timeline record update methods
void Start(string currentOperation = null);
@@ -871,8 +871,7 @@ namespace GitHub.Runner.Worker
{
throw new FileNotFoundException($"Can't upload (name:{name}) file: {filePath}. File does not exist.");
}
_jobServerQueue.QueueSummaryUpload(stepRecordId, name, filePath, deleteSource: false);
_jobServerQueue.QueueResultsUpload(stepRecordId, name, filePath, ChecksAttachmentType.StepSummary, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0);
}
// Add OnMatcherChanged

View File

@@ -3,9 +3,11 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
@@ -19,7 +21,7 @@ namespace GitHub.Runner.Worker
[ServiceLocator(Default = typeof(JobRunner))]
public interface IJobRunner : IRunnerService
{
Task<TaskResult> RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken);
Task<TaskResult> RunAsync(AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken);
}
public sealed class JobRunner : RunnerService, IJobRunner
@@ -28,7 +30,7 @@ namespace GitHub.Runner.Worker
private RunnerSettings _runnerSettings;
private ITempDirectoryManager _tempDirectoryManager;
public async Task<TaskResult> RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken)
public async Task<TaskResult> RunAsync(AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken)
{
// Validate parameters.
Trace.Entering();
@@ -41,13 +43,23 @@ namespace GitHub.Runner.Worker
DateTime jobStartTimeUtc = DateTime.UtcNow;
IRunnerService server = null;
// add orchestration id to useragent for better correlation.
if (message.Variables.TryGetValue(Constants.Variables.System.OrchestrationId, out VariableValue orchestrationId) &&
!string.IsNullOrEmpty(orchestrationId.Value))
{
HostContext.UserAgents.Add(new ProductInfoHeaderValue("OrchestrationId", orchestrationId.Value));
}
ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (string.Equals(message.MessageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase))
if (MessageUtil.IsRunServiceJob(message.MessageType))
{
var runServer = HostContext.GetService<IRunServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
await runServer.ConnectAsync(systemConnection.Url, jobServerCredential);
server = runServer;
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
_jobServerQueue.Start(message, resultServiceOnly: true);
}
else
{

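JobRunner above appends the job's orchestration id to the process-wide User-Agent list so server-side logs can correlate all requests issued for the same job. A small sketch of building such a product token and attaching it to an HttpClient; the id value, product name, and version below are placeholders in the standard product/version header syntax:

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Headers;

class OrchestrationIdUserAgentSketch
{
    static void Main()
    {
        // Placeholder id; the runner reads this from the job message variables.
        string orchestrationId = "1234567890.my-workflow.build.__default";

        using var client = new HttpClient();
        client.DefaultRequestHeaders.UserAgent.Add(new ProductInfoHeaderValue("GitHubActionsRunner", "2.304.0"));
        client.DefaultRequestHeaders.UserAgent.Add(new ProductInfoHeaderValue("OrchestrationId", orchestrationId));

        // All requests from this client now carry the correlating token.
        Console.WriteLine(client.DefaultRequestHeaders.UserAgent.ToString());
    }
}
```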
View File

@@ -184,9 +184,33 @@ namespace GitHub.Services.Common
return settings;
}
/// <summary>
/// Gets or sets the maximum size allowed for response content buffering.
/// </summary>
[DefaultValue(c_defaultContentBufferSize)]
public Int32 MaxContentBufferSize
{
get
{
return m_maxContentBufferSize;
}
set
{
ArgumentUtility.CheckForOutOfRange(value, nameof(value), 0, c_maxAllowedContentBufferSize);
m_maxContentBufferSize = value;
}
}
private static Lazy<RawClientHttpRequestSettings> s_defaultSettings
= new Lazy<RawClientHttpRequestSettings>(ConstructDefaultSettings);
private Int32 m_maxContentBufferSize;
// We will buffer a maximum of 1024MB in the message handler
private const Int32 c_maxAllowedContentBufferSize = 1024 * 1024 * 1024;
// We will buffer, by default, up to 512MB in the message handler
private const Int32 c_defaultContentBufferSize = 1024 * 1024 * 512;
private const Int32 c_defaultMaxRetry = 3;
private static readonly TimeSpan s_defaultTimeout = TimeSpan.FromSeconds(100); //default WebAPI timeout
private ICollection<CultureInfo> m_acceptLanguages = new List<CultureInfo>();
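A minimal configuration sketch, not part of this change: a caller that already holds a RawClientHttpRequestSettings instance (how it is obtained is assumed here) can tune the new buffering limit; the setter rejects anything above the 1024MB ceiling via ArgumentUtility.CheckForOutOfRange.
// Hypothetical helper for illustration only.
static void ConfigureResponseBuffering(RawClientHttpRequestSettings settings)
{
    // 256MB is within the allowed range; values above c_maxAllowedContentBufferSize (1024MB) throw.
    settings.MaxContentBufferSize = 256 * 1024 * 1024;
}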

View File

@@ -9,7 +9,7 @@ using GitHub.Services.OAuth;
namespace GitHub.Services.Common
{
public class RawHttpMessageHandler: HttpMessageHandler
public class RawHttpMessageHandler : HttpMessageHandler
{
public RawHttpMessageHandler(
FederatedCredential credentials)
@@ -120,6 +120,7 @@ namespace GitHub.Services.Common
Boolean succeeded = false;
HttpResponseMessageWrapper responseWrapper;
Boolean lastResponseDemandedProxyAuth = false;
Int32 retries = m_maxAuthRetries;
try
{
@@ -138,7 +139,13 @@ namespace GitHub.Services.Common
// Let's start with sending a token
IssuedToken token = await m_tokenProvider.GetTokenAsync(null, tokenSource.Token).ConfigureAwait(false);
ApplyToken(request, token);
ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth);
// The WinHttpHandler will chunk any content that does not have a computed length which is
// not what we want. By loading into a buffer up-front we bypass this behavior and there is
// no difference in the normal HttpClientHandler behavior here since this is what they were
// already doing.
await BufferRequestContentAsync(request, tokenSource.Token).ConfigureAwait(false);
// ConfigureAwait(false) enables the continuation to be run outside any captured
// SynchronizationContext (such as ASP.NET's) which keeps things from deadlocking...
@@ -147,7 +154,8 @@ namespace GitHub.Services.Common
responseWrapper = new HttpResponseMessageWrapper(response);
var isUnAuthorized = responseWrapper.StatusCode == HttpStatusCode.Unauthorized;
if (!isUnAuthorized)
lastResponseDemandedProxyAuth = responseWrapper.StatusCode == HttpStatusCode.ProxyAuthenticationRequired;
if (!isUnAuthorized && !lastResponseDemandedProxyAuth)
{
// Validate the token after it has been successfully authenticated with the server.
m_tokenProvider?.ValidateToken(token, responseWrapper);
@@ -211,15 +219,42 @@ namespace GitHub.Services.Common
}
}
private static async Task BufferRequestContentAsync(
HttpRequestMessage request,
CancellationToken cancellationToken)
{
if (request.Content != null &&
request.Headers.TransferEncodingChunked != true)
{
Int64? contentLength = request.Content.Headers.ContentLength;
if (contentLength == null)
{
await request.Content.LoadIntoBufferAsync().EnforceCancellation(cancellationToken).ConfigureAwait(false);
}
// Explicitly turn off chunked encoding since we have computed the request content size
request.Headers.TransferEncodingChunked = false;
}
}
private void ApplyToken(
HttpRequestMessage request,
IssuedToken token)
IssuedToken token,
bool applyICredentialsToWebProxy = false)
{
switch (token)
{
case null:
return;
case ICredentials credentialsToken:
if (applyICredentialsToWebProxy)
{
HttpClientHandler httpClientHandler = m_transportHandler as HttpClientHandler;
if (httpClientHandler != null && httpClientHandler.Proxy != null)
{
httpClientHandler.Proxy.Credentials = credentialsToken;
}
}
m_credentialWrapper.InnerCredentials = credentialsToken;
break;
default:

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;

View File

@@ -222,6 +222,9 @@
},
"job-outputs": {
"context": [
"matrix"
],
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string-runner-context"

View File

@@ -0,0 +1,48 @@
using GitHub.Services.WebApi;
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using System.Linq;
namespace GitHub.DistributedTask.WebApi
{
public class ListRunnersResponse
{
public ListRunnersResponse()
{
}
public ListRunnersResponse(ListRunnersResponse responseToBeCloned)
{
this.TotalCount = responseToBeCloned.TotalCount;
this.Runners = responseToBeCloned.Runners;
}
[JsonProperty("total_count")]
public int TotalCount
{
get;
set;
}
[JsonProperty("runners")]
public List<Runner> Runners
{
get;
set;
}
public ListRunnersResponse Clone()
{
return new ListRunnersResponse(this);
}
public List<TaskAgent> ToTaskAgents()
{
return Runners.Select(runner => new TaskAgent() { Name = runner.Name }).ToList();
}
}
}
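A rough usage sketch, not part of the diff: assuming the JSON body mirrors the [JsonProperty] names above and that Json.NET is allowed to populate the internal setters, a caller could deserialize a list-runners response and hand it to the existing TaskAgent-based code path.
using System.Collections.Generic;
using GitHub.DistributedTask.WebApi;
using Newtonsoft.Json;
// Illustrative payload only; the field names follow the attributes on ListRunnersResponse and Runner.
string json = "{\"total_count\":1,\"runners\":[{\"id\":1,\"name\":\"my-runner\"}]}";
ListRunnersResponse response = JsonConvert.DeserializeObject<ListRunnersResponse>(json);
List<TaskAgent> agents = response.ToTaskAgents(); // a single TaskAgent named "my-runner"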

View File

@@ -0,0 +1,63 @@
using System;
using Newtonsoft.Json;
namespace GitHub.DistributedTask.WebApi
{
public class Runner
{
public class Authorization
{
/// <summary>
/// The url to refresh tokens
/// </summary>
[JsonProperty("authorization_url")]
public Uri AuthorizationUrl
{
get;
internal set;
}
/// <summary>
/// The url to connect to when polling for messages
/// </summary>
[JsonProperty("server_url")]
public string ServerUrl
{
get;
internal set;
}
/// <summary>
/// The client id to use when connecting to the authorization_url
/// </summary>
[JsonProperty("client_id")]
public string ClientId
{
get;
internal set;
}
}
[JsonProperty("name")]
public string Name
{
get;
internal set;
}
[JsonProperty("id")]
public Int32 Id
{
get;
internal set;
}
[JsonProperty("authorization")]
public Authorization RunnerAuthorization
{
get;
internal set;
}
}
}

View File

@@ -0,0 +1,98 @@
using GitHub.Services.WebApi;
using System;
using System.Runtime.Serialization;
using System.ComponentModel;
using System.Collections.Generic;
using Newtonsoft.Json;
using System.Linq;
namespace GitHub.DistributedTask.WebApi
{
/// <summary>
/// An organization-level grouping of runners.
/// </summary>
[DataContract]
public class RunnerGroup
{
internal RunnerGroup()
{
}
public RunnerGroup(String name)
{
this.Name = name;
}
private RunnerGroup(RunnerGroup poolToBeCloned)
{
this.Id = poolToBeCloned.Id;
this.IsHosted = poolToBeCloned.IsHosted;
this.Name = poolToBeCloned.Name;
this.IsDefault = poolToBeCloned.IsDefault;
}
[DataMember(EmitDefaultValue = false)]
[JsonProperty("id")]
public Int32 Id
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
[JsonProperty("name")]
public String Name
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether or not this pool is internal and can't be modified by users
/// </summary>
[DataMember]
[JsonProperty("default")]
public bool IsDefault
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether or not this pool is managed by the service.
/// </summary>
[DataMember]
[JsonProperty("is_hosted")]
public Boolean IsHosted
{
get;
set;
}
}
public class RunnerGroupList
{
public RunnerGroupList()
{
this.RunnerGroups = new List<RunnerGroup>();
}
public List<TaskAgentPool> ToAgentPoolList()
{
var agentPools = this.RunnerGroups.Select(x => new TaskAgentPool(x.Name)
{
Id = x.Id,
IsHosted = x.IsHosted,
IsInternal = x.IsDefault
}).ToList();
return agentPools;
}
[JsonProperty("runner_groups")]
public List<RunnerGroup> RunnerGroups { get; set; }
[JsonProperty("total_count")]
public int Count { get; set; }
}
}
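In the same spirit, a hedged sketch of mapping a runner-groups payload back onto the existing pool types, assuming Json.NET binds the public RunnerGroup(String name) constructor when deserializing.
using System.Collections.Generic;
using GitHub.DistributedTask.WebApi;
using Newtonsoft.Json;
// Illustrative payload only.
string json = "{\"total_count\":1,\"runner_groups\":[{\"id\":2,\"name\":\"Default\",\"default\":true,\"is_hosted\":false}]}";
RunnerGroupList groups = JsonConvert.DeserializeObject<RunnerGroupList>(json);
List<TaskAgentPool> pools = groups.ToAgentPoolList(); // IsInternal mirrors each group's "default" flag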

View File

@@ -1,4 +1,4 @@
using GitHub.Services.Common;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using System;
using System.Runtime.Serialization;
@@ -100,6 +100,7 @@ namespace GitHub.DistributedTask.WebApi
public static readonly String Summary = "DistributedTask.Core.Summary";
public static readonly String FileAttachment = "DistributedTask.Core.FileAttachment";
public static readonly String DiagnosticLog = "DistributedTask.Core.DiagnosticLog";
public static readonly String ResultsLog = "Results.Core.Log";
}
[GenerateAllConstants]

View File

@@ -1,13 +1,16 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Actions.RunService.WebApi
{
[DataContract]
public class AcquireJobRequest
{
[DataMember(Name = "jobMessageId", EmitDefaultValue = false)]
public string JobMessageId { get; set; }
// This field will be removed in an upcoming Runner release.
// It's left here temporarily to facilitate the transition to the new field name, JobMessageId.
[DataMember(Name = "streamId", EmitDefaultValue = false)]
public string StreamID { get; set; }
public string StreamId { get; set; }
}
}

View File

@@ -0,0 +1,15 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.Actions.RunService.WebApi
{
[DataContract]
public class RenewJobRequest
{
[DataMember(Name = "planId", EmitDefaultValue = false)]
public Guid PlanID { get; set; }
[DataMember(Name = "jobId", EmitDefaultValue = false)]
public Guid JobID { get; set; }
}
}

View File

@@ -0,0 +1,16 @@
using System;
using System.Runtime.Serialization;
namespace Sdk.RSWebApi.Contracts
{
[DataContract]
public class RenewJobResponse
{
[DataMember]
public DateTime LockedUntil
{
get;
internal set;
}
}
}

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
@@ -8,6 +9,7 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi;
namespace GitHub.Actions.RunService.WebApi
@@ -54,7 +56,7 @@ namespace GitHub.Actions.RunService.WebApi
{
}
public Task<AgentJobRequestMessage> GetJobMessageAsync(
public async Task<AgentJobRequestMessage> GetJobMessageAsync(
Uri requestUri,
string messageId,
CancellationToken cancellationToken = default)
@@ -62,20 +64,34 @@ namespace GitHub.Actions.RunService.WebApi
HttpMethod httpMethod = new HttpMethod("POST");
var payload = new AcquireJobRequest
{
StreamID = messageId
JobMessageId = messageId,
StreamId = messageId,
};
requestUri = new Uri(requestUri, "acquirejob");
var requestContent = new ObjectContent<AcquireJobRequest>(payload, new VssJsonMediaTypeFormatter(true));
return SendAsync<AgentJobRequestMessage>(
var result = await SendAsync<AgentJobRequestMessage>(
httpMethod,
requestUri: requestUri,
content: requestContent,
cancellationToken: cancellationToken);
if (result.IsSuccess)
{
return result.Value;
}
switch (result.StatusCode)
{
case HttpStatusCode.NotFound:
throw new TaskOrchestrationJobNotFoundException($"Job message not found: {messageId}");
default:
throw new Exception($"Failed to get job message: {result.Error}");
}
}
public Task CompleteJobAsync(
public async Task CompleteJobAsync(
Uri requestUri,
Guid planId,
Guid jobId,
@@ -97,11 +113,59 @@ namespace GitHub.Actions.RunService.WebApi
requestUri = new Uri(requestUri, "completejob");
var requestContent = new ObjectContent<CompleteJobRequest>(payload, new VssJsonMediaTypeFormatter(true));
return SendAsync(
var response = await SendAsync(
httpMethod,
requestUri,
content: requestContent,
cancellationToken: cancellationToken);
if (response.IsSuccessStatusCode)
{
return;
}
switch (response.StatusCode)
{
case HttpStatusCode.NotFound:
throw new TaskOrchestrationJobNotFoundException($"Job not found: {jobId}");
default:
throw new Exception($"Failed to complete job: {response.ReasonPhrase}");
}
}
public async Task<RenewJobResponse> RenewJobAsync(
Uri requestUri,
Guid planId,
Guid jobId,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
var payload = new RenewJobRequest()
{
PlanID = planId,
JobID = jobId
};
requestUri = new Uri(requestUri, "renewjob");
var requestContent = new ObjectContent<RenewJobRequest>(payload, new VssJsonMediaTypeFormatter(true));
var result = await SendAsync<RenewJobResponse>(
httpMethod,
requestUri,
content: requestContent,
cancellationToken: cancellationToken);
if (result.IsSuccess)
{
return result.Value;
}
switch (result.StatusCode)
{
case HttpStatusCode.NotFound:
throw new TaskOrchestrationJobNotFoundException($"Job not found: {jobId}");
default:
throw new Exception($"Failed to renew job: {result.Error}");
}
}
}
}
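A caller-side sketch of consuming the new typed failures (illustration only; the method name and inputs are hypothetical, and only exception types shown above are assumed):
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.WebApi;
using Sdk.RSWebApi.Contracts;
// Hypothetical helper: stop renewing once the run service reports the job is gone.
static async Task<bool> TryRenewAsync(RunServiceHttpClient client, Uri requestUri, Guid planId, Guid jobId, CancellationToken token)
{
    try
    {
        RenewJobResponse renewed = await client.RenewJobAsync(requestUri, planId, jobId, token);
        Console.WriteLine($"Lock renewed until {renewed.LockedUntil:O}");
        return true;
    }
    catch (TaskOrchestrationJobNotFoundException)
    {
        // The service no longer knows about this job; give up instead of retrying.
        return false;
    }
}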

View File

@@ -0,0 +1,92 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi;
namespace GitHub.Actions.RunService.WebApi
{
public class BrokerHttpClient : RawHttpClientBase
{
public BrokerHttpClient(
Uri baseUrl,
VssOAuthCredential credentials)
: base(baseUrl, credentials)
{
}
public BrokerHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
RawClientHttpRequestSettings settings)
: base(baseUrl, credentials, settings)
{
}
public BrokerHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
params DelegatingHandler[] handlers)
: base(baseUrl, credentials, handlers)
{
}
public BrokerHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
RawClientHttpRequestSettings settings,
params DelegatingHandler[] handlers)
: base(baseUrl, credentials, settings, handlers)
{
}
public BrokerHttpClient(
Uri baseUrl,
HttpMessageHandler pipeline,
Boolean disposeHandler)
: base(baseUrl, pipeline, disposeHandler)
{
}
public async Task<TaskAgentMessage> GetRunnerMessageAsync(
string runnerVersion,
TaskAgentStatus? status,
CancellationToken cancellationToken = default
)
{
var requestUri = new Uri(Client.BaseAddress, "message");
List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
if (status != null)
{
queryParams.Add("status", status.Value.ToString());
}
if (runnerVersion != null)
{
queryParams.Add("runnerVersion", runnerVersion);
}
var result = await SendAsync<TaskAgentMessage>(
new HttpMethod("GET"),
requestUri: requestUri,
queryParameters: queryParams,
cancellationToken: cancellationToken);
if (result.IsSuccess)
{
return result.Value;
}
throw new Exception($"Failed to get job message: {result.Error}");
}
}
}
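A minimal polling sketch (illustration only; constructing the BrokerHttpClient with the broker URL and OAuth credentials is assumed to happen elsewhere, as in the constructors above):
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.WebApi;
// Hypothetical wrapper; runnerVersion and status become the "runnerVersion" and "status" query parameters.
static Task<TaskAgentMessage> PollOnceAsync(BrokerHttpClient broker, CancellationToken token)
{
    return broker.GetRunnerMessageAsync("2.304.0", TaskAgentStatus.Online, token);
}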

View File

@@ -1,3 +1,4 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
@@ -46,12 +47,137 @@ namespace GitHub.Services.Results.Contracts
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class CreateStepSummaryMetadataResponse
public class GetSignedJobLogsURLRequest
{
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedJobLogsURLResponse
{
[DataMember]
public string LogsUrl;
[DataMember]
public string BlobStorageType;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedStepLogsURLRequest
{
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string StepBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedStepLogsURLResponse
{
[DataMember]
public string LogsUrl;
[DataMember]
public string BlobStorageType;
[DataMember]
public long SoftSizeLimit;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class JobLogsMetadataCreate
{
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string UploadedAt;
[DataMember]
public long LineCount;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class StepLogsMetadataCreate
{
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string StepBackendId;
[DataMember]
public string UploadedAt;
[DataMember]
public long LineCount;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class CreateMetadataResponse
{
[DataMember]
public bool Ok;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class StepsUpdateRequest
{
[DataMember]
public IEnumerable<Step> Steps;
[DataMember]
public long ChangeOrder;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class Step
{
[DataMember]
public string ExternalId;
[DataMember]
public int Number;
[DataMember]
public string Name;
[DataMember]
public Status Status;
[DataMember]
public string StartedAt;
[DataMember]
public string CompletedAt;
[DataMember]
public Conclusion Conclusion;
}
public enum Status
{
StatusUnknown = 0,
StatusInProgress = 3,
StatusPending = 5,
StatusCompleted = 6
}
public enum Conclusion
{
ConclusionUnknown = 0,
ConclusionSuccess = 2,
ConclusionFailure = 3,
ConclusionCancelled = 4,
ConclusionSkipped = 7,
}
public static class BlobStorageTypes
{
public static readonly string AzureBlobStorage = "BLOB_STORAGE_TYPE_AZURE";

View File

@@ -21,7 +21,7 @@ using Newtonsoft.Json.Linq;
namespace Sdk.WebApi.WebApi
{
public class RawHttpClientBase: IDisposable
public class RawHttpClientBase : IDisposable
{
protected RawHttpClientBase(
Uri baseUrl,
@@ -101,7 +101,7 @@ namespace Sdk.WebApi.WebApi
}
}
protected Task<T> SendAsync<T>(
protected Task<RawHttpClientResult<T>> SendAsync<T>(
HttpMethod method,
Uri requestUri,
HttpContent content = null,
@@ -112,7 +112,7 @@ namespace Sdk.WebApi.WebApi
return SendAsync<T>(method, null, requestUri, content, queryParameters, userState, cancellationToken);
}
protected async Task<T> SendAsync<T>(
protected async Task<RawHttpClientResult<T>> SendAsync<T>(
HttpMethod method,
IEnumerable<KeyValuePair<String, String>> additionalHeaders,
Uri requestUri,
@@ -128,7 +128,7 @@ namespace Sdk.WebApi.WebApi
}
}
protected async Task<T> SendAsync<T>(
protected async Task<RawHttpClientResult<T>> SendAsync<T>(
HttpRequestMessage message,
Object userState = null,
CancellationToken cancellationToken = default(CancellationToken))
@@ -138,7 +138,16 @@ namespace Sdk.WebApi.WebApi
//from deadlocking...
using (HttpResponseMessage response = await this.SendAsync(message, userState, cancellationToken).ConfigureAwait(false))
{
return await ReadContentAsAsync<T>(response, cancellationToken).ConfigureAwait(false);
if (response.IsSuccessStatusCode)
{
T data = await ReadContentAsAsync<T>(response, cancellationToken).ConfigureAwait(false);
return RawHttpClientResult<T>.Ok(data);
}
else
{
string errorMessage = $"Error: {response.ReasonPhrase}";
return RawHttpClientResult<T>.Fail(errorMessage, response.StatusCode);
}
}
}

View File

@@ -0,0 +1,33 @@
using System.Net;
namespace Sdk.WebApi.WebApi
{
public class RawHttpClientResult
{
public bool IsSuccess { get; protected set; }
public string Error { get; protected set; }
public HttpStatusCode StatusCode { get; protected set; }
public bool IsFailure => !IsSuccess;
protected RawHttpClientResult(bool isSuccess, string error, HttpStatusCode statusCode)
{
IsSuccess = isSuccess;
Error = error;
StatusCode = statusCode;
}
}
public class RawHttpClientResult<T> : RawHttpClientResult
{
public T Value { get; private set; }
protected internal RawHttpClientResult(T value, bool isSuccess, string error, HttpStatusCode statusCode)
: base(isSuccess, error, statusCode)
{
Value = value;
}
public static RawHttpClientResult<T> Fail(string message, HttpStatusCode statusCode) => new RawHttpClientResult<T>(default(T), false, message, statusCode);
public static RawHttpClientResult<T> Ok(T value) => new RawHttpClientResult<T>(value, true, string.Empty, HttpStatusCode.OK);
}
}
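The result type is meant to replace throw-on-failure deserialization in RawHttpClientBase; a small consumption sketch (names hypothetical):
using System.Net;
using Sdk.WebApi.WebApi;
// Hypothetical handler showing the intended success/failure branches.
static string DescribeResult<T>(RawHttpClientResult<T> result)
{
    if (result.IsSuccess)
    {
        return $"ok: {result.Value}";
    }
    // IsFailure is simply !IsSuccess; StatusCode and Error carry the HTTP details.
    return result.StatusCode == HttpStatusCode.NotFound
        ? "not found"
        : $"failed: {result.Error}";
}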

View File

@@ -1,11 +1,16 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.Results.Contracts;
using System.Net.Http.Formatting;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.Results.Contracts;
using Sdk.WebApi.WebApi;
namespace GitHub.Services.Results.Client
@@ -22,70 +27,141 @@ namespace GitHub.Services.Results.Client
m_token = token;
m_resultsServiceUrl = baseUrl;
m_formatter = new JsonMediaTypeFormatter();
m_changeIdCounter = 1;
}
public async Task<GetSignedStepSummaryURLResponse> GetStepSummaryUploadUrlAsync(string planId, string jobId, string stepId, CancellationToken cancellationToken)
// Get Sas URL calls
private async Task<T> GetResultsSignedURLResponse<R, T>(Uri uri, CancellationToken cancellationToken, R request)
{
var request = new GetSignedStepSummaryURLRequest()
{
WorkflowJobRunBackendId= jobId,
WorkflowRunBackendId= planId,
StepBackendId= stepId
};
var stepSummaryUploadRequest = new Uri(m_resultsServiceUrl, "twirp/results.services.receiver.Receiver/GetStepSummarySignedBlobURL");
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, stepSummaryUploadRequest))
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri))
{
requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
requestMessage.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));
using (HttpContent content = new ObjectContent<GetSignedStepSummaryURLRequest>(request, m_formatter))
using (HttpContent content = new ObjectContent<R>(request, m_formatter))
{
requestMessage.Content = content;
using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
{
return await ReadJsonContentAsync<GetSignedStepSummaryURLResponse>(response, cancellationToken);
return await ReadJsonContentAsync<T>(response, cancellationToken);
}
}
}
}
private async Task StepSummaryUploadCompleteAsync(string planId, string jobId, string stepId, long size, CancellationToken cancellationToken)
private async Task<GetSignedStepSummaryURLResponse> GetStepSummaryUploadUrlAsync(string planId, string jobId, Guid stepId, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString("yyyy-MM-dd'T'HH:mm:ss.fffK");
var request = new StepSummaryMetadataCreate()
var request = new GetSignedStepSummaryURLRequest()
{
WorkflowJobRunBackendId= jobId,
WorkflowRunBackendId= planId,
StepBackendId = stepId,
Size = size,
UploadedAt = timestamp
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString()
};
var stepSummaryUploadCompleteRequest = new Uri(m_resultsServiceUrl, "twirp/results.services.receiver.Receiver/CreateStepSummaryMetadata");
var getStepSummarySignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetStepSummarySignedBlobURL);
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, stepSummaryUploadCompleteRequest))
return await GetResultsSignedURLResponse<GetSignedStepSummaryURLRequest, GetSignedStepSummaryURLResponse>(getStepSummarySignedBlobURLEndpoint, cancellationToken, request);
}
private async Task<GetSignedStepLogsURLResponse> GetStepLogUploadUrlAsync(string planId, string jobId, Guid stepId, CancellationToken cancellationToken)
{
var request = new GetSignedStepLogsURLRequest()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString(),
};
var getStepLogsSignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetStepLogsSignedBlobURL);
return await GetResultsSignedURLResponse<GetSignedStepLogsURLRequest, GetSignedStepLogsURLResponse>(getStepLogsSignedBlobURLEndpoint, cancellationToken, request);
}
private async Task<GetSignedJobLogsURLResponse> GetJobLogUploadUrlAsync(string planId, string jobId, CancellationToken cancellationToken)
{
var request = new GetSignedJobLogsURLRequest()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
};
var getJobLogsSignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetJobLogsSignedBlobURL);
return await GetResultsSignedURLResponse<GetSignedJobLogsURLRequest, GetSignedJobLogsURLResponse>(getJobLogsSignedBlobURLEndpoint, cancellationToken, request);
}
// Create metadata calls
private async Task SendRequest<R>(Uri uri, CancellationToken cancellationToken, R request, string timestamp)
{
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri))
{
requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
requestMessage.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));
using (HttpContent content = new ObjectContent<StepSummaryMetadataCreate>(request, m_formatter))
using (HttpContent content = new ObjectContent<R>(request, m_formatter))
{
requestMessage.Content = content;
using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
{
var jsonResponse = await ReadJsonContentAsync<CreateStepSummaryMetadataResponse>(response, cancellationToken);
var jsonResponse = await ReadJsonContentAsync<CreateMetadataResponse>(response, cancellationToken);
if (!jsonResponse.Ok)
{
throw new Exception($"Failed to mark step summary upload as complete, status code: {response.StatusCode}, ok: {jsonResponse.Ok}, size: {size}, timestamp: {timestamp}");
throw new Exception($"Failed to mark {typeof(R).Name} upload as complete, status code: {response.StatusCode}, ok: {jsonResponse.Ok}, timestamp: {timestamp}");
}
}
}
}
}
private async Task<HttpResponseMessage> UploadFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
private async Task StepSummaryUploadCompleteAsync(string planId, string jobId, Guid stepId, long size, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new StepSummaryMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString(),
Size = size,
UploadedAt = timestamp
};
var createStepSummaryMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateStepSummaryMetadata);
await SendRequest<StepSummaryMetadataCreate>(createStepSummaryMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task StepLogUploadCompleteAsync(string planId, string jobId, Guid stepId, long lineCount, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new StepLogsMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString(),
UploadedAt = timestamp,
LineCount = lineCount,
};
var createStepLogsMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateStepLogsMetadata);
await SendRequest<StepLogsMetadataCreate>(createStepLogsMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task JobLogUploadCompleteAsync(string planId, string jobId, long lineCount, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new JobLogsMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
UploadedAt = timestamp,
LineCount = lineCount,
};
var createJobLogsMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateJobLogsMetadata);
await SendRequest<JobLogsMetadataCreate>(createJobLogsMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task<HttpResponseMessage> UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
{
// Upload the file to the url
var request = new HttpRequestMessage(HttpMethod.Put, url)
@@ -95,7 +171,7 @@ namespace GitHub.Services.Results.Client
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add("x-ms-blob-type", "BlockBlob");
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob);
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
@@ -108,8 +184,55 @@ namespace GitHub.Services.Results.Client
}
}
private async Task<HttpResponseMessage> CreateAppendFileAsync(string url, string blobStorageType, CancellationToken cancellationToken)
{
var request = new HttpRequestMessage(HttpMethod.Put, url)
{
Content = new StringContent("")
};
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob);
request.Content.Headers.Add("Content-Length", "0");
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
throw new Exception($"Failed to create append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
}
return response;
}
}
private async Task<HttpResponseMessage> UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken)
{
var comp = finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock";
// Upload the file to the url
var request = new HttpRequestMessage(HttpMethod.Put, url + comp)
{
Content = new StreamContent(file)
};
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add("Content-Length", fileSize.ToString());
request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString());
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
throw new Exception($"Failed to upload append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}, object: {response}, fileSize: {fileSize}");
}
return response;
}
}
// Handle file upload for step summary
public async Task UploadStepSummaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken)
public async Task UploadStepSummaryAsync(string planId, string jobId, Guid stepId, string file, CancellationToken cancellationToken)
{
// Get the upload url
var uploadUrlResponse = await GetStepSummaryUploadUrlAsync(planId, jobId, stepId, cancellationToken);
@@ -128,15 +251,171 @@ namespace GitHub.Services.Results.Client
// Upload the file
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
var response = await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
}
// Send step summary upload complete message
await StepSummaryUploadCompleteAsync(planId, jobId, stepId, fileSize, cancellationToken);
}
// Handle file upload for step log
public async Task UploadResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
// Get the upload url
var uploadUrlResponse = await GetStepLogUploadUrlAsync(planId, jobId, stepId, cancellationToken);
if (uploadUrlResponse == null || uploadUrlResponse.LogsUrl == null)
{
throw new Exception("Failed to get step log upload url");
}
// Create the Append blob
if (firstBlock)
{
await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
}
// Upload content
var fileSize = new FileInfo(file).Length;
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, fileStream, finalize, fileSize, cancellationToken);
}
// Update metadata
if (finalize)
{
// Send step log upload complete message
await StepLogUploadCompleteAsync(planId, jobId, stepId, lineCount, cancellationToken);
}
}
// Handle file upload for job log
public async Task UploadResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
// Get the upload url
var uploadUrlResponse = await GetJobLogUploadUrlAsync(planId, jobId, cancellationToken);
if (uploadUrlResponse == null || uploadUrlResponse.LogsUrl == null)
{
throw new Exception("Failed to get job log upload url");
}
// Create the Append blob
if (firstBlock)
{
await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
}
// Upload content
var fileSize = new FileInfo(file).Length;
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, fileStream, finalize, fileSize, cancellationToken);
}
// Update metadata
if (finalize)
{
// Send step log upload complete message
await JobLogUploadCompleteAsync(planId, jobId, lineCount, cancellationToken);
}
}
private Step ConvertTimelineRecordToStep(TimelineRecord r)
{
return new Step()
{
ExternalId = r.Id.ToString(),
Number = r.Order.GetValueOrDefault(),
Name = r.Name,
Status = ConvertStateToStatus(r.State.GetValueOrDefault()),
StartedAt = r.StartTime?.ToString(Constants.TimestampFormat),
CompletedAt = r.FinishTime?.ToString(Constants.TimestampFormat),
Conclusion = ConvertResultToConclusion(r.Result)
};
}
private Status ConvertStateToStatus(TimelineRecordState s)
{
switch (s)
{
case TimelineRecordState.Completed:
return Status.StatusCompleted;
case TimelineRecordState.Pending:
return Status.StatusPending;
case TimelineRecordState.InProgress:
return Status.StatusInProgress;
default:
return Status.StatusUnknown;
}
}
private Conclusion ConvertResultToConclusion(TaskResult? r)
{
if (!r.HasValue)
{
return Conclusion.ConclusionUnknown;
}
switch (r)
{
case TaskResult.Succeeded:
case TaskResult.SucceededWithIssues:
return Conclusion.ConclusionSuccess;
case TaskResult.Canceled:
return Conclusion.ConclusionCancelled;
case TaskResult.Skipped:
return Conclusion.ConclusionSkipped;
case TaskResult.Failed:
return Conclusion.ConclusionFailure;
default:
return Conclusion.ConclusionUnknown;
}
}
public async Task UpdateWorkflowStepsAsync(Guid planId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var stepRecords = records.Where(r => String.Equals(r.RecordType, "Task", StringComparison.Ordinal));
var stepUpdateRequests = stepRecords.GroupBy(r => r.ParentId).Select(sg => new StepsUpdateRequest()
{
WorkflowRunBackendId = planId.ToString(),
WorkflowJobRunBackendId = sg.Key.ToString(),
ChangeOrder = m_changeIdCounter++,
Steps = sg.Select(ConvertTimelineRecordToStep)
});
var stepUpdateEndpoint = new Uri(m_resultsServiceUrl, Constants.WorkflowStepsUpdate);
foreach (var request in stepUpdateRequests)
{
await SendRequest<StepsUpdateRequest>(stepUpdateEndpoint, cancellationToken, request, timestamp);
}
}
private MediaTypeFormatter m_formatter;
private Uri m_resultsServiceUrl;
private string m_token;
private int m_changeIdCounter;
}
// Constants specific to results
public static class Constants
{
public static readonly string TimestampFormat = "yyyy-MM-dd'T'HH:mm:ss.fffK";
public static readonly string ResultsReceiverTwirpEndpoint = "twirp/results.services.receiver.Receiver/";
public static readonly string GetStepSummarySignedBlobURL = ResultsReceiverTwirpEndpoint + "GetStepSummarySignedBlobURL";
public static readonly string CreateStepSummaryMetadata = ResultsReceiverTwirpEndpoint + "CreateStepSummaryMetadata";
public static readonly string GetStepLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetStepLogsSignedBlobURL";
public static readonly string CreateStepLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateStepLogsMetadata";
public static readonly string GetJobLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetJobLogsSignedBlobURL";
public static readonly string CreateJobLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateJobLogsMetadata";
public static readonly string ResultsProtoApiV1Endpoint = "twirp/github.actions.results.api.v1.WorkflowStepUpdateService/";
public static readonly string WorkflowStepsUpdate = ResultsProtoApiV1Endpoint + "WorkflowStepsUpdate";
public static readonly string AzureBlobSealedHeader = "x-ms-blob-sealed";
public static readonly string AzureBlobTypeHeader = "x-ms-blob-type";
public static readonly string AzureBlockBlob = "BlockBlob";
public static readonly string AzureAppendBlob = "AppendBlob";
}
}
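To make the append-blob flow concrete, a hedged sketch of streaming a step log in two chunks (illustration only; the client instance, backend ids, and chunk file names are assumed inputs, and the enclosing client type is referred to here as ResultsHttpClient):
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.Results.Client;
// Hypothetical driver: the first call creates the append blob, the last call seals it and records metadata.
static async Task StreamStepLogAsync(ResultsHttpClient results, string planId, string jobId, Guid stepId, CancellationToken token)
{
    // firstBlock: true creates the append blob before the first chunk is uploaded.
    await results.UploadResultsStepLogAsync(planId, jobId, stepId, "chunk-1.log",
        finalize: false, firstBlock: true, lineCount: 0, cancellationToken: token);
    // finalize: true seals the blob and sends the StepLogsMetadataCreate message with the total line count.
    await results.UploadResultsStepLogAsync(planId, jobId, stepId, "chunk-2.log",
        finalize: true, firstBlock: false, lineCount: 120, cancellationToken: token);
}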

View File

@@ -19,6 +19,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
public class ConfigurationManagerL0
{
private Mock<IRunnerServer> _runnerServer;
private Mock<IRunnerDotcomServer> _dotcomServer;
private Mock<ILocationServer> _locationServer;
private Mock<ICredentialManager> _credMgr;
private Mock<IPromptManager> _promptManager;
@@ -55,6 +56,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
_store = new Mock<IConfigurationStore>();
_extnMgr = new Mock<IExtensionManager>();
_rsaKeyManager = new Mock<IRSAKeyManager>();
_dotcomServer = new Mock<IRunnerDotcomServer>();
#if OS_WINDOWS
_serviceControlManager = new Mock<IWindowsServiceControlManager>();
@@ -71,6 +73,13 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
AuthorizationUrl = new Uri("http://localhost:8080/pipelines"),
};
var expectedRunner = new GitHub.DistributedTask.WebApi.Runner() { Name = expectedAgent.Name, Id = 1 };
expectedRunner.RunnerAuthorization = new GitHub.DistributedTask.WebApi.Runner.Authorization
{
ClientId = expectedAgent.Authorization.ClientId.ToString(),
AuthorizationUrl = new Uri("http://localhost:8080/pipelines"),
};
var connectionData = new ConnectionData()
{
InstanceId = Guid.NewGuid(),
@@ -106,6 +115,10 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
_runnerServer.Setup(x => x.AddAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent));
_runnerServer.Setup(x => x.ReplaceAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent));
_dotcomServer.Setup(x => x.GetRunnersAsync(It.IsAny<int>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedAgents));
_dotcomServer.Setup(x => x.GetRunnerGroupsAsync(It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedPools));
_dotcomServer.Setup(x => x.AddRunnerAsync(It.IsAny<int>(), It.IsAny<TaskAgent>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedRunner));
rsa = new RSACryptoServiceProvider(2048);
_rsaKeyManager.Setup(x => x.CreateKey()).Returns(rsa);
@@ -119,6 +132,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
tc.SetSingleton<IConfigurationStore>(_store.Object);
tc.SetSingleton<IExtensionManager>(_extnMgr.Object);
tc.SetSingleton<IRunnerServer>(_runnerServer.Object);
tc.SetSingleton<IRunnerDotcomServer>(_dotcomServer.Object);
tc.SetSingleton<ILocationServer>(_locationServer.Object);
#if OS_WINDOWS

View File

@@ -1,12 +1,17 @@
using System;
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Listener;
using GitHub.Runner.Listener.Configuration;
using GitHub.Services.WebApi;
using Moq;
using Sdk.RSWebApi.Contracts;
using Xunit;
using Pipelines = GitHub.DistributedTask.Pipelines;
@@ -18,6 +23,8 @@ namespace GitHub.Runner.Common.Tests.Listener
private Mock<IProcessChannel> _processChannel;
private Mock<IProcessInvoker> _processInvoker;
private Mock<IRunnerServer> _runnerServer;
private Mock<IRunServer> _runServer;
private Mock<IConfigurationStore> _configurationStore;
public JobDispatcherL0()
@@ -25,6 +32,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_processChannel = new Mock<IProcessChannel>();
_processInvoker = new Mock<IProcessInvoker>();
_runnerServer = new Mock<IRunnerServer>();
_runServer = new Mock<IRunServer>();
_configurationStore = new Mock<IConfigurationStore>();
}
@@ -139,7 +147,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully);
_runnerServer.Verify(x => x.RenewAgentRequestAsync(It.IsAny<int>(), It.IsAny<long>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Exactly(5));
@@ -197,7 +205,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -205,6 +213,75 @@ namespace GitHub.Runner.Common.Tests.Listener
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void DispatcherRenewJobOnRunServiceStopOnJobNotFoundExceptions()
{
//Arrange
using (var hc = new TestHostContext(this))
{
int poolId = 1;
Int64 requestId = 1000;
int count = 0;
var trace = hc.GetTrace(nameof(DispatcherRenewJobOnRunServiceStopOnJobNotFoundExceptions));
TaskCompletionSource<int> firstJobRequestRenewed = new();
CancellationTokenSource cancellationTokenSource = new();
TaskAgentJobRequest request = new();
PropertyInfo lockUntilProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
Assert.NotNull(lockUntilProperty);
lockUntilProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5));
hc.SetSingleton<IRunServer>(_runServer.Object);
hc.SetSingleton<IConfigurationStore>(_configurationStore.Object);
_configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 });
_ = _runServer.Setup(x => x.RenewJobAsync(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<CancellationToken>()))
.Returns(() =>
{
count++;
if (!firstJobRequestRenewed.Task.IsCompletedSuccessfully)
{
trace.Info("First renew happens.");
}
if (count < 5)
{
var response = new RenewJobResponse()
{
LockedUntil = request.LockedUntil.Value
};
return Task.FromResult<RenewJobResponse>(response);
}
else if (count == 5)
{
cancellationTokenSource.CancelAfter(10000);
throw new TaskOrchestrationJobNotFoundException("");
}
else
{
throw new InvalidOperationException("Should not reach here.");
}
});
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
EnableRunServiceJobForJobDispatcher(jobDispatcher);
// Set the value of the _isRunServiceJob field to true
var isRunServiceJobField = typeof(JobDispatcher).GetField("_isRunServiceJob", BindingFlags.NonPublic | BindingFlags.Instance);
isRunServiceJobField.SetValue(jobDispatcher, true);
await jobDispatcher.RenewJobRequestAsync(GetAgentJobRequestMessage(), GetServiceEndpoint(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
_runServer.Verify(x => x.RenewJobAsync(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<CancellationToken>()), Times.Exactly(5));
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
@@ -256,7 +333,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -312,8 +389,9 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
// Act
await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
// Assert
_configurationStore.Verify(x => x.SaveSettings(It.Is<RunnerSettings>(settings => settings.AgentName == newName)), Times.Once);
@@ -368,7 +446,7 @@ namespace GitHub.Runner.Common.Tests.Listener
jobDispatcher.Initialize(hc);
// Act
await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
// Assert
_configurationStore.Verify(x => x.SaveSettings(It.IsAny<RunnerSettings>()), Times.Never);
@@ -421,7 +499,7 @@ namespace GitHub.Runner.Common.Tests.Listener
jobDispatcher.Initialize(hc);
// Act
await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
// Assert
_configurationStore.Verify(x => x.SaveSettings(It.IsAny<RunnerSettings>()), Times.Never);
@@ -479,7 +557,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.True(cancellationTokenSource.IsCancellationRequested);
@@ -536,7 +614,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.False(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should fail.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -600,7 +678,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -659,5 +737,78 @@ namespace GitHub.Runner.Common.Tests.Listener
Assert.True(jobDispatcher.RunOnceJobCompleted.Task.Result, "JobDispatcher should set task complete token to 'TRUE' for one time agent.");
}
}
private static void EnableRunServiceJobForJobDispatcher(JobDispatcher jobDispatcher)
{
// Set the value of the _isRunServiceJob field to true
var isRunServiceJobField = typeof(JobDispatcher).GetField("_isRunServiceJob", BindingFlags.NonPublic | BindingFlags.Instance);
isRunServiceJobField.SetValue(jobDispatcher, true);
}
private static ServiceEndpoint GetServiceEndpoint()
{
var serviceEndpoint = new ServiceEndpoint
{
Authorization = new EndpointAuthorization
{
Scheme = EndpointAuthorizationSchemes.OAuth
}
};
serviceEndpoint.Authorization.Parameters.Add("AccessToken", "token");
return serviceEndpoint;
}
private static AgentJobRequestMessage GetAgentJobRequestMessage()
{
var message = new AgentJobRequestMessage(
new TaskOrchestrationPlanReference()
{
PlanType = "Build",
PlanId = Guid.NewGuid(),
Version = 1
},
new TimelineReference()
{
Id = Guid.NewGuid()
},
Guid.NewGuid(),
"jobDisplayName",
"jobName",
null,
null,
new List<TemplateToken>(),
new Dictionary<string, VariableValue>()
{
{
"variables",
new VariableValue()
{
IsSecret = false,
Value = "variables"
}
}
},
new List<MaskHint>()
{
new MaskHint()
{
Type = MaskType.Variable,
Value = "maskHints"
}
},
new JobResources(),
new DictionaryContextData(),
new WorkspaceOptions(),
new List<JobStep>(),
new List<string>()
{
"fileTable"
},
null,
new List<TemplateToken>(),
new ActionsEnvironmentReference("env")
);
return message;
}
}
}

View File

@@ -351,7 +351,7 @@ namespace GitHub.Runner.Common.Tests
Assert.False(proxy.IsBypassed(new Uri("https://actions.com")));
Assert.False(proxy.IsBypassed(new Uri("https://ggithub.com")));
Assert.False(proxy.IsBypassed(new Uri("https://github.comm")));
Assert.False(proxy.IsBypassed(new Uri("https://google.com")));
Assert.False(proxy.IsBypassed(new Uri("https://google.com"))); // no_proxy has '.google.com', specifying only subdomains bypass
Assert.False(proxy.IsBypassed(new Uri("https://example.com")));
Assert.False(proxy.IsBypassed(new Uri("http://example.com:333")));
Assert.False(proxy.IsBypassed(new Uri("http://192.168.0.123:123")));
@@ -374,6 +374,76 @@ namespace GitHub.Runner.Common.Tests
CleanProxyEnv();
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void BypassAllOnWildcardNoProxy()
{
try
{
Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
Environment.SetEnvironmentVariable("no_proxy", "example.com, * , example2.com");
var proxy = new RunnerWebProxy();
Assert.True(proxy.IsBypassed(new Uri("http://actions.com")));
Assert.True(proxy.IsBypassed(new Uri("http://localhost")));
Assert.True(proxy.IsBypassed(new Uri("http://127.0.0.1:8080")));
Assert.True(proxy.IsBypassed(new Uri("https://actions.com")));
Assert.True(proxy.IsBypassed(new Uri("https://localhost")));
Assert.True(proxy.IsBypassed(new Uri("https://127.0.0.1:8080")));
}
finally
{
CleanProxyEnv();
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void IgnoreWildcardInNoProxySubdomain()
{
try
{
Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
Environment.SetEnvironmentVariable("no_proxy", "*.example.com");
var proxy = new RunnerWebProxy();
Assert.False(proxy.IsBypassed(new Uri("http://sub.example.com")));
Assert.False(proxy.IsBypassed(new Uri("http://example.com")));
}
finally
{
CleanProxyEnv();
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void WildcardNoProxyWorksWhenOtherNoProxyAreAround()
{
try
{
Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
Environment.SetEnvironmentVariable("no_proxy", "example.com,*,example2.com");
var proxy = new RunnerWebProxy();
Assert.True(proxy.IsBypassed(new Uri("http://actions.com")));
Assert.True(proxy.IsBypassed(new Uri("http://localhost")));
Assert.True(proxy.IsBypassed(new Uri("http://127.0.0.1:8080")));
Assert.True(proxy.IsBypassed(new Uri("https://actions.com")));
Assert.True(proxy.IsBypassed(new Uri("https://localhost")));
Assert.True(proxy.IsBypassed(new Uri("https://127.0.0.1:8080")));
}
finally
{
CleanProxyEnv();
}
}
[Fact]
[Trait("Level", "L0")]

View File

@@ -0,0 +1,63 @@
using System.Collections.Generic;
using System.IO;
using System.Runtime.Serialization.Json;
using System.Text;
using Xunit;
namespace GitHub.Actions.RunService.WebApi.Tests;
public sealed class AcquireJobRequestL0
{
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void VerifySerialization()
{
var jobMessageId = "1526919030369-33";
var request = new AcquireJobRequest
{
JobMessageId = jobMessageId,
StreamId = jobMessageId
};
var serializer = new DataContractJsonSerializer(typeof(AcquireJobRequest));
using var stream = new MemoryStream();
serializer.WriteObject(stream, request);
stream.Position = 0;
using var reader = new StreamReader(stream, Encoding.UTF8);
string json = reader.ReadToEnd();
string expected = DoubleQuotify(string.Format("{{'jobMessageId':'{0}','streamId':'{0}'}}", request.JobMessageId));
Assert.Equal(expected, json);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void VerifyDeserialization()
{
var serializer = new DataContractJsonSerializer(typeof(AcquireJobRequest));
var variations = new Dictionary<string, string>()
{
["{'streamId': 'legacy', 'jobMessageId': 'new-1'}"] = "new-1",
["{'jobMessageId': 'new-2', 'streamId': 'legacy'}"] = "new-2",
["{'jobMessageId': 'new-3'}"] = "new-3"
};
foreach (var (source, expected) in variations)
{
using var stream = new MemoryStream();
stream.Write(Encoding.UTF8.GetBytes(DoubleQuotify(source)));
stream.Position = 0;
var recoveredRecord = serializer.ReadObject(stream) as AcquireJobRequest;
Assert.NotNull(recoveredRecord);
Assert.Equal(expected, recoveredRecord.JobMessageId);
}
}
private static string DoubleQuotify(string text)
{
return text.Replace('\'', '"');
}
}

View File

@@ -1,4 +1,3 @@
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using System;
using System.IO;
@@ -931,6 +930,36 @@ namespace GitHub.Runner.Common.Tests.Util
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void LoadObject_ThrowsOnRequiredLoadObject()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
// Arrange: Create a directory with a file.
string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName());
string file = Path.Combine(directory, "empty file");
Directory.CreateDirectory(directory);
File.WriteAllText(path: file, contents: "");
Assert.Throws<ArgumentNullException>(() => IOUtil.LoadObject<RunnerSettings>(file, true));
file = Path.Combine(directory, "invalid type file");
File.WriteAllText(path: file, contents: " ");
Assert.Throws<ArgumentException>(() => IOUtil.LoadObject<RunnerSettings>(file, true));
// Cleanup.
if (Directory.Exists(directory))
{
Directory.Delete(directory, recursive: true);
}
}
}
private static async Task CreateDirectoryReparsePoint(IHostContext context, string link, string target)
{
#if OS_WINDOWS

View File

@@ -1,4 +1,4 @@
using GitHub.Runner.Common.Util;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using System;
using System.IO;
@@ -89,5 +89,128 @@ namespace GitHub.Runner.Common.Tests.Util
Assert.Equal(gitPath, gitPath2);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void WhichHandlesSymlinkToTargetFullPath()
{
// Arrange
using TestHostContext hc = new TestHostContext(this);
Tracing trace = hc.GetTrace();
string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
#if OS_WINDOWS
string newValue = oldValue + @$";{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}.exe";
string target = Path.GetTempPath() + $"target-{Guid.NewGuid()}.exe";
#else
string newValue = oldValue + @$":{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}";
string target = Path.GetTempPath() + $"target-{Guid.NewGuid()}";
#endif
Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue);
using (File.Create(target))
{
File.CreateSymbolicLink(symlink, target);
// Act.
var result = WhichUtil.Which(symlinkName, require: true, trace: trace);
// Assert
Assert.True(!string.IsNullOrEmpty(result) && File.Exists(result), $"Unable to find symlink through: {nameof(WhichUtil.Which)}");
}
// Cleanup
File.Delete(symlink);
File.Delete(target);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void WhichHandlesSymlinkToTargetRelativePath()
{
// Arrange
using TestHostContext hc = new TestHostContext(this);
Tracing trace = hc.GetTrace();
string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
#if OS_WINDOWS
string newValue = oldValue + @$";{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}.exe";
string targetName = $"target-{Guid.NewGuid()}.exe";
string target = Path.GetTempPath() + targetName;
#else
string newValue = oldValue + @$":{Path.GetTempPath()}";
string symlinkName = $"symlink-{Guid.NewGuid()}";
string symlink = Path.GetTempPath() + $"{symlinkName}";
string targetName = $"target-{Guid.NewGuid()}";
string target = Path.GetTempPath() + targetName;
#endif
Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue);
using (File.Create(target))
{
File.CreateSymbolicLink(symlink, targetName);
// Act.
var result = WhichUtil.Which(symlinkName, require: true, trace: trace);
// Assert
Assert.True(!string.IsNullOrEmpty(result) && File.Exists(result), $"Unable to find {symlinkName} through: {nameof(WhichUtil.Which)}");
}
// Cleanup
File.Delete(symlink);
File.Delete(target);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void WhichThrowsWhenSymlinkBroken()
{
// Arrange
using TestHostContext hc = new TestHostContext(this);
Tracing trace = hc.GetTrace();
string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
#if OS_WINDOWS
string newValue = oldValue + @$";{Path.GetTempPath()}";
string brokenSymlinkName = $"broken-symlink-{Guid.NewGuid()}";
string brokenSymlink = Path.GetTempPath() + $"{brokenSymlinkName}.exe";
#else
string newValue = oldValue + @$":{Path.GetTempPath()}";
string brokenSymlinkName = $"broken-symlink-{Guid.NewGuid()}";
string brokenSymlink = Path.GetTempPath() + $"{brokenSymlinkName}";
#endif
string target = "no-such-file-cf7e351f";
Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue);
File.CreateSymbolicLink(brokenSymlink, target);
// Act.
var exception = Assert.Throws<FileNotFoundException>(() => WhichUtil.Which(brokenSymlinkName, require: true, trace: trace));
// Assert
Assert.Equal(brokenSymlinkName, exception.FileName);
// Cleanup
File.Delete(brokenSymlink);
Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue);
}
}
}
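The three symlink tests above reduce to one question: when a PATH match is a symbolic link, does its target still exist? A minimal sketch of that check follows; the type and method names and the single-level resolution are assumptions, not the runner's implementation:

    using System.IO;

    static class SymlinkProbe
    {
        // True when 'path' is a symbolic link whose immediate target is missing.
        // Only one level of indirection is followed; a full resolver would walk the chain.
        public static bool IsBrokenSymlink(string path)
        {
            var info = new FileInfo(path);
            string linkTarget = info.LinkTarget; // null when 'path' is not a link (.NET 6+)
            if (linkTarget == null)
            {
                return false;
            }

            // Relative targets resolve against the directory that contains the link,
            // which is what WhichHandlesSymlinkToTargetRelativePath relies on.
            string resolved = Path.IsPathRooted(linkTarget)
                ? linkTarget
                : Path.Combine(info.DirectoryName ?? string.Empty, linkTarget);

            return !File.Exists(resolved);
        }
    }

A lookup like WhichUtil.Which would skip entries this probe flags, or raise FileNotFoundException carrying the searched name when require is true, which is what WhichThrowsWhenSymlinkBroken asserts.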

View File

@@ -25,21 +25,21 @@ runs:
- run: exit ${{ inputs.exit-code }}
shell: bash
- run: echo "::set-output name=default::true"
- run: echo "default=true" >> $GITHUB_OUTPUT
id: default-conditional
shell: bash
- run: echo "::set-output name=success::true"
- run: echo "success=true" >> $GITHUB_OUTPUT
id: success-conditional
shell: bash
if: success()
- run: echo "::set-output name=failure::true"
- run: echo "failure=true" >> $GITHUB_OUTPUT
id: failure-conditional
shell: bash
if: failure()
- run: echo "::set-output name=always::true"
- run: echo "always=true" >> $GITHUB_OUTPUT
id: always-conditional
shell: bash
if: always()

View File

@@ -1 +1 @@
2.302.1
2.304.0