Compare commits

...

22 Commits

Author SHA1 Message Date
Luke Tomlinson
534bcec44b Fix conflicts 2023-03-21 11:29:42 -04:00
Luke Tomlinson
97d28f7803 wip 2023-03-21 11:14:56 -04:00
Luke Tomlinson
97c15fd816 Parse runners and send publicKey 2023-03-21 11:13:03 -04:00
Tatyana Kostromskaya
bb7b1e8259 Update runner to handle Dotcom/runner-admin based registration flow (#2487) 2023-03-21 16:05:32 +01:00
Tingluo Huang
440c81b770 Bump container hooks version to 0.3.1 (#2496) 2023-03-20 14:53:47 +00:00
Bassem Dghaidi
9958fc0374 Update container hooks version to 0.3.0 (#2492) 2023-03-17 06:00:55 -04:00
Tingluo Huang
81b07eb1c4 Prepare runner release 2.303.0 (#2482) 2023-03-10 10:45:56 +00:00
Jongwoo Han
514ecec5a3 Replace deprecated command with environment file (#2429)
Signed-off-by: jongwooo <jongwooo.han@gmail.com>
2023-03-09 22:09:05 -05:00
Ajay
128b212b13 Support matrix context in output keys (#2477) 2023-03-09 21:30:28 -05:00
Nikola Jokic
2dfa28e6e0 Add update certificates to ./run.sh if RUNNER_UPDATE_CA_CERTS env is set (#2471)
* Included entrypoint that will update certs and run ./run.sh

* update ca if RUNNER_UPDATE_CA env is set

* changed env variable to RUNNER_UPDATE_TRUST_STORE

* moved entrypoint to be run.sh, removed Dockerfile entrypoint, added envvar that will update certs

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Bassem Dghaidi <568794+Link-@users.noreply.github.com>

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Bassem Dghaidi <568794+Link-@users.noreply.github.com>

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Bassem Dghaidi <568794+Link-@users.noreply.github.com>

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Tingluo Huang <tingluohuang@github.com>

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Tingluo Huang <tingluohuang@github.com>

* removed doc comment on func

---------

Co-authored-by: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Co-authored-by: Tingluo Huang <tingluohuang@github.com>
2023-03-08 12:29:55 -05:00
Nikola Jokic
fd96246580 Exit on deprication error (#2299)
* terminate the runner on deprication message

* added TaskAgentVersion exception to catch deprication

* AccessDenied exception with inner exception InvalidTaskAgent

* Access denied exception in program and in message listener

* Fixed copy

* remove trace message from message listener
2023-03-07 14:09:10 +01:00
Ferenc Hammerl
8ef48200b4 Bypass all proxies for all hosts if no_proxy='*' is set (#2395)
* Bypass top level domain even if no_proxy specified it with leading '.'

E.g. no_proxy='.github.com' will now bypass github.com.

* Bypass proxy on all hosts when no_proxy is * (wildcard)

* Undo '.' stripping

* Simplify unit tests

* Respect wildcard even if it's one of many no_proxy items
2023-03-06 11:01:45 +01:00
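For reference, the proxy bypass behaviour described in this commit is controlled entirely by environment variables read at runner start-up. A minimal sketch, with an illustrative proxy host and the `no_proxy` semantics taken from the commit description:

```bash
# Route runner traffic through a proxy, but skip it where configured.
export https_proxy=http://proxy.example.com:8080
export no_proxy='*'            # the '*' wildcard now bypasses the proxy for all hosts (#2395)
# A leading dot also matches the bare domain, e.g. '.github.com' bypasses github.com:
# export no_proxy='.github.com,localhost'
./run.sh
```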
Tingluo Huang
d61b27b839 Change runner image to make user/folder align with ubuntu-latest hosted runner. (#2469) 2023-03-02 13:42:23 -05:00
Nikola Jokic
542e8a3c98 Runner service exit after consecutive re-try exits (#2426)
* Runner service exit after consecutive re-try exits

* Rename failure counts and include reset count on runner listening for jobs

* Changed from graceful shutdown to stopping=true
2023-02-28 16:00:36 +01:00
Yashwanth Anantharaju
e8975514fd call run service renewjob (#2461)
* call run service renewjob

* format

* formatting

* make it private and expose internals

* lint

* fix exception class

* lint

* fix test as well
2023-02-27 16:50:28 +00:00
Yang Cao
0befa62f64 Add job log upload support (#2447)
* Refactor and add job log upload support

* Rename method to be consistent
2023-02-21 09:55:47 -05:00
Yang Cao
aaf02ab34c Properly guard upload (#2439)
* Revert "Revert "Uploading step logs to Results as well  (#2422)" (#2437)"

This reverts commit 8c096baf49.

* Properly guard the upload to results feature

* Delete skipped file if deletesource is true
2023-02-17 10:31:41 -05:00
JoannaaKL
02c9d1c704 Don't disable lint errors (#2436)
* Update lint.yml

Don't ignore the formatting errors

* Add formatting made by @cory-miller

* Use dotnet format

* Format only modified files

* Add instruction to contribute.md

* Use git status instead of git diff
2023-02-16 14:33:03 +01:00
Tingluo Huang
982784d704 Wait for docker base on env RUNNER_WAIT_FOR_DOCKER_IN_SECONDS. (#2440) 2023-02-15 16:05:36 -05:00
Yang Cao
8c096baf49 Revert "Uploading step logs to Results as well (#2422)" (#2437)
This reverts commit e979331be4.
2023-02-15 11:21:32 -05:00
JoannaaKL
8d6972e38b Don't add Needs constant twice 2023-02-15 10:50:22 +00:00
Tingluo Huang
1ab35b0938 Use v2 version based on https://github.blog/changelog/2023-01-18-code-scanning-codeql-action-v1-is-now-deprecated/ (#2434) 2023-02-14 16:01:52 +00:00
43 changed files with 1551 additions and 349 deletions

View File

@@ -27,7 +27,7 @@ jobs:
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v1
+      uses: github/codeql-action/init@v2
     # Override language selection by uncommenting this and choosing your languages
     # with:
     #   languages: go, javascript, csharp, python, cpp, java
@@ -38,4 +38,4 @@ jobs:
       working-directory: src
     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v1
+      uses: github/codeql-action/analyze@v2

View File

@@ -18,7 +18,6 @@ jobs:
       uses: github/super-linter@v4
       env:
         DEFAULT_BRANCH: ${{ github.base_ref }}
-        DISABLE_ERRORS: true
        EDITORCONFIG_FILE_NAME: .editorconfig
         LINTER_RULES_PATH: /src/
         VALIDATE_ALL_CODEBASE: false

View File

@@ -131,7 +131,7 @@ jobs:
          file=$(ls)
          sha=$(sha256sum $file | awk '{ print $1 }')
          echo "Computed sha256: $sha for $file"
-          echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
+          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
        shell: bash
        id: sha
        name: Compute SHA256
@@ -140,8 +140,8 @@ jobs:
          file=$(ls)
          sha=$(sha256sum $file | awk '{ print $1 }')
          echo "Computed sha256: $sha for $file"
-          echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
-          echo "::set-output name=sha256::$sha"
+          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
+          echo "sha256=$sha" >> $GITHUB_OUTPUT
        shell: bash
        id: sha_noexternals
        name: Compute SHA256
@@ -150,8 +150,8 @@ jobs:
          file=$(ls)
          sha=$(sha256sum $file | awk '{ print $1 }')
          echo "Computed sha256: $sha for $file"
-          echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
-          echo "::set-output name=sha256::$sha"
+          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
+          echo "sha256=$sha" >> $GITHUB_OUTPUT
        shell: bash
        id: sha_noruntime
        name: Compute SHA256
@@ -160,8 +160,8 @@ jobs:
          file=$(ls)
          sha=$(sha256sum $file | awk '{ print $1 }')
          echo "Computed sha256: $sha for $file"
-          echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
-          echo "::set-output name=sha256::$sha"
+          echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
+          echo "sha256=$sha" >> $GITHUB_OUTPUT
        shell: bash
        id: sha_noruntime_noexternals
        name: Compute SHA256
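The replacement pattern the workflow now relies on appends key=value pairs to the file GitHub Actions exposes as `$GITHUB_OUTPUT`, instead of emitting the deprecated `::set-output` workflow command. A minimal sketch of the same idea outside this workflow (the file name `runner.tar.gz` is illustrative):

```bash
# Deprecated form: echo "::set-output name=sha256::$sha"
# Current form: append key=value to the step output file.
sha=$(sha256sum runner.tar.gz | awk '{ print $1 }')
echo "sha256=$sha" >> "$GITHUB_OUTPUT"
# A later step can then read it as ${{ steps.<step-id>.outputs.sha256 }}
```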

View File

@@ -158,3 +158,11 @@ cat (Runner/Worker)_TIMESTAMP.log # view your log file
 We use the .NET Foundation and CoreCLR style guidelines [located here](
 https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md)
+### Format C# Code
+To format both staged and unstaged .cs files
+```
+cd ./src
+./dev.(cmd|sh) format
+```

View File

@@ -2,7 +2,7 @@ FROM mcr.microsoft.com/dotnet/runtime-deps:6.0 as build
 ARG RUNNER_VERSION
 ARG RUNNER_ARCH="x64"
-ARG RUNNER_CONTAINER_HOOKS_VERSION=0.2.0
+ARG RUNNER_CONTAINER_HOOKS_VERSION=0.3.1
 ARG DOCKER_VERSION=20.10.23

 RUN apt update -y && apt install curl unzip -y
@@ -24,11 +24,26 @@ RUN export DOCKER_ARCH=x86_64 \
 FROM mcr.microsoft.com/dotnet/runtime-deps:6.0
-ENV RUNNER_ALLOW_RUNASROOT=1
+ENV DEBIAN_FRONTEND=noninteractive
 ENV RUNNER_MANUALLY_TRAP_SIG=1
 ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
-WORKDIR /actions-runner
-COPY --from=build /actions-runner .
-RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker
+RUN apt-get update -y \
+    && apt-get install -y --no-install-recommends \
+    sudo \
+    && rm -rf /var/lib/apt/lists/*
+RUN adduser --disabled-password --gecos "" --uid 1001 runner \
+    && groupadd docker --gid 123 \
+    && usermod -aG sudo runner \
+    && usermod -aG docker runner \
+    && echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
+    && echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers
+WORKDIR /home/runner
+COPY --chown=runner:docker --from=build /actions-runner .
+RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker
+USER runner
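A hedged sketch of how the reworked image might be built and sanity-checked; the tag name is illustrative, while `RUNNER_VERSION` and the non-root `runner` user (uid 1001, home `/home/runner`) come from the Dockerfile changes above:

```bash
# Build the image, supplying the runner version the Dockerfile expects.
docker build --build-arg RUNNER_VERSION=2.303.0 -t example/actions-runner:2.303.0 .

# The final stage now switches to USER runner, so processes run as uid 1001
# under /home/runner, matching the ubuntu-latest hosted runner layout.
docker run --rm example/actions-runner:2.303.0 id
```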

View File

@@ -1,16 +1,17 @@
 ## Features
-- Add support for ghe.com domain (#2420)
-- Add docker cli to the runner image. (#2425)
-- Uploading step logs to Results service (#2422)
+- Support matrix context in output keys (#2477)
+- Add update certificates to `./run.sh` if `RUNNER_UPDATE_CA_CERTS` env is set (#2471)
+- Bypass all proxies for all hosts if `no_proxy='*'` is set (#2395)
+- Change runner image to make user/folder align with `ubuntu-latest` hosted runner. (#2469)

 ## Bugs
-- Fix URL construction bug for RunService (#2396)
-- Defer evaluation of a step's DisplayName until its condition is evaluated. (#2313)
-- Replace '(' and ')' with '[' and '] from OS.Description for fixing User-Agent header validation (#2288)
+- Exit on runner version deprecation error (#2299)
+- Runner service exit after consecutive re-try exits (#2426)

 ## Misc
-- Bump dotnet sdk to latest version. (#2392)
-- Start calling run service for job completion (#2412, #2423)
+- Replace deprecated command with environment file (#2429)
+- Make requests to `Run` service to renew job request (#2461)
+- Add job/step log upload to Result service (#2447, #2439)

 _Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
 To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.

View File

@@ -24,7 +24,8 @@ if (exitServiceAfterNFailures <= 0) {
   exitServiceAfterNFailures = NaN;
 }

-var consecutiveFailureCount = 0;
+var unknownFailureRetryCount = 0;
+var retriableFailureRetryCount = 0;

 var gracefulShutdown = function () {
   console.log("Shutting down runner listener");
@@ -62,7 +63,8 @@ var runService = function () {
     listener.stdout.on("data", (data) => {
       if (data.toString("utf8").includes("Listening for Jobs")) {
-        consecutiveFailureCount = 0;
+        unknownFailureRetryCount = 0;
+        retriableFailureRetryCount = 0;
       }
       process.stdout.write(data.toString("utf8"));
     });
@@ -92,24 +94,38 @@ var runService = function () {
        console.log(
          "Runner listener exit with retryable error, re-launch runner in 5 seconds."
        );
-        consecutiveFailureCount = 0;
+        unknownFailureRetryCount = 0;
+        retriableFailureRetryCount++;
+        if (retriableFailureRetryCount >= 10) {
+          console.error(
+            "Stopping the runner after 10 consecutive re-tryable failures"
+          );
+          stopping = true;
+        }
      } else if (code === 3 || code === 4) {
        console.log(
          "Runner listener exit because of updating, re-launch runner in 5 seconds."
        );
-        consecutiveFailureCount = 0;
+        unknownFailureRetryCount = 0;
+        retriableFailureRetryCount++;
+        if (retriableFailureRetryCount >= 10) {
+          console.error(
+            "Stopping the runner after 10 consecutive re-tryable failures"
+          );
+          stopping = true;
+        }
      } else {
        var messagePrefix = "Runner listener exit with undefined return code";
-        consecutiveFailureCount++;
+        unknownFailureRetryCount++;
+        retriableFailureRetryCount = 0;
        if (
          !isNaN(exitServiceAfterNFailures) &&
-          consecutiveFailureCount >= exitServiceAfterNFailures
+          unknownFailureRetryCount >= exitServiceAfterNFailures
        ) {
          console.error(
-            `${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
+            `${messagePrefix}, exiting service after ${unknownFailureRetryCount} consecutive failures`
          );
-          gracefulShutdown();
-          return;
+          stopping = true
        } else {
          console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
        }

View File

@@ -18,6 +18,20 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
 done
 DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"

+# Wait for docker to start
+if [ ! -z "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS" ]; then
+  if [ "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS" -gt 0 ]; then
+    echo "Waiting for docker to be ready."
+    for i in $(seq "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS"); do
+      if docker ps > /dev/null 2>&1; then
+        echo "Docker is ready."
+        break
+      fi
+      "$DIR"/safe_sleep.sh 1
+    done
+  fi
+fi
+
 updateFile="update.finished"
 "$DIR"/bin/Runner.Listener run $*

View File

@@ -53,6 +53,33 @@ runWithManualTrap() {
   done
 }

+function updateCerts() {
+  local sudo_prefix=""
+  local user_id=`id -u`
+
+  if [ $user_id -ne 0 ]; then
+    if [[ ! -x "$(command -v sudo)" ]]; then
+      echo "Warning: failed to update certificate store: sudo is required but not found"
+      return 1
+    else
+      sudo_prefix="sudo"
+    fi
+  fi
+
+  if [[ -x "$(command -v update-ca-certificates)" ]]; then
+    eval $sudo_prefix "update-ca-certificates"
+  elif [[ -x "$(command -v update-ca-trust)" ]]; then
+    eval $sudo_prefix "update-ca-trust"
+  else
+    echo "Warning: failed to update certificate store: update-ca-certificates or update-ca-trust not found. This can happen if you're using a different runner base image."
+    return 1
+  fi
+}
+
+if [[ ! -z "$RUNNER_UPDATE_CA_CERTS" ]]; then
+  updateCerts
+fi
+
 if [[ -z "$RUNNER_MANUALLY_TRAP_SIG" ]]; then
   run $*
 else
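A possible way to combine this with a private certificate authority; the mount path follows the Debian `update-ca-certificates` convention and the image tag is illustrative, while `RUNNER_UPDATE_CA_CERTS` is the variable checked by `run.sh` above:

```bash
# Mount an extra CA certificate and ask run.sh to refresh the trust store
# (via update-ca-certificates or update-ca-trust) before the listener starts.
docker run -d \
  -e RUNNER_UPDATE_CA_CERTS=1 \
  -v "$PWD/my-ca.crt:/usr/local/share/ca-certificates/my-ca.crt:ro" \
  example/actions-runner:2.303.0
```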

View File

@@ -50,6 +50,9 @@ namespace GitHub.Runner.Common
        [DataMember(EmitDefaultValue = false)]
        public string MonitorSocketAddress { get; set; }

+        [DataMember(EmitDefaultValue = false)]
+        public bool UseV2Flow { get; set; }
+
        [IgnoreDataMember]
        public bool IsHostedServer
        {

View File

@@ -32,6 +32,7 @@ namespace GitHub.Runner.Common
        Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
        Task CreateStepSummaryAsync(string planId, string jobId, Guid stepId, string file, CancellationToken cancellationToken);
        Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken);
+        Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken);
        Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
        Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
        Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
@@ -335,6 +336,14 @@ namespace GitHub.Runner.Common
            throw new InvalidOperationException("Results client is not initialized.");
        }

+        public Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
+        {
+            if (_resultsClient != null)
+            {
+                return _resultsClient.UploadResultsJobLogAsync(planId, jobId, file, finalize, firstBlock, lineCount, cancellationToken: cancellationToken);
+            }
+            throw new InvalidOperationException("Results client is not initialized.");
+        }
+
        public Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken)
        {

View File

@@ -20,7 +20,7 @@ namespace GitHub.Runner.Common
        void Start(Pipelines.AgentJobRequestMessage jobRequest);
        void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
        void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
-        void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines = 0);
+        void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
        void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord);
    }
@@ -84,6 +84,9 @@ namespace GitHub.Runner.Common
        private bool _webConsoleLineAggressiveDequeue = true;
        private bool _firstConsoleOutputs = true;

+        private bool _resultsClientInitiated = false;
+        private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
+
        public override void Initialize(IHostContext hostContext)
        {
            base.Initialize(hostContext);
@@ -109,9 +112,9 @@ namespace GitHub.Runner.Common
            {
                Trace.Info("Initializing results client");
                _jobServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), accessToken);
+                _resultsClientInitiated = true;
            }

            if (_queueInProcess)
            {
                Trace.Info("No-opt, all queue process tasks are running.");
@@ -230,11 +233,23 @@ namespace GitHub.Runner.Common
            _fileUploadQueue.Enqueue(newFile);
        }

-        public void QueueResultsUpload(Guid recordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines)
+        public void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines)
        {
-            if (recordId == _jobTimelineRecordId)
+            if (!_resultsClientInitiated)
            {
-                Trace.Verbose("Skipping job log {0} for record {1}", path, recordId);
+                Trace.Verbose("Skipping results upload");
+                try
+                {
+                    if (deleteSource)
+                    {
+                        File.Delete(path);
+                    }
+                }
+                catch (Exception ex)
+                {
+                    Trace.Info("Catch exception during delete skipped results upload file.");
+                    Trace.Error(ex);
+                }
                return;
            }
@@ -246,14 +261,14 @@ namespace GitHub.Runner.Common
                Type = type,
                PlanId = _planId.ToString(),
                JobId = _jobTimelineRecordId.ToString(),
-                RecordId = recordId,
+                RecordId = timelineRecordId,
                DeleteSource = deleteSource,
                Finalize = finalize,
                FirstBlock = firstBlock,
                TotalLines = totalLines,
            };

-            Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to job {1} step {2}", newFile.Path, _jobTimelineRecordId, recordId);
+            Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to job {1} step {2}", newFile.Path, _jobTimelineRecordId, timelineRecordId);
            _resultsFileUploadQueue.Enqueue(newFile);
        }
@@ -488,6 +503,11 @@ namespace GitHub.Runner.Common
                            Trace.Info($"Got a step log file to send to results service.");
                            await UploadResultsStepLogFile(file);
                        }
+                        else if (file.RecordId == _jobTimelineRecordId)
+                        {
+                            Trace.Info($"Got a job log file to send to results service.");
+                            await UploadResultsJobLogFile(file);
+                        }
                    }
                }
                catch (Exception ex)
@@ -796,42 +816,43 @@ namespace GitHub.Runner.Common
        private async Task UploadSummaryFile(ResultsUploadFileInfo file)
        {
-            bool uploadSucceed = false;
-            try
-            {
-                // Upload the step summary
-                Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
-                var cancellationTokenSource = new CancellationTokenSource();
-                await _jobServer.CreateStepSummaryAsync(file.PlanId, file.JobId, file.RecordId, file.Path, cancellationTokenSource.Token);
-                uploadSucceed = true;
-            }
-            finally
-            {
-                if (uploadSucceed && file.DeleteSource)
-                {
-                    try
-                    {
-                        File.Delete(file.Path);
-                    }
-                    catch (Exception ex)
-                    {
-                        Trace.Info("Catch exception during delete success results uploaded summary file.");
-                        Trace.Error(ex);
-                    }
-                }
-            }
+            Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
+            ResultsFileUploadHandler summaryHandler = async (file) =>
+            {
+                await _jobServer.CreateStepSummaryAsync(file.PlanId, file.JobId, file.RecordId, file.Path, CancellationToken.None);
+            };
+
+            await UploadResultsFile(file, summaryHandler);
        }

        private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
+        {
+            Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");
+            ResultsFileUploadHandler stepLogHandler = async (file) =>
+            {
+                await _jobServer.CreateResultsStepLogAsync(file.PlanId, file.JobId, file.RecordId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
+            };
+
+            await UploadResultsFile(file, stepLogHandler);
+        }
+
+        private async Task UploadResultsJobLogFile(ResultsUploadFileInfo file)
+        {
+            Trace.Info($"Starting upload of job log file to results service {file.Name}, {file.Path}");
+            ResultsFileUploadHandler jobLogHandler = async (file) =>
+            {
+                await _jobServer.CreateResultsJobLogAsync(file.PlanId, file.JobId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
+            };
+
+            await UploadResultsFile(file, jobLogHandler);
+        }
+
+        private async Task UploadResultsFile(ResultsUploadFileInfo file, ResultsFileUploadHandler uploadHandler)
        {
            bool uploadSucceed = false;
            try
            {
-                Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");
-                var cancellationTokenSource = new CancellationTokenSource();
-                await _jobServer.CreateResultsStepLogAsync(file.PlanId, file.JobId, file.RecordId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, cancellationTokenSource.Token);
+                await uploadHandler(file);
                uploadSucceed = true;
            }
            finally

View File

@@ -21,6 +21,12 @@ namespace GitHub.Runner.Common
        // 8 MB
        public const int PageSize = 8 * 1024 * 1024;

+        // For Results
+        public static string BlocksFolder = "blocks";
+        // 2 MB
+        public const int BlockSize = 2 * 1024 * 1024;
+
        private Guid _timelineId;
        private Guid _timelineRecordId;
        private FileStream _pageData;
@@ -32,12 +38,6 @@ namespace GitHub.Runner.Common
        private string _pagesFolder;
        private IJobServerQueue _jobServerQueue;

-        // For Results
-        public static string BlocksFolder = "blocks";
-        // 2 MB
-        public const int BlockSize = 2 * 1024 * 1024;
-
        private string _resultsDataFileName;
        private FileStream _resultsBlockData;
        private StreamWriter _resultsBlockWriter;
@@ -99,8 +99,8 @@ namespace GitHub.Runner.Common
            }
        }

            var bytes = System.Text.Encoding.UTF8.GetByteCount(line);
            _byteCount += bytes;
            _blockByteCount += bytes;

            if (_byteCount >= PageSize)
            {

View File

@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.Threading;
 using System.Threading.Tasks;
@@ -7,6 +7,7 @@ using GitHub.DistributedTask.Pipelines;
 using GitHub.DistributedTask.WebApi;
 using GitHub.Runner.Sdk;
 using GitHub.Services.Common;
+using Sdk.RSWebApi.Contracts;
 using Sdk.WebApi.WebApi.RawClient;

 namespace GitHub.Runner.Common
@@ -19,6 +20,8 @@ namespace GitHub.Runner.Common
        Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);
        Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken token);
+
+        Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
    }

    public sealed class RunServer : RunnerService, IRunServer
@@ -64,5 +67,18 @@ namespace GitHub.Runner.Common
            return RetryRequest(
                async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, cancellationToken), cancellationToken);
        }
+
+        public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
+        {
+            CheckConnection();
+            var renewJobResponse = RetryRequest<RenewJobResponse>(
+                async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken);
+            if (renewJobResponse == null)
+            {
+                throw new TaskOrchestrationJobNotFoundException(jobId.ToString());
+            }
+
+            return renewJobResponse;
+        }
    }
 }

View File

@@ -0,0 +1,245 @@
using GitHub.DistributedTask.WebApi;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.WebApi;
using GitHub.Services.Common;
using GitHub.Runner.Sdk;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Linq;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(RunnerDotcomServer))]
public interface IRunnerDotcomServer : IRunnerService
{
Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName);
Task<TaskAgent> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey, string hostId);
Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);
string GetGitHubRequestId(HttpResponseHeaders headers);
}
public enum RequestType
{
Get,
Post,
Patch,
Delete
}
public class RunnerDotcomServer : RunnerService, IRunnerDotcomServer
{
private ITerminal _term;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = hostContext.GetService<ITerminal>();
}
public async Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName = null)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
}
}
else if (path.Length == 2)
{
// repo or enterprise runner.
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}
var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
var agents = runnersList.ToTaskAgents();
if (string.IsNullOrEmpty(agentName))
{
return agents;
}
return agents.Where(x => string.Equals(x.Name, agentName, StringComparison.OrdinalIgnoreCase)).ToList();
}
public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups";
}
}
else if (path.Length == 2)
{
// repo or enterprise runner.
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}
var agentPools = await RetryRequest<RunnerGroupList>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
return agentPools?.ToAgentPoolList();
}
public async Task<TaskAgent> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey, string hostId)
{
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
string githubApiUrl;
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runners/register";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runners/register";
}
var bodyObject = new Dictionary<string, Object>()
{
{"url", githubUrl},
{"group_id", runnerGroupId},
{"name", agent.Name},
{"version", agent.Version},
{"updates_disabled", agent.DisableUpdate},
{"ephemeral", agent.Ephemeral},
{"labels", agent.Labels},
{"public_key", publicKey},
{"host_id", hostId},
};
var body = new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json");
var runner = await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
agent.Id = runner.Id;
agent.Authorization = new TaskAgentAuthorization()
{
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
ClientId = new Guid(runner.RunnerAuthorization.ClientId),
};
return agent;
}
private async Task<T> RetryRequest<T>(string githubApiUrl, string githubToken, RequestType requestType, int maxRetryAttemptsCount = 5, string errorMessage = null, StringContent body = null)
{
int retry = 0;
while (true)
{
retry++;
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
var responseStatus = System.Net.HttpStatusCode.OK;
try
{
HttpResponseMessage response = null;
if (requestType == RequestType.Get)
{
response = await httpClient.GetAsync(githubApiUrl);
}
else
{
response = await httpClient.PostAsync(githubApiUrl, body);
}
if (response != null)
{
responseStatus = response.StatusCode;
var githubRequestId = GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from '{requestType.ToString()} {githubApiUrl}' ({githubRequestId})");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<T>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from '{requestType.ToString()} {githubApiUrl}' (Request Id: {githubRequestId})");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
}
catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound)
{
Trace.Error($"{errorMessage} -- Atempt: {retry}");
Trace.Error(ex);
}
}
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
Trace.Info($"Retrying in {backOff.Seconds} seconds");
await Task.Delay(backOff);
}
}
public string GetGitHubRequestId(HttpResponseHeaders headers)
{
if (headers.TryGetValues("x-github-request-id", out var headerValues))
{
return headerValues.FirstOrDefault();
}
return string.Empty;
}
}
}

View File

@@ -0,0 +1,14 @@
namespace GitHub.Runner.Common.Util
{
using System;
using GitHub.DistributedTask.WebApi;
public static class MessageUtil
{
public static bool IsRunServiceJob(string messageType)
{
return string.Equals(messageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase);
}
}
}

View File

@@ -14,6 +14,7 @@ using GitHub.Runner.Sdk;
 using GitHub.Services.Common;
 using GitHub.Services.Common.Internal;
 using GitHub.Services.OAuth;
+using GitHub.Services.WebApi.Jwt;

 namespace GitHub.Runner.Listener.Configuration
 {
@@ -31,12 +32,14 @@ namespace GitHub.Runner.Listener.Configuration
    {
        private IConfigurationStore _store;
        private IRunnerServer _runnerServer;
+        private IRunnerDotcomServer _dotcomServer;
        private ITerminal _term;

        public override void Initialize(IHostContext hostContext)
        {
            base.Initialize(hostContext);
            _runnerServer = HostContext.GetService<IRunnerServer>();
+            _dotcomServer = HostContext.GetService<IRunnerDotcomServer>();
            Trace.Verbose("Creating _store");
            _store = hostContext.GetService<IConfigurationStore>();
            Trace.Verbose("store created");
@@ -113,6 +116,8 @@ namespace GitHub.Runner.Listener.Configuration
            ICredentialProvider credProvider = null;
            VssCredentials creds = null;
            _term.WriteSection("Authentication");
+            string registerToken = string.Empty;
+            string hostId = string.Empty;
            while (true)
            {
                // When testing against a dev deployment of Actions Service, set this environment variable
@@ -130,11 +135,14 @@ namespace GitHub.Runner.Listener.Configuration
                else
                {
                    runnerSettings.GitHubUrl = inputUrl;
-                    var registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
+                    registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
                    GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
                    runnerSettings.ServerUrl = authResult.TenantUrl;
+                    runnerSettings.UseV2Flow = authResult.UseV2Flow;
+                    _term.WriteLine($"Using V2 flow: {runnerSettings.UseV2Flow}");
                    creds = authResult.ToVssCredentials();
                    Trace.Info("cred retrieved via GitHub auth");
+                    hostId = GetHostId(authResult.Token);
                }

                try
@@ -176,9 +184,11 @@ namespace GitHub.Runner.Listener.Configuration
            // We want to use the native CSP of the platform for storage, so we use the RSACSP directly
            RSAParameters publicKey;
            var keyManager = HostContext.GetService<IRSAKeyManager>();
+            string publicKeyXML;
            using (var rsa = keyManager.CreateKey())
            {
                publicKey = rsa.ExportParameters(false);
+                publicKeyXML = rsa.ToXmlString(includePrivateParameters: false);
            }

            _term.WriteSection("Runner Registration");
@@ -186,9 +196,17 @@ namespace GitHub.Runner.Listener.Configuration
            // If we have more than one runner group available, allow the user to specify which one to be added into
            string poolName = null;
            TaskAgentPool agentPool = null;
-            List<TaskAgentPool> agentPools = await _runnerServer.GetAgentPoolsAsync();
-            TaskAgentPool defaultPool = agentPools?.Where(x => x.IsInternal).FirstOrDefault();
+            List<TaskAgentPool> agentPools;
+            if (runnerSettings.UseV2Flow)
+            {
+                agentPools = await _dotcomServer.GetRunnerGroupsAsync(runnerSettings.GitHubUrl, registerToken);
+            }
+            else
+            {
+                agentPools = await _runnerServer.GetAgentPoolsAsync();
+            }
+
+            TaskAgentPool defaultPool = agentPools?.Where(x => x.IsInternal).FirstOrDefault();

            if (agentPools?.Where(x => !x.IsHosted).Count() > 0)
            {
                poolName = command.GetRunnerGroupName(defaultPool?.Name);
@@ -226,8 +244,16 @@ namespace GitHub.Runner.Listener.Configuration
                var userLabels = command.GetLabels();
                _term.WriteLine();

-                var agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
+                List<TaskAgent> agents;
+                if (runnerSettings.UseV2Flow)
+                {
+                    agents = await _dotcomServer.GetRunnersAsync(runnerSettings.PoolId, runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
+                }
+                else
+                {
+                    agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
+                }
                Trace.Verbose("Returns {0} agents", agents.Count);
                agent = agents.FirstOrDefault();
                if (agent != null)
@@ -274,7 +300,15 @@ namespace GitHub.Runner.Listener.Configuration
                try
                {
-                    agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
+                    if (runnerSettings.UseV2Flow)
+                    {
+                        agent = await _dotcomServer.AddRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML, hostId);
+                    }
+                    else
+                    {
+                        agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
+                    }
+
                    if (command.DisableUpdate &&
                        command.DisableUpdate != agent.DisableUpdate)
                    {
@@ -652,7 +686,7 @@ namespace GitHub.Runner.Listener.Configuration
                {
                    var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
                    responseStatus = response.StatusCode;
-                    var githubRequestId = GetGitHubRequestId(response.Headers);
+                    var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);

                    if (response.IsSuccessStatusCode)
                    {
@@ -715,7 +749,7 @@ namespace GitHub.Runner.Listener.Configuration
                {
                    var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
                    responseStatus = response.StatusCode;
-                    var githubRequestId = GetGitHubRequestId(response.Headers);
+                    var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);

                    if (response.IsSuccessStatusCode)
                    {
@@ -745,13 +779,11 @@ namespace GitHub.Runner.Listener.Configuration
            return null;
        }

-        private string GetGitHubRequestId(HttpResponseHeaders headers)
+        // Temporary hack for sending legacy host id using v2 flow
+        private string GetHostId(string accessToken)
        {
-            if (headers.TryGetValues("x-github-request-id", out var headerValues))
-            {
-                return headerValues.FirstOrDefault();
-            }
-            return string.Empty;
+            var claims = JsonWebToken.Create(accessToken).ExtractClaims();
+            return claims.FirstOrDefault(x => x.Type == "aud").Value.Split(':').LastOrDefault();
        }
    }
 }

View File

@@ -1,4 +1,4 @@
 using System;
 using System.Collections.Generic;
 using System.Runtime.Serialization;
 using GitHub.Runner.Common;
@@ -20,8 +20,8 @@ namespace GitHub.Runner.Listener.Configuration
    {
        public static readonly Dictionary<string, Type> CredentialTypes = new(StringComparer.OrdinalIgnoreCase)
        {
-            { Constants.Configuration.OAuth, typeof(OAuthCredential)},
-            { Constants.Configuration.OAuthAccessToken, typeof(OAuthAccessTokenCredential)},
+            { Constants.Configuration.OAuth, typeof(OAuthCredential) },
+            { Constants.Configuration.OAuthAccessToken, typeof(OAuthAccessTokenCredential) },
        };

        public ICredentialProvider GetCredentialProvider(string credType)
@@ -93,6 +93,9 @@ namespace GitHub.Runner.Listener.Configuration
        [DataMember(Name = "token")]
        public string Token { get; set; }

+        [DataMember(Name = "use_v2_flow")]
+        public bool UseV2Flow { get; set; }
+
        public VssCredentials ToVssCredentials()
        {
            ArgUtil.NotNullOrEmpty(TokenSchema, nameof(TokenSchema));

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]

View File

@@ -7,6 +7,7 @@ using System.Text;
using System.Text.RegularExpressions; using System.Text.RegularExpressions;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Common.Util; using GitHub.Runner.Common.Util;
@@ -58,6 +59,8 @@ namespace GitHub.Runner.Listener
public event EventHandler<JobStatusEventArgs> JobStatus; public event EventHandler<JobStatusEventArgs> JobStatus;
private bool _isRunServiceJob;
public override void Initialize(IHostContext hostContext) public override void Initialize(IHostContext hostContext)
{ {
base.Initialize(hostContext); base.Initialize(hostContext);
@@ -86,6 +89,8 @@ namespace GitHub.Runner.Listener
{ {
Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received."); Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
_isRunServiceJob = MessageUtil.IsRunServiceJob(jobRequestMessage.MessageType);
WorkerDispatcher currentDispatch = null; WorkerDispatcher currentDispatch = null;
if (_jobDispatchedQueue.Count > 0) if (_jobDispatchedQueue.Count > 0)
{ {
@@ -239,6 +244,13 @@ namespace GitHub.Runner.Listener
return; return;
} }
if (this._isRunServiceJob)
{
Trace.Error($"We are not yet checking the state of jobrequest {jobDispatch.JobId} status. Cancel running worker right away.");
jobDispatch.WorkerCancellationTokenSource.Cancel();
return;
}
// based on the current design, server will only send one job for a given runner at a time. // based on the current design, server will only send one job for a given runner at a time.
// if the runner received a new job request while a previous job request is still running, this typically indicates two situations // if the runner received a new job request while a previous job request is still running, this typically indicates two situations
// 1. a runner bug caused a server and runner mismatch on the state of the job request, e.g. the runner didn't renew the jobrequest // 1. a runner bug caused a server and runner mismatch on the state of the job request, e.g. the runner didn't renew the jobrequest
@@ -367,9 +379,11 @@ namespace GitHub.Runner.Listener
long requestId = message.RequestId; long requestId = message.RequestId;
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
// start renew job request // start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}."); Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token); Task renewJobRequest = RenewJobRequestAsync(message, systemConnection, _poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// wait till first renew succeed or job request is cancelled // wait till first renew succeed or job request is cancelled
// not even start worker if the first renew fail // not even start worker if the first renew fail
@@ -426,7 +440,7 @@ namespace GitHub.Runner.Listener
{ {
workerOutput.Add(stdout.Data); workerOutput.Add(stdout.Data);
} }
if (printToStdout) if (printToStdout)
{ {
term.WriteLine(stdout.Data, skipTracing: true); term.WriteLine(stdout.Data, skipTracing: true);
@@ -508,7 +522,6 @@ namespace GitHub.Runner.Listener
// we get first jobrequest renew succeed and start the worker process with the job message. // we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner. // send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"]; var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url); notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
@@ -531,11 +544,8 @@ namespace GitHub.Runner.Listener
detailInfo = string.Join(Environment.NewLine, workerOutput); detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result."); Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
var jobServer = await InitializeJobServerAsync(systemConnection);
await LogWorkerProcessUnhandledException(jobServer, message, detailInfo); await LogWorkerProcessUnhandledException(jobServer, message, detailInfo);
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space. // Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
@@ -675,9 +685,128 @@ namespace GitHub.Runner.Listener
} }
} }
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token) internal async Task RenewJobRequestAsync(Pipelines.AgentJobRequestMessage message, ServiceEndpoint systemConnection, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
if (this._isRunServiceJob)
{
var runServer = await GetRunServerAsync(systemConnection);
await RenewJobRequestAsync(runServer, message.Plan.PlanId, message.JobId, firstJobRequestRenewed, token);
}
else
{
var runnerServer = HostContext.GetService<IRunnerServer>();
await RenewJobRequestAsync(runnerServer, poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, token);
}
}
private async Task RenewJobRequestAsync(IRunServer runServer, Guid planId, Guid jobId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
TaskAgentJobRequest request = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
// renew lock during job running.
// stop renew only if cancellation token for lock renew task been signal or exception still happen after retry.
while (!token.IsCancellationRequested)
{
try
{
var renewResponse = await runServer.RenewJobAsync(planId, jobId, token);
Trace.Info($"Successfully renew job {jobId}, job is valid till {renewResponse.LockedUntil}");
if (!firstJobRequestRenewed.Task.IsCompleted)
{
// fire first renew succeed event.
firstJobRequestRenewed.TrySetResult(0);
}
if (encounteringError > 0)
{
encounteringError = 0;
HostContext.WritePerfCounter("JobRenewRecovered");
}
// renew again after 60 sec delay
await HostContext.Delay(TimeSpan.FromSeconds(60), token);
}
catch (TaskOrchestrationJobNotFoundException)
{
// no need for retry. the job is not valid anymore.
Trace.Info($"TaskAgentJobNotFoundException received when renew job {jobId}, job is no longer valid, stop renew job request.");
return;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
// OperationCanceledException may caused by http timeout or _lockRenewalTokenSource.Cance();
// Stop renew only on cancellation token fired.
Trace.Info($"job renew has been cancelled, stop renew job {jobId}.");
return;
}
catch (Exception ex)
{
Trace.Error($"Catch exception during renew runner job {jobId}.");
Trace.Error(ex);
encounteringError++;
// retry
TimeSpan remainingTime = TimeSpan.Zero;
if (!firstJobRequestRenewed.Task.IsCompleted)
{
// retry 5 times every 10 sec for the first renew
if (firstRenewRetryLimit-- > 0)
{
remainingTime = TimeSpan.FromSeconds(10);
}
}
else
{
// retry till reach lockeduntil + 5 mins extra buffer.
remainingTime = request.LockedUntil.Value + TimeSpan.FromMinutes(5) - DateTime.UtcNow;
}
if (remainingTime > TimeSpan.Zero)
{
TimeSpan delayTime;
if (!firstJobRequestRenewed.Task.IsCompleted)
{
Trace.Info($"Retrying lock renewal for job {jobId}. The first job renew request has failed.");
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10));
}
else
{
Trace.Info($"Retrying lock renewal for job {jobId}. Job is valid until {request.LockedUntil.Value}.");
if (encounteringError > 5)
{
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30));
}
else
{
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
}
}
try
{
// back-off before next retry.
await HostContext.Delay(delayTime, token);
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info($"job renew has been cancelled, stop renew job {jobId}.");
}
}
else
{
Trace.Info($"Lock renewal has run out of retry, stop renew lock for job {jobId}.");
HostContext.WritePerfCounter("JobRenewReachLimit");
return;
}
}
}
}
private async Task RenewJobRequestAsync(IRunnerServer runnerServer, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
TaskAgentJobRequest request = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
@@ -840,90 +969,93 @@ namespace GitHub.Runner.Listener
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection));
ArgUtil.NotNull(systemConnection, nameof(systemConnection));
var server = await InitializeJobServerAsync(systemConnection);
if (server is IJobServer jobServer)
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
var updatedRecords = new List<TimelineRecord>();
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
var logRecords = new Dictionary<Guid, TimelineRecord>();
foreach (var log in logs)
{
var logName = Path.GetFileNameWithoutExtension(log);
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
if (logNameParts.Length != 3)
{
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
var logPageSeperator = logName.IndexOf('_');
var logRecordId = Guid.Empty;
var pageNumber = 0;
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
{
Trace.Warning($"log file '{log}' does not belong to the current job");
continue;
}
if (!Guid.TryParse(logNameParts[1], out logRecordId))
{
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
if (!int.TryParse(logNameParts[2], out pageNumber))
{
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
if (record != null)
{
if (!logPages.ContainsKey(record.Id))
{
logPages[record.Id] = new Dictionary<int, string>();
logRecords[record.Id] = record;
}
logPages[record.Id][pageNumber] = log;
}
}
foreach (var pages in logPages)
{
var record = logRecords[pages.Key];
if (record.Log == null)
{
// Create the log
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
// Need to post timeline record updates to reflect the log creation
updatedRecords.Add(record.Clone());
}
for (var i = 1; i <= pages.Value.Count; i++)
{
var logFile = pages.Value[i];
// Upload the contents
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
}
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
IOUtil.DeleteFile(logFile);
}
}
if (updatedRecords.Count > 0)
{
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
}
}
else
{
Trace.Info("Job server does not support log upload yet.");
}
}
catch (Exception ex)
@@ -943,6 +1075,12 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
Trace.Verbose($"Skip FinishAgentRequest call from Listener because MessageType is {message.MessageType}");
return;
}
var runnerServer = HostContext.GetService<IRunnerServer>();
int completeJobRequestRetryLimit = 5;
List<Exception> exceptions = new();
@@ -979,66 +1117,117 @@ namespace GitHub.Runner.Listener
}
// log an error issue to job level timeline record
private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string errorMessage)
{
if (server is IJobServer jobServer)
{
try
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
try
{
if (!string.IsNullOrEmpty(errorMessage) &&
message.Variables.TryGetValue("DistributedTask.EnableRunnerIPCDebug", out var enableRunnerIPCDebug) &&
StringUtil.ConvertToBoolean(enableRunnerIPCDebug.Value))
{
// the trace should be best effort and not affect any job result
var match = _invalidJsonRegex.Match(errorMessage);
if (match.Success &&
match.Groups.Count == 2)
{
var jsonPosition = int.Parse(match.Groups[1].Value);
var serializedJobMessage = JsonUtility.ToString(message);
var originalJson = serializedJobMessage.Substring(jsonPosition - 10, 20);
errorMessage = $"Runner sent Json at position '{jsonPosition}': {originalJson} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(originalJson))})\n{errorMessage}";
}
}
}
catch (Exception ex)
{
Trace.Error(ex);
errorMessage = $"Fail to check json IPC error: {ex.Message}\n{errorMessage}";
}
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
}
}
else
{
Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", errorMessage);
return;
}
}
// raise job completed event to fail the job.
private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message)
{
if (server is IJobServer jobServer)
{
try
{
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
}
else if (server is IRunServer runServer)
{
try
{
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
}
}
else
{
throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported.");
}
}
private async Task<IRunnerService> InitializeJobServerAsync(ServiceEndpoint systemConnection)
{
if (this._isRunServiceJob)
{
return await GetRunServerAsync(systemConnection);
}
else
{
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
return jobServer;
}
}
private async Task<IRunServer> GetRunServerAsync(ServiceEndpoint systemConnection)
{
var runServer = HostContext.GetService<IRunServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
await runServer.ConnectAsync(systemConnection.Url, jobServerCredential);
return runServer;
}
private class WorkerDispatcher : IDisposable
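For context, the renew loops above retry on a predictable schedule: until the first renewal succeeds they retry up to five times with a short randomized delay, afterwards they back off 5-15 seconds (widening to 15-30 seconds after more than five consecutive failures), and they give up once the lock expiry plus a five minute buffer has passed. The following is a minimal, self-contained sketch of that delay policy only; it is not part of the diff, and the names RenewBackoffSketch, NextDelay, and RandomBetween are invented here for illustration.

using System;

public static class RenewBackoffSketch
{
    private static readonly Random s_random = new Random();

    // Mirrors the delay selection in the renew loops above; this is a sketch, not the runner's BackoffTimerHelper.
    public static TimeSpan NextDelay(bool firstRenewCompleted, int consecutiveErrors)
    {
        if (!firstRenewCompleted)
        {
            // Quick randomized retries until the very first renewal succeeds.
            return RandomBetween(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10));
        }

        // After the first success, back off harder once failures keep piling up.
        return consecutiveErrors > 5
            ? RandomBetween(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30))
            : RandomBetween(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
    }

    private static TimeSpan RandomBetween(TimeSpan min, TimeSpan max)
    {
        var range = max - min;
        return min + TimeSpan.FromMilliseconds(s_random.NextDouble() * range.TotalMilliseconds);
    }
}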

View File

@@ -182,7 +182,7 @@ namespace GitHub.Runner.Listener
try
{
_getMessagesTokenSource?.Cancel();
}
catch (ObjectDisposedException)
{
Trace.Info("_getMessagesTokenSource is already disposed.");
@@ -245,6 +245,10 @@ namespace GitHub.Runner.Listener
_accessTokenRevoked = true;
throw;
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during get next message.");
@@ -289,7 +293,7 @@ namespace GitHub.Runner.Listener
await HostContext.Delay(_getNextMessageRetryInterval, token);
}
}
finally
{
_getMessagesTokenSource.Dispose();
}

View File

@@ -6,6 +6,7 @@ using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Listener
{
@@ -58,7 +59,7 @@ namespace GitHub.Runner.Listener
terminal.WriteLine("This runner version is built for Windows. Please install a correct build for your OS.");
return Constants.Runner.ReturnCode.TerminatedError;
}
#if ARM64
// A little hacky, but windows gives no way to differentiate between windows 10 and 11.
// By default only 11 supports native x64 app emulation on arm, so we only want to support windows 11
// https://docs.microsoft.com/en-us/windows/arm/overview#build-windows-apps-that-run-on-arm
@@ -69,7 +70,7 @@ namespace GitHub.Runner.Listener
terminal.WriteLine("Win-arm64 runners require windows 11 or later. Please upgrade your operating system.");
return Constants.Runner.ReturnCode.TerminatedError;
}
#endif
break;
default:
terminal.WriteLine($"Running the runner on this platform is not supported. The current platform is {RuntimeInformation.OSDescription} and it was built for {Constants.Runner.Platform.ToString()}.");
@@ -137,6 +138,12 @@ namespace GitHub.Runner.Listener
}
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
terminal.WriteError($"An error occurred: {e.Message}");
trace.Error(e);
return Constants.Runner.ReturnCode.TerminatedError;
}
catch (Exception e)
{
terminal.WriteError($"An error occurred: {e.Message}");

View File

@@ -9,6 +9,7 @@ using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Listener.Check;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
@@ -136,7 +137,7 @@ namespace GitHub.Runner.Listener
if (command.Remove)
{
// only remove local config files and exit
if (command.RemoveLocalConfig)
{
configManager.DeleteLocalRunnerConfig();
return Constants.Runner.ReturnCode.Success;
@@ -502,7 +503,7 @@ namespace GitHub.Runner.Listener
}
}
// Broker flow
else if (MessageUtil.IsRunServiceJob(message.MessageType))
{
if (autoUpdateInProgress || runOnceJobReceived)
{

View File

@@ -164,7 +164,6 @@ namespace GitHub.Runner.Sdk
{
continue;
}
_noProxyList.Add(noProxyInfo);
}
}
@@ -207,6 +206,11 @@ namespace GitHub.Runner.Sdk
{
foreach (var noProxy in _noProxyList)
{
// bypass on wildcard no_proxy
if (string.Equals(noProxy.Host, "*", StringComparison.OrdinalIgnoreCase))
{
return true;
}
var matchHost = false;
var matchPort = false;

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
@@ -81,7 +81,7 @@ namespace GitHub.Runner.Worker
// logging
long Write(string tag, string message);
void QueueAttachFile(string type, string name, string filePath);
void QueueSummaryFile(string name, string filePath, Guid stepRecordId);
// timeline record update methods
void Start(string currentOperation = null);
@@ -871,8 +871,7 @@ namespace GitHub.Runner.Worker
{
throw new FileNotFoundException($"Can't upload (name:{name}) file: {filePath}. File does not exist.");
}
_jobServerQueue.QueueResultsUpload(stepRecordId, name, filePath, ChecksAttachmentType.StepSummary, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0);
_jobServerQueue.QueueResultsUpload(stepRecordId, name, filePath, ChecksAttachmentType.StepSummary, deleteSource: false, finalize: false, firstBlock: false);
}
// Add OnMatcherChanged

View File

@@ -6,6 +6,7 @@ using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
@@ -19,7 +20,7 @@ namespace GitHub.Runner.Worker
[ServiceLocator(Default = typeof(JobRunner))]
public interface IJobRunner : IRunnerService
{
Task<TaskResult> RunAsync(AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken);
}
public sealed class JobRunner : RunnerService, IJobRunner
@@ -28,7 +29,7 @@ namespace GitHub.Runner.Worker
private RunnerSettings _runnerSettings;
private ITempDirectoryManager _tempDirectoryManager;
public async Task<TaskResult> RunAsync(AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken)
{
// Validate parameters.
Trace.Entering();
@@ -42,14 +43,14 @@ namespace GitHub.Runner.Worker
IRunnerService server = null;
ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (MessageUtil.IsRunServiceJob(message.MessageType))
{
var runServer = HostContext.GetService<IRunServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
await runServer.ConnectAsync(systemConnection.Url, jobServerCredential);
server = runServer;
}
else
{
// Setup the job server and job server queue.
var jobServer = HostContext.GetService<IJobServer>();
@@ -65,7 +66,7 @@ namespace GitHub.Runner.Worker
_jobServerQueue.Start(message);
server = jobServer;
}
HostContext.WritePerfCounter($"WorkerJobServerQueueStarted_{message.RequestId.ToString()}");

View File

@@ -455,7 +455,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
private readonly String[] s_expressionValueNames = new[]
{
PipelineTemplateConstants.GitHub,
PipelineTemplateConstants.Strategy,
PipelineTemplateConstants.Matrix,
PipelineTemplateConstants.Needs,

View File

@@ -222,6 +222,9 @@
},
"job-outputs": {
"context": [
"matrix"
],
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string-runner-context"

View File

@@ -0,0 +1,48 @@
using GitHub.Services.WebApi;
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using System.Linq;
namespace GitHub.DistributedTask.WebApi
{
public class ListRunnersResponse
{
public ListRunnersResponse()
{
}
public ListRunnersResponse(ListRunnersResponse responseToBeCloned)
{
this.TotalCount = responseToBeCloned.TotalCount;
this.Runners = responseToBeCloned.Runners;
}
[JsonProperty("total_count")]
public int TotalCount
{
get;
set;
}
[JsonProperty("runners")]
public List<Runner> Runners
{
get;
set;
}
public ListRunnersResponse Clone()
{
return new ListRunnersResponse(this);
}
public List<TaskAgent> ToTaskAgents()
{
return Runners.Select(runner => new TaskAgent() { Name = runner.Name }).ToList();
}
}
}
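For context on the dotcom registration contract above, here is a small hedged example that is not part of the diff: it shows how a payload shaped like the JsonProperty names above deserializes with Newtonsoft.Json, and how ToTaskAgents() maps it onto TaskAgent objects. The sample JSON is a minimal illustration, and the example assumes these contracts are referenced as shown.

using System;
using GitHub.DistributedTask.WebApi;
using Newtonsoft.Json;

public static class ListRunnersExample
{
    public static void Main()
    {
        // A minimal runner-admin style payload matching the "total_count"/"runners" property names above.
        var json = @"{ ""total_count"": 1, ""runners"": [ { ""id"": 42, ""name"": ""my-runner"" } ] }";
        var response = JsonConvert.DeserializeObject<ListRunnersResponse>(json);
        Console.WriteLine(response.TotalCount);

        // ToTaskAgents() keeps only the runner names, which is what the configuration flow consumes.
        foreach (var agent in response.ToTaskAgents())
        {
            Console.WriteLine(agent.Name);
        }
    }
}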

View File

@@ -0,0 +1,47 @@
using System;
using Newtonsoft.Json;
namespace GitHub.DistributedTask.WebApi
{
public class Runner
{
public class Authorization
{
[JsonProperty("authorization_url")]
public Uri AuthorizationUrl
{
get;
internal set;
}
[JsonProperty("client_id")]
public string ClientId
{
get;
internal set;
}
}
[JsonProperty("name")]
public string Name
{
get;
internal set;
}
[JsonProperty("id")]
public Int32 Id
{
get;
internal set;
}
[JsonProperty("authorization")]
public Authorization RunnerAuthorization
{
get;
internal set;
}
}
}

View File

@@ -0,0 +1,98 @@
using GitHub.Services.WebApi;
using System;
using System.Runtime.Serialization;
using System.ComponentModel;
using System.Collections.Generic;
using Newtonsoft.Json;
using System.Linq;
namespace GitHub.DistributedTask.WebApi
{
/// <summary>
/// An organization-level grouping of runners.
/// </summary>
[DataContract]
public class RunnerGroup
{
internal RunnerGroup()
{
}
public RunnerGroup(String name)
{
this.Name = name;
}
private RunnerGroup(RunnerGroup poolToBeCloned)
{
this.Id = poolToBeCloned.Id;
this.IsHosted = poolToBeCloned.IsHosted;
this.Name = poolToBeCloned.Name;
this.IsDefault = poolToBeCloned.IsDefault;
}
[DataMember(EmitDefaultValue = false)]
[JsonProperty("id")]
public Int32 Id
{
get;
set;
}
[DataMember(EmitDefaultValue = false)]
[JsonProperty("name")]
public String Name
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether or not this pool is internal and can't be modified by users
/// </summary>
[DataMember]
[JsonProperty("default")]
public bool IsDefault
{
get;
set;
}
/// <summary>
/// Gets or sets a value indicating whether or not this pool is managed by the service.
/// </summary>
[DataMember]
[JsonProperty("is_hosted")]
public Boolean IsHosted
{
get;
set;
}
}
public class RunnerGroupList
{
public RunnerGroupList()
{
this.RunnerGroups = new List<RunnerGroup>();
}
public List<TaskAgentPool> ToAgentPoolList()
{
var agentPools = this.RunnerGroups.Select(x => new TaskAgentPool(x.Name)
{
Id = x.Id,
IsHosted = x.IsHosted,
IsInternal = x.IsDefault
}).ToList();
return agentPools;
}
[JsonProperty("runner_groups")]
public List<RunnerGroup> RunnerGroups { get; set; }
[JsonProperty("total_count")]
public int Count { get; set; }
}
}

View File

@@ -1,4 +1,4 @@
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using System;
using System.Runtime.Serialization;

View File

@@ -0,0 +1,15 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.Actions.RunService.WebApi
{
[DataContract]
public class RenewJobRequest
{
[DataMember(Name = "planId", EmitDefaultValue = false)]
public Guid PlanID { get; set; }
[DataMember(Name = "jobId", EmitDefaultValue = false)]
public Guid JobID { get; set; }
}
}

View File

@@ -0,0 +1,16 @@
using System;
using System.Runtime.Serialization;
namespace Sdk.RSWebApi.Contracts
{
[DataContract]
public class RenewJobResponse
{
[DataMember]
public DateTime LockedUntil
{
get;
internal set;
}
}
}
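The two contracts above define the renewjob exchange used by the run service client later in this diff. As a hedged illustration that is not part of the diff, the snippet below serializes a RenewJobRequest with DataContractJsonSerializer purely to show the wire field names declared by the DataMember attributes; the real client sends the payload through VssJsonMediaTypeFormatter instead.

using System;
using System.IO;
using System.Runtime.Serialization.Json;
using System.Text;
using GitHub.Actions.RunService.WebApi;

public static class RenewJobPayloadExample
{
    public static void Main()
    {
        var request = new RenewJobRequest { PlanID = Guid.NewGuid(), JobID = Guid.NewGuid() };
        var serializer = new DataContractJsonSerializer(typeof(RenewJobRequest));
        using (var stream = new MemoryStream())
        {
            serializer.WriteObject(stream, request);
            // Prints a payload along the lines of {"jobId":"...","planId":"..."}.
            Console.WriteLine(Encoding.UTF8.GetString(stream.ToArray()));
        }
    }
}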

View File

@@ -8,6 +8,7 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi;
namespace GitHub.Actions.RunService.WebApi
@@ -98,6 +99,29 @@ namespace GitHub.Actions.RunService.WebApi
var requestContent = new ObjectContent<CompleteJobRequest>(payload, new VssJsonMediaTypeFormatter(true));
return SendAsync(
httpMethod,
requestUri,
content: requestContent,
cancellationToken: cancellationToken);
}
public Task<RenewJobResponse> RenewJobAsync(
Uri requestUri,
Guid planId,
Guid jobId,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
var payload = new RenewJobRequest()
{
PlanID = planId,
JobID = jobId
};
requestUri = new Uri(requestUri, "renewjob");
var requestContent = new ObjectContent<RenewJobRequest>(payload, new VssJsonMediaTypeFormatter(true));
return SendAsync<RenewJobResponse>(
httpMethod,
requestUri,
content: requestContent,

View File

@@ -28,6 +28,42 @@ namespace GitHub.Services.Results.Contracts
public string BlobStorageType;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class StepSummaryMetadataCreate
{
[DataMember]
public string StepBackendId;
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public long Size;
[DataMember]
public string UploadedAt;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedJobLogsURLRequest
{
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedJobLogsURLResponse
{
[DataMember]
public string LogsUrl;
[DataMember]
public string BlobStorageType;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedStepLogsURLRequest
@@ -47,41 +83,15 @@ namespace GitHub.Services.Results.Contracts
[DataMember]
public string LogsUrl;
[DataMember]
public string BlobStorageType;
[DataMember]
public long SoftSizeLimit;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class JobLogsMetadataCreate
{
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
@@ -94,7 +104,23 @@ namespace GitHub.Services.Results.Contracts
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class StepLogsMetadataCreate
{
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string StepBackendId;
[DataMember]
public string UploadedAt;
[DataMember]
public long LineCount;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class CreateMetadataResponse
{
[DataMember]
public bool Ok;

View File

@@ -24,55 +24,85 @@ namespace GitHub.Services.Results.Client
m_formatter = new JsonMediaTypeFormatter();
}
// Get Sas URL calls
private async Task<T> GetResultsSignedURLResponse<R, T>(Uri uri, CancellationToken cancellationToken, R request)
{
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri))
{
requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
requestMessage.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));
using (HttpContent content = new ObjectContent<R>(request, m_formatter))
{
requestMessage.Content = content;
using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
{
return await ReadJsonContentAsync<T>(response, cancellationToken);
}
}
}
}
private async Task<GetSignedStepSummaryURLResponse> GetStepSummaryUploadUrlAsync(string planId, string jobId, Guid stepId, CancellationToken cancellationToken)
{
var request = new GetSignedStepSummaryURLRequest()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString()
};
var getStepSummarySignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetStepSummarySignedBlobURL);
return await GetResultsSignedURLResponse<GetSignedStepSummaryURLRequest, GetSignedStepSummaryURLResponse>(getStepSummarySignedBlobURLEndpoint, cancellationToken, request);
}
private async Task<GetSignedStepLogsURLResponse> GetStepLogUploadUrlAsync(string planId, string jobId, Guid stepId, CancellationToken cancellationToken)
{
var request = new GetSignedStepLogsURLRequest()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString(),
};
var getStepLogsSignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetStepLogsSignedBlobURL);
return await GetResultsSignedURLResponse<GetSignedStepLogsURLRequest, GetSignedStepLogsURLResponse>(getStepLogsSignedBlobURLEndpoint, cancellationToken, request);
}
private async Task<GetSignedJobLogsURLResponse> GetJobLogUploadUrlAsync(string planId, string jobId, CancellationToken cancellationToken)
{
var request = new GetSignedJobLogsURLRequest()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
};
var getJobLogsSignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetJobLogsSignedBlobURL);
return await GetResultsSignedURLResponse<GetSignedJobLogsURLRequest, GetSignedJobLogsURLResponse>(getJobLogsSignedBlobURLEndpoint, cancellationToken, request);
}
// Create metadata calls
private async Task CreateMetadata<R>(Uri uri, CancellationToken cancellationToken, R request, string timestamp)
{
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri))
{
requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
requestMessage.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));
using (HttpContent content = new ObjectContent<R>(request, m_formatter))
{
requestMessage.Content = content;
using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
{
var jsonResponse = await ReadJsonContentAsync<CreateMetadataResponse>(response, cancellationToken);
if (!jsonResponse.Ok)
{
throw new Exception($"Failed to mark {typeof(R).Name} upload as complete, status code: {response.StatusCode}, ok: {jsonResponse.Ok}, timestamp: {timestamp}");
}
}
}
}
@@ -80,73 +110,52 @@ namespace GitHub.Services.Results.Client
private async Task StepSummaryUploadCompleteAsync(string planId, string jobId, Guid stepId, long size, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new StepSummaryMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString(),
Size = size,
UploadedAt = timestamp
};
var createStepSummaryMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateStepSummaryMetadata);
await CreateMetadata<StepSummaryMetadataCreate>(createStepSummaryMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task StepLogUploadCompleteAsync(string planId, string jobId, Guid stepId, long lineCount, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new StepLogsMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString(),
UploadedAt = timestamp,
LineCount = lineCount,
};
var createStepLogsMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateStepLogsMetadata);
await CreateMetadata<StepLogsMetadataCreate>(createStepLogsMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task JobLogUploadCompleteAsync(string planId, string jobId, long lineCount, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new JobLogsMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
UploadedAt = timestamp,
LineCount = lineCount,
};
var createJobLogsMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateJobLogsMetadata);
await CreateMetadata<JobLogsMetadataCreate>(createJobLogsMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task<HttpResponseMessage> UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
{
// Upload the file to the url
var request = new HttpRequestMessage(HttpMethod.Put, url)
@@ -156,7 +165,7 @@ namespace GitHub.Services.Results.Client
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob);
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
@@ -177,7 +186,7 @@ namespace GitHub.Services.Results.Client
};
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob);
request.Content.Headers.Add("Content-Length", "0");
}
@@ -190,7 +199,7 @@ namespace GitHub.Services.Results.Client
return response;
}
}
private async Task<HttpResponseMessage> UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken)
{
var comp = finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock";
@@ -203,7 +212,7 @@ namespace GitHub.Services.Results.Client
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add("Content-Length", fileSize.ToString());
request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString());
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
@@ -236,7 +245,7 @@ namespace GitHub.Services.Results.Client
// Upload the file
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
}
// Send step summary upload complete message
@@ -253,9 +262,6 @@ namespace GitHub.Services.Results.Client
throw new Exception("Failed to get step log upload url");
}
// Create the Append blob
if (firstBlock)
{
@@ -263,6 +269,7 @@ namespace GitHub.Services.Results.Client
}
// Upload content
var fileSize = new FileInfo(file).Length;
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, fileStream, finalize, fileSize, cancellationToken);
@@ -276,8 +283,59 @@ namespace GitHub.Services.Results.Client
}
}
// Handle file upload for job log
public async Task UploadResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
// Get the upload url
var uploadUrlResponse = await GetJobLogUploadUrlAsync(planId, jobId, cancellationToken);
if (uploadUrlResponse == null || uploadUrlResponse.LogsUrl == null)
{
throw new Exception("Failed to get job log upload url");
}
// Create the Append blob
if (firstBlock)
{
await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
}
// Upload content
var fileSize = new FileInfo(file).Length;
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, fileStream, finalize, fileSize, cancellationToken);
}
// Update metadata
if (finalize)
{
// Send step log upload complete message
await JobLogUploadCompleteAsync(planId, jobId, lineCount, cancellationToken);
}
}
private MediaTypeFormatter m_formatter;
private Uri m_resultsServiceUrl;
private string m_token;
}
// Constants specific to results
public static class Constants
{
public static readonly string TimestampFormat = "yyyy-MM-dd'T'HH:mm:ss.fffK";
public static readonly string ResultsReceiverTwirpEndpoint = "twirp/results.services.receiver.Receiver/";
public static readonly string GetStepSummarySignedBlobURL = ResultsReceiverTwirpEndpoint + "GetStepSummarySignedBlobURL";
public static readonly string CreateStepSummaryMetadata = ResultsReceiverTwirpEndpoint + "CreateStepSummaryMetadata";
public static readonly string GetStepLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetStepLogsSignedBlobURL";
public static readonly string CreateStepLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateStepLogsMetadata";
public static readonly string GetJobLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetJobLogsSignedBlobURL";
public static readonly string CreateJobLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateJobLogsMetadata";
public static readonly string AzureBlobSealedHeader = "x-ms-blob-sealed";
public static readonly string AzureBlobTypeHeader = "x-ms-blob-type";
public static readonly string AzureBlockBlob = "BlockBlob";
public static readonly string AzureAppendBlob = "AppendBlob";
}
}
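To show how the new job-log path in this client is meant to be driven, here is a hedged usage sketch that is not part of the diff: the first chunk creates the append blob without finalizing, and the last chunk seals the blob and posts the job-log metadata. The client type name ResultsHttpClient is an assumption for illustration, since the class declaration sits outside the visible hunks.

using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.Results.Client;

public static class JobLogUploadSketch
{
    // 'ResultsHttpClient' is an assumed type name for the results client shown above.
    public static async Task UploadInTwoChunksAsync(ResultsHttpClient client, string planId, string jobId, string firstChunkFile, string lastChunkFile, long totalLines)
    {
        // First block: create the append blob and upload the initial content; do not finalize yet.
        await client.UploadResultsJobLogAsync(planId, jobId, firstChunkFile, finalize: false, firstBlock: true, lineCount: 0, cancellationToken: CancellationToken.None);

        // Final block: append the remaining content, seal the blob, and record the job log metadata.
        await client.UploadResultsJobLogAsync(planId, jobId, lastChunkFile, finalize: true, firstBlock: false, lineCount: totalLines, cancellationToken: CancellationToken.None);
    }
}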

View File

@@ -19,6 +19,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
public class ConfigurationManagerL0
{
private Mock<IRunnerServer> _runnerServer;
private Mock<IRunnerDotcomServer> _dotcomServer;
private Mock<ILocationServer> _locationServer;
private Mock<ICredentialManager> _credMgr;
private Mock<IPromptManager> _promptManager;
@@ -55,6 +56,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
_store = new Mock<IConfigurationStore>();
_extnMgr = new Mock<IExtensionManager>();
_rsaKeyManager = new Mock<IRSAKeyManager>();
_dotcomServer = new Mock<IRunnerDotcomServer>();
#if OS_WINDOWS
_serviceControlManager = new Mock<IWindowsServiceControlManager>();
@@ -106,6 +108,10 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
_runnerServer.Setup(x => x.AddAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent));
_runnerServer.Setup(x => x.ReplaceAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent));
_dotcomServer.Setup(x => x.GetRunnersAsync(It.IsAny<int>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedAgents));
_dotcomServer.Setup(x => x.GetRunnerGroupsAsync(It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedPools));
_dotcomServer.Setup(x => x.AddRunnerAsync(It.IsAny<int>(), It.IsAny<TaskAgent>(), It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedAgent));
rsa = new RSACryptoServiceProvider(2048);
_rsaKeyManager.Setup(x => x.CreateKey()).Returns(rsa);
@@ -119,6 +125,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
tc.SetSingleton<IConfigurationStore>(_store.Object);
tc.SetSingleton<IExtensionManager>(_extnMgr.Object);
tc.SetSingleton<IRunnerServer>(_runnerServer.Object);
tc.SetSingleton<IRunnerDotcomServer>(_dotcomServer.Object);
tc.SetSingleton<ILocationServer>(_locationServer.Object);
#if OS_WINDOWS

View File

@@ -1,12 +1,17 @@
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Listener;
using GitHub.Runner.Listener.Configuration;
using GitHub.Services.WebApi;
using Moq;
using Sdk.RSWebApi.Contracts;
using Xunit;
using Pipelines = GitHub.DistributedTask.Pipelines;
@@ -18,6 +23,8 @@ namespace GitHub.Runner.Common.Tests.Listener
private Mock<IProcessChannel> _processChannel;
private Mock<IProcessInvoker> _processInvoker;
private Mock<IRunnerServer> _runnerServer;
private Mock<IRunServer> _runServer;
private Mock<IConfigurationStore> _configurationStore;
public JobDispatcherL0()
@@ -25,6 +32,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_processChannel = new Mock<IProcessChannel>();
_processInvoker = new Mock<IProcessInvoker>();
_runnerServer = new Mock<IRunnerServer>();
_runServer = new Mock<IRunServer>();
_configurationStore = new Mock<IConfigurationStore>();
}
@@ -139,7 +147,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully);
_runnerServer.Verify(x => x.RenewAgentRequestAsync(It.IsAny<int>(), It.IsAny<long>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Exactly(5));
@@ -197,7 +205,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -205,6 +213,75 @@ namespace GitHub.Runner.Common.Tests.Listener
             }
         }
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Runner")]
+        public async void DispatcherRenewJobOnRunServiceStopOnJobNotFoundExceptions()
+        {
+            //Arrange
+            using (var hc = new TestHostContext(this))
+            {
+                int poolId = 1;
+                Int64 requestId = 1000;
+                int count = 0;
+                var trace = hc.GetTrace(nameof(DispatcherRenewJobOnRunServiceStopOnJobNotFoundExceptions));
+                TaskCompletionSource<int> firstJobRequestRenewed = new();
+                CancellationTokenSource cancellationTokenSource = new();
+                TaskAgentJobRequest request = new();
+                PropertyInfo lockUntilProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
+                Assert.NotNull(lockUntilProperty);
+                lockUntilProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5));
+                hc.SetSingleton<IRunServer>(_runServer.Object);
+                hc.SetSingleton<IConfigurationStore>(_configurationStore.Object);
+                _configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 });
+                _ = _runServer.Setup(x => x.RenewJobAsync(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<CancellationToken>()))
+                    .Returns(() =>
+                    {
+                        count++;
+                        if (!firstJobRequestRenewed.Task.IsCompletedSuccessfully)
+                        {
+                            trace.Info("First renew happens.");
+                        }
+                        if (count < 5)
+                        {
+                            var response = new RenewJobResponse()
+                            {
+                                LockedUntil = request.LockedUntil.Value
+                            };
+                            return Task.FromResult<RenewJobResponse>(response);
+                        }
+                        else if (count == 5)
+                        {
+                            cancellationTokenSource.CancelAfter(10000);
+                            throw new TaskOrchestrationJobNotFoundException("");
+                        }
+                        else
+                        {
+                            throw new InvalidOperationException("Should not reach here.");
+                        }
+                    });
+                var jobDispatcher = new JobDispatcher();
+                jobDispatcher.Initialize(hc);
+                EnableRunServiceJobForJobDispatcher(jobDispatcher);
+                // Set the value of the _isRunServiceJob field to true
+                var isRunServiceJobField = typeof(JobDispatcher).GetField("_isRunServiceJob", BindingFlags.NonPublic | BindingFlags.Instance);
+                isRunServiceJobField.SetValue(jobDispatcher, true);
+                await jobDispatcher.RenewJobRequestAsync(GetAgentJobRequestMessage(), GetServiceEndpoint(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
+                Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
+                Assert.False(cancellationTokenSource.IsCancellationRequested);
+                _runServer.Verify(x => x.RenewJobAsync(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<CancellationToken>()), Times.Exactly(5));
+            }
+        }
         [Fact]
         [Trait("Level", "L0")]
         [Trait("Category", "Runner")]
@@ -256,7 +333,7 @@ namespace GitHub.Runner.Common.Tests.Listener
                 var jobDispatcher = new JobDispatcher();
                 jobDispatcher.Initialize(hc);
-                await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
+                await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
                 Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
                 Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -312,8 +389,9 @@ namespace GitHub.Runner.Common.Tests.Listener
                 var jobDispatcher = new JobDispatcher();
                 jobDispatcher.Initialize(hc);
                 // Act
-                await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
+                await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
                 // Assert
                 _configurationStore.Verify(x => x.SaveSettings(It.Is<RunnerSettings>(settings => settings.AgentName == newName)), Times.Once);
@@ -368,7 +446,7 @@ namespace GitHub.Runner.Common.Tests.Listener
                 jobDispatcher.Initialize(hc);
                 // Act
-                await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
+                await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
                 // Assert
                 _configurationStore.Verify(x => x.SaveSettings(It.IsAny<RunnerSettings>()), Times.Never);
@@ -421,7 +499,7 @@ namespace GitHub.Runner.Common.Tests.Listener
                 jobDispatcher.Initialize(hc);
                 // Act
-                await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
+                await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
                 // Assert
                 _configurationStore.Verify(x => x.SaveSettings(It.IsAny<RunnerSettings>()), Times.Never);
@@ -479,7 +557,7 @@ namespace GitHub.Runner.Common.Tests.Listener
                 var jobDispatcher = new JobDispatcher();
                 jobDispatcher.Initialize(hc);
-                await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
+                await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
                 Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
                 Assert.True(cancellationTokenSource.IsCancellationRequested);
@@ -536,7 +614,7 @@ namespace GitHub.Runner.Common.Tests.Listener
                 var jobDispatcher = new JobDispatcher();
                 jobDispatcher.Initialize(hc);
-                await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
+                await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
                 Assert.False(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should failed.");
                 Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -600,7 +678,7 @@ namespace GitHub.Runner.Common.Tests.Listener
                 var jobDispatcher = new JobDispatcher();
                 jobDispatcher.Initialize(hc);
-                await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
+                await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
                 Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
                 Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -659,5 +737,78 @@ namespace GitHub.Runner.Common.Tests.Listener
                 Assert.True(jobDispatcher.RunOnceJobCompleted.Task.Result, "JobDispatcher should set task complete token to 'TRUE' for one time agent.");
             }
         }
+        private static void EnableRunServiceJobForJobDispatcher(JobDispatcher jobDispatcher)
+        {
+            // Set the value of the _isRunServiceJob field to true
+            var isRunServiceJobField = typeof(JobDispatcher).GetField("_isRunServiceJob", BindingFlags.NonPublic | BindingFlags.Instance);
+            isRunServiceJobField.SetValue(jobDispatcher, true);
+        }
+        private static ServiceEndpoint GetServiceEndpoint()
+        {
+            var serviceEndpoint = new ServiceEndpoint
+            {
+                Authorization = new EndpointAuthorization
+                {
+                    Scheme = EndpointAuthorizationSchemes.OAuth
+                }
+            };
+            serviceEndpoint.Authorization.Parameters.Add("AccessToken", "token");
+            return serviceEndpoint;
+        }
+        private static AgentJobRequestMessage GetAgentJobRequestMessage()
+        {
+            var message = new AgentJobRequestMessage(
+                new TaskOrchestrationPlanReference()
+                {
+                    PlanType = "Build",
+                    PlanId = Guid.NewGuid(),
+                    Version = 1
+                },
+                new TimelineReference()
+                {
+                    Id = Guid.NewGuid()
+                },
+                Guid.NewGuid(),
+                "jobDisplayName",
+                "jobName",
+                null,
+                null,
+                new List<TemplateToken>(),
+                new Dictionary<string, VariableValue>()
+                {
+                    {
+                        "variables",
+                        new VariableValue()
+                        {
+                            IsSecret = false,
+                            Value = "variables"
+                        }
+                    }
+                },
+                new List<MaskHint>()
+                {
+                    new MaskHint()
+                    {
+                        Type = MaskType.Variable,
+                        Value = "maskHints"
+                    }
+                },
+                new JobResources(),
+                new DictionaryContextData(),
+                new WorkspaceOptions(),
+                new List<JobStep>(),
+                new List<string>()
+                {
+                    "fileTable"
+                },
+                null,
+                new List<TemplateToken>(),
+                new ActionsEnvironmentReference("env")
+            );
+            return message;
+        }
     }
 }
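The new DispatcherRenewJobOnRunServiceStopOnJobNotFoundExceptions test pins down the run-service renew behavior: renewals keep extending the lease while RenewJobAsync succeeds, and the dispatcher stops renewing (rather than retrying) once the service throws TaskOrchestrationJobNotFoundException. Below is a minimal, hypothetical sketch of that retry-and-stop pattern for illustration only; it is not the runner's actual JobDispatcher code, and RenewLoopSketch, renewOnce, and JobNotFoundException are stand-ins for the real IRunServer.RenewJobAsync call and exception type.

// Hypothetical sketch of the renew-until-gone pattern the test exercises; not the runner's code.
using System;
using System.Threading;
using System.Threading.Tasks;

public class JobNotFoundException : Exception { }

public static class RenewLoopSketch
{
    // renewOnce stands in for a service call that returns the new lease expiry or throws
    // JobNotFoundException once the job no longer exists on the server.
    public static async Task RunAsync(Func<CancellationToken, Task<DateTime>> renewOnce, CancellationToken token)
    {
        while (!token.IsCancellationRequested)
        {
            try
            {
                DateTime lockedUntil = await renewOnce(token);

                // Wake up roughly halfway through the lease window before renewing again.
                TimeSpan delay = TimeSpan.FromTicks(Math.Max(0L, (lockedUntil - DateTime.UtcNow).Ticks) / 2);
                await Task.Delay(delay, token);
            }
            catch (JobNotFoundException)
            {
                // The service no longer knows this job: stop renewing instead of retrying,
                // which is the behavior the test's Times.Exactly(5) verification relies on.
                return;
            }
            catch (OperationCanceledException) when (token.IsCancellationRequested)
            {
                return;
            }
        }
    }
}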


@@ -351,7 +351,7 @@ namespace GitHub.Runner.Common.Tests
             Assert.False(proxy.IsBypassed(new Uri("https://actions.com")));
             Assert.False(proxy.IsBypassed(new Uri("https://ggithub.com")));
             Assert.False(proxy.IsBypassed(new Uri("https://github.comm")));
-            Assert.False(proxy.IsBypassed(new Uri("https://google.com")));
+            Assert.False(proxy.IsBypassed(new Uri("https://google.com"))); // no_proxy has '.google.com', specifying only subdomains bypass
             Assert.False(proxy.IsBypassed(new Uri("https://example.com")));
             Assert.False(proxy.IsBypassed(new Uri("http://example.com:333")));
             Assert.False(proxy.IsBypassed(new Uri("http://192.168.0.123:123")));
@@ -374,6 +374,76 @@ namespace GitHub.Runner.Common.Tests
                 CleanProxyEnv();
             }
         }
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Common")]
+        public void BypassAllOnWildcardNoProxy()
+        {
+            try
+            {
+                Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
+                Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
+                Environment.SetEnvironmentVariable("no_proxy", "example.com, * , example2.com");
+                var proxy = new RunnerWebProxy();
+                Assert.True(proxy.IsBypassed(new Uri("http://actions.com")));
+                Assert.True(proxy.IsBypassed(new Uri("http://localhost")));
+                Assert.True(proxy.IsBypassed(new Uri("http://127.0.0.1:8080")));
+                Assert.True(proxy.IsBypassed(new Uri("https://actions.com")));
+                Assert.True(proxy.IsBypassed(new Uri("https://localhost")));
+                Assert.True(proxy.IsBypassed(new Uri("https://127.0.0.1:8080")));
+            }
+            finally
+            {
+                CleanProxyEnv();
+            }
+        }
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Common")]
+        public void IgnoreWildcardInNoProxySubdomain()
+        {
+            try
+            {
+                Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
+                Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
+                Environment.SetEnvironmentVariable("no_proxy", "*.example.com");
+                var proxy = new RunnerWebProxy();
+                Assert.False(proxy.IsBypassed(new Uri("http://sub.example.com")));
+                Assert.False(proxy.IsBypassed(new Uri("http://example.com")));
+            }
+            finally
+            {
+                CleanProxyEnv();
+            }
+        }
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Common")]
+        public void WildcardNoProxyWorksWhenOtherNoProxyAreAround()
+        {
+            try
+            {
+                Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
+                Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
+                Environment.SetEnvironmentVariable("no_proxy", "example.com,*,example2.com");
+                var proxy = new RunnerWebProxy();
+                Assert.True(proxy.IsBypassed(new Uri("http://actions.com")));
+                Assert.True(proxy.IsBypassed(new Uri("http://localhost")));
+                Assert.True(proxy.IsBypassed(new Uri("http://127.0.0.1:8080")));
+                Assert.True(proxy.IsBypassed(new Uri("https://actions.com")));
+                Assert.True(proxy.IsBypassed(new Uri("https://localhost")));
+                Assert.True(proxy.IsBypassed(new Uri("https://127.0.0.1:8080")));
+            }
+            finally
+            {
+                CleanProxyEnv();
+            }
+        }
         [Fact]
         [Trait("Level", "L0")]


@@ -25,25 +25,25 @@ runs:
     - run: exit ${{ inputs.exit-code }}
       shell: bash
-    - run: echo "::set-output name=default::true"
+    - run: echo "default=true" >> $GITHUB_OUTPUT
       id: default-conditional
       shell: bash
-    - run: echo "::set-output name=success::true"
+    - run: echo "success=true" >> $GITHUB_OUTPUT
       id: success-conditional
       shell: bash
       if: success()
-    - run: echo "::set-output name=failure::true"
+    - run: echo "failure=true" >> $GITHUB_OUTPUT
       id: failure-conditional
       shell: bash
       if: failure()
-    - run: echo "::set-output name=always::true"
+    - run: echo "always=true" >> $GITHUB_OUTPUT
       id: always-conditional
       shell: bash
       if: always()
     - run: echo "failed"
       shell: bash
       if: ${{ inputs.exit-code == 1 && failure() }}
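These steps swap the deprecated ::set-output workflow command for the GITHUB_OUTPUT environment file. For reference, a minimal sketch of writing an output this way and reading it from a later step; the step id "produce" and output name "build-id" are illustrative and not taken from this action:

    - run: echo "build-id=12345" >> "$GITHUB_OUTPUT"   # append name=value to the step's output file
      id: produce
      shell: bash
    - run: echo "previous step set build-id=${{ steps.produce.outputs.build-id }}"
      shell: bash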


@@ -203,6 +203,13 @@ function runtest ()
     dotnet msbuild -t:test -p:PackageRuntime="${RUNTIME_ID}" -p:BUILDCONFIG="${BUILD_CONFIG}" -p:RunnerVersion="${RUNNER_VERSION}" ./dir.proj || failed "failed tests"
 }
+function format()
+{
+    heading "Formatting..."
+    files="$(git status -s "*.cs" | awk '{print $2}' | tr '\n' ' ')"
+    dotnet format ${SCRIPT_DIR}/ActionsRunner.sln --exclude / --include $files || failed "failed formatting"
+}
 function package ()
 {
     if [ ! -d "${LAYOUT_DIR}/bin" ]; then
@@ -360,7 +367,9 @@ case $DEV_CMD in
"l") layout;; "l") layout;;
"package") package;; "package") package;;
"p") package;; "p") package;;
*) echo "Invalid cmd. Use build(b), test(t), layout(l) or package(p)";; "format") format;;
"f") format;;
*) echo "Invalid cmd. Use build(b), test(t), layout(l), package(p), or format(f)";;
esac esac
popd popd
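With the new case arms, formatting is invoked like the other subcommands of this dev script (the ./dev.sh filename below is assumed; only the "format"/"f" arguments come from the diff itself):

# run from the directory containing the dev script
./dev.sh format   # long form
./dev.sh f        # short alias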


@@ -1 +1 @@
-2.302.0
+2.303.0