Compare commits

...

12 Commits

Author SHA1 Message Date
Francesco Renzi
e676c78718 Update releaseVersion to 2.303.0 2023-03-10 11:00:35 +00:00
Tingluo Huang
81b07eb1c4 Prepare runner release 2.303.0 (#2482) 2023-03-10 10:45:56 +00:00
Jongwoo Han
514ecec5a3 Replace deprecated command with environment file (#2429)
Signed-off-by: jongwooo <jongwooo.han@gmail.com>
2023-03-09 22:09:05 -05:00
Ajay
128b212b13 Support matrix context in output keys (#2477) 2023-03-09 21:30:28 -05:00
Nikola Jokic
2dfa28e6e0 Add update certificates to ./run.sh if RUNNER_UPDATE_CA_CERTS env is set (#2471)
* Included entrypoint that will update certs and run ./run.sh

* update ca if RUNNER_UPDATE_CA env is set

* changed env variable to RUNNER_UPDATE_TRUST_STORE

* moved entrypoint to be run.sh, removed Dockerfile entrypoint, added envvar that will update certs

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Bassem Dghaidi <568794+Link-@users.noreply.github.com>

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Bassem Dghaidi <568794+Link-@users.noreply.github.com>

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Bassem Dghaidi <568794+Link-@users.noreply.github.com>

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Tingluo Huang <tingluohuang@github.com>

* Update src/Misc/layoutroot/run.sh

Co-authored-by: Tingluo Huang <tingluohuang@github.com>

* removed doc comment on func

---------

Co-authored-by: Bassem Dghaidi <568794+Link-@users.noreply.github.com>
Co-authored-by: Tingluo Huang <tingluohuang@github.com>
2023-03-08 12:29:55 -05:00
Nikola Jokic
fd96246580 Exit on deprecation error (#2299)
* terminate the runner on deprecation message

* added TaskAgentVersion exception to catch deprecation

* AccessDenied exception with inner exception InvalidTaskAgent

* Access denied exception in program and in message listener

* Fixed copy

* remove trace message from message listener
2023-03-07 14:09:10 +01:00
Ferenc Hammerl
8ef48200b4 Bypass all proxies for all hosts if no_proxy='*' is set (#2395)
* Bypass top level domain even if no_proxy specified it with leading '.'

E.g. no_proxy='.github.com' will now bypass github.com.

* Bypass proxy on all hosts when no_proxy is * (wildcard)

* Undo '.' stripping

* Simplify unit tests

* Respect wildcard even if it's one of many no_proxy items
2023-03-06 11:01:45 +01:00
Tingluo Huang
d61b27b839 Change runner image to make user/folder align with ubuntu-latest hosted runner. (#2469) 2023-03-02 13:42:23 -05:00
Nikola Jokic
542e8a3c98 Runner service exit after consecutive re-try exits (#2426)
* Runner service exit after consecutive re-try exits

* Rename failure counts and include reset count on runner listening for jobs

* Changed from graceful shutdown to stopping=true
2023-02-28 16:00:36 +01:00
Yashwanth Anantharaju
e8975514fd call run service renewjob (#2461)
* call run service renewjob

* format

* formatting

* make it private and expose internals

* lint

* fix exception class

* lint

* fix test as well
2023-02-27 16:50:28 +00:00
Yang Cao
0befa62f64 Add job log upload support (#2447)
* Refactor and add job log upload support

* Rename method to be consistent
2023-02-21 09:55:47 -05:00
Yang Cao
aaf02ab34c Properly guard upload (#2439)
* Revert "Revert "Uploading step logs to Results as well  (#2422)" (#2437)"

This reverts commit 8c096baf49.

* Properly guard the upload to results feature

* Delete skipped file if deletesource is true
2023-02-17 10:31:41 -05:00
30 changed files with 1247 additions and 259 deletions

View File

@@ -131,7 +131,7 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha
name: Compute SHA256
@@ -140,8 +140,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha_noexternals
name: Compute SHA256
@@ -150,8 +150,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha_noruntime
name: Compute SHA256
@@ -160,8 +160,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
shell: bash
id: sha_noruntime_noexternals
name: Compute SHA256

View File

@@ -24,11 +24,26 @@ RUN export DOCKER_ARCH=x86_64 \
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0
ENV RUNNER_ALLOW_RUNASROOT=1
ENV DEBIAN_FRONTEND=noninteractive
ENV RUNNER_MANUALLY_TRAP_SIG=1
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
WORKDIR /actions-runner
COPY --from=build /actions-runner .
RUN apt-get update -y \
&& apt-get install -y --no-install-recommends \
sudo \
&& rm -rf /var/lib/apt/lists/*
RUN adduser --disabled-password --gecos "" --uid 1001 runner \
&& groupadd docker --gid 123 \
&& usermod -aG sudo runner \
&& usermod -aG docker runner \
&& echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers
WORKDIR /home/runner
COPY --chown=runner:docker --from=build /actions-runner .
RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker
USER runner

View File

@@ -1,15 +1,17 @@
## Features
- Add support for ghe.com domain (#2420)
- Add docker cli to the runner image. (#2425)
- Support matrix context in output keys (#2477)
- Add update certificates to `./run.sh` if `RUNNER_UPDATE_CA_CERTS` env is set (#2471)
- Bypass all proxies for all hosts if `no_proxy='*'` is set (#2395)
- Change runner image to make user/folder align with `ubuntu-latest` hosted runner. (#2469)
## Bugs
- Fix URL construction bug for RunService (#2396)
- Defer evaluation of a step's DisplayName until its condition is evaluated. (#2313)
- Replace '(' and ')' with '[' and ']' in OS.Description for fixing User-Agent header validation (#2288)
- Exit on runner version deprecation error (#2299)
- Runner service exit after consecutive re-try exits (#2426)
## Misc
- Bump dotnet sdk to latest version. (#2392)
- Start calling run service for job completion (#2412, #2423)
- Replace deprecated command with environment file (#2429)
- Make requests to `Run` service to renew job request (#2461)
- Add job/step log upload to Result service (#2447, #2439)
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.

View File

@@ -1 +1 @@
<Update to ./src/runnerversion when creating release>
2.303.0

View File

@@ -24,7 +24,8 @@ if (exitServiceAfterNFailures <= 0) {
exitServiceAfterNFailures = NaN;
}
var consecutiveFailureCount = 0;
var unknownFailureRetryCount = 0;
var retriableFailureRetryCount = 0;
var gracefulShutdown = function () {
console.log("Shutting down runner listener");
@@ -62,7 +63,8 @@ var runService = function () {
listener.stdout.on("data", (data) => {
if (data.toString("utf8").includes("Listening for Jobs")) {
consecutiveFailureCount = 0;
unknownFailureRetryCount = 0;
retriableFailureRetryCount = 0;
}
process.stdout.write(data.toString("utf8"));
});
@@ -92,24 +94,38 @@ var runService = function () {
console.log(
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
unknownFailureRetryCount = 0;
retriableFailureRetryCount++;
if (retriableFailureRetryCount >= 10) {
console.error(
"Stopping the runner after 10 consecutive re-tryable failures"
);
stopping = true;
}
} else if (code === 3 || code === 4) {
console.log(
"Runner listener exit because of updating, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
unknownFailureRetryCount = 0;
retriableFailureRetryCount++;
if (retriableFailureRetryCount >= 10) {
console.error(
"Stopping the runner after 10 consecutive re-tryable failures"
);
stopping = true;
}
} else {
var messagePrefix = "Runner listener exit with undefined return code";
consecutiveFailureCount++;
unknownFailureRetryCount++;
retriableFailureRetryCount = 0;
if (
!isNaN(exitServiceAfterNFailures) &&
consecutiveFailureCount >= exitServiceAfterNFailures
unknownFailureRetryCount >= exitServiceAfterNFailures
) {
console.error(
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
`${messagePrefix}, exiting service after ${unknownFailureRetryCount} consecutive failures`
);
gracefulShutdown();
return;
stopping = true
} else {
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
}

View File

@@ -53,6 +53,33 @@ runWithManualTrap() {
done
}
function updateCerts() {
local sudo_prefix=""
local user_id=`id -u`
if [ $user_id -ne 0 ]; then
if [[ ! -x "$(command -v sudo)" ]]; then
echo "Warning: failed to update certificate store: sudo is required but not found"
return 1
else
sudo_prefix="sudo"
fi
fi
if [[ -x "$(command -v update-ca-certificates)" ]]; then
eval $sudo_prefix "update-ca-certificates"
elif [[ -x "$(command -v update-ca-trust)" ]]; then
eval $sudo_prefix "update-ca-trust"
else
echo "Warning: failed to update certificate store: update-ca-certificates or update-ca-trust not found. This can happen if you're using a different runner base image."
return 1
fi
}
if [[ ! -z "$RUNNER_UPDATE_CA_CERTS" ]]; then
updateCerts
fi
if [[ -z "$RUNNER_MANUALLY_TRAP_SIG" ]]; then
run $*
else

View File

@@ -30,7 +30,9 @@ namespace GitHub.Runner.Common
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
Task CreateStepSymmaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken);
Task CreateStepSummaryAsync(string planId, string jobId, Guid stepId, string file, CancellationToken cancellationToken);
Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken);
Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken);
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
@@ -316,7 +318,7 @@ namespace GitHub.Runner.Common
return _taskClient.CreateAttachmentAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, type, name, uploadStream, cancellationToken: cancellationToken);
}
public Task CreateStepSymmaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken)
public Task CreateStepSummaryAsync(string planId, string jobId, Guid stepId, string file, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
@@ -325,6 +327,23 @@ namespace GitHub.Runner.Common
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsStepLogAsync(planId, jobId, stepId, file, finalize, firstBlock, lineCount, cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsJobLogAsync(planId, jobId, file, finalize, firstBlock, lineCount, cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken)
{

View File

@@ -20,7 +20,7 @@ namespace GitHub.Runner.Common
void Start(Pipelines.AgentJobRequestMessage jobRequest);
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
void QueueSummaryUpload(Guid stepRecordId, string name, string path, bool deleteSource);
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord);
}
@@ -31,7 +31,7 @@ namespace GitHub.Runner.Common
private static readonly TimeSpan _delayForWebConsoleLineDequeue = TimeSpan.FromMilliseconds(500);
private static readonly TimeSpan _delayForTimelineUpdateDequeue = TimeSpan.FromMilliseconds(500);
private static readonly TimeSpan _delayForFileUploadDequeue = TimeSpan.FromMilliseconds(1000);
private static readonly TimeSpan _delayForSummaryUploadDequeue = TimeSpan.FromMilliseconds(1000);
private static readonly TimeSpan _delayForResultsUploadDequeue = TimeSpan.FromMilliseconds(1000);
// Job message information
private Guid _scopeIdentifier;
@@ -46,7 +46,7 @@ namespace GitHub.Runner.Common
// queue for file upload (log file or attachment)
private readonly ConcurrentQueue<UploadFileInfo> _fileUploadQueue = new();
private readonly ConcurrentQueue<SummaryUploadFileInfo> _summaryFileUploadQueue = new();
private readonly ConcurrentQueue<ResultsUploadFileInfo> _resultsFileUploadQueue = new();
// queue for timeline or timeline record update (one queue per timeline)
private readonly ConcurrentDictionary<Guid, ConcurrentQueue<TimelineRecord>> _timelineUpdateQueue = new();
@@ -60,7 +60,7 @@ namespace GitHub.Runner.Common
// Task for each queue's dequeue process
private Task _webConsoleLineDequeueTask;
private Task _fileUploadDequeueTask;
private Task _summaryUploadDequeueTask;
private Task _resultsUploadDequeueTask;
private Task _timelineUpdateDequeueTask;
// common
@@ -84,6 +84,9 @@ namespace GitHub.Runner.Common
private bool _webConsoleLineAggressiveDequeue = true;
private bool _firstConsoleOutputs = true;
private bool _resultsClientInitiated = false;
private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -109,9 +112,9 @@ namespace GitHub.Runner.Common
{
Trace.Info("Initializing results client");
_jobServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), accessToken);
_resultsClientInitiated = true;
}
if (_queueInProcess)
{
Trace.Info("No-opt, all queue process tasks are running.");
@@ -140,12 +143,12 @@ namespace GitHub.Runner.Common
_fileUploadDequeueTask = ProcessFilesUploadQueueAsync();
Trace.Info("Start results file upload queue.");
_summaryUploadDequeueTask = ProcessSummaryUploadQueueAsync();
_resultsUploadDequeueTask = ProcessResultsUploadQueueAsync();
Trace.Info("Start process timeline update queue.");
_timelineUpdateDequeueTask = ProcessTimelinesUpdateQueueAsync();
_allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask, _summaryUploadDequeueTask };
_allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask, _resultsUploadDequeueTask };
_queueInProcess = true;
}
@@ -176,9 +179,9 @@ namespace GitHub.Runner.Common
await ProcessFilesUploadQueueAsync(runOnce: true);
Trace.Info("File upload queue drained.");
Trace.Verbose("Draining results summary upload queue.");
await ProcessSummaryUploadQueueAsync(runOnce: true);
Trace.Info("Results summary upload queue drained.");
Trace.Verbose("Draining results upload queue.");
await ProcessResultsUploadQueueAsync(runOnce: true);
Trace.Info("Results upload queue drained.");
// ProcessTimelinesUpdateQueueAsync() will throw exception during shutdown
// if there is any timeline records that failed to update contains output variabls.
@@ -230,21 +233,43 @@ namespace GitHub.Runner.Common
_fileUploadQueue.Enqueue(newFile);
}
public void QueueSummaryUpload(Guid stepRecordId, string name, string path, bool deleteSource)
public void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines)
{
if (!_resultsClientInitiated)
{
Trace.Verbose("Skipping results upload");
try
{
if (deleteSource)
{
File.Delete(path);
}
}
catch (Exception ex)
{
Trace.Info("Catch exception during delete skipped results upload file.");
Trace.Error(ex);
}
return;
}
// all parameter not null, file path exist.
var newFile = new SummaryUploadFileInfo()
var newFile = new ResultsUploadFileInfo()
{
Name = name,
Path = path,
Type = type,
PlanId = _planId.ToString(),
JobId = _jobTimelineRecordId.ToString(),
StepId = stepRecordId.ToString(),
DeleteSource = deleteSource
RecordId = timelineRecordId,
DeleteSource = deleteSource,
Finalize = finalize,
FirstBlock = firstBlock,
TotalLines = totalLines,
};
Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to job {1} step {2}", newFile.Path, _jobTimelineRecordId, stepRecordId);
_summaryFileUploadQueue.Enqueue(newFile);
Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to job {1} step {2}", newFile.Path, _jobTimelineRecordId, timelineRecordId);
_resultsFileUploadQueue.Enqueue(newFile);
}
public void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord)
@@ -437,18 +462,18 @@ namespace GitHub.Runner.Common
}
}
private async Task ProcessSummaryUploadQueueAsync(bool runOnce = false)
private async Task ProcessResultsUploadQueueAsync(bool runOnce = false)
{
Trace.Info("Starting results-based upload queue...");
while (!_jobCompletionSource.Task.IsCompleted || runOnce)
{
List<SummaryUploadFileInfo> filesToUpload = new();
SummaryUploadFileInfo dequeueFile;
while (_summaryFileUploadQueue.TryDequeue(out dequeueFile))
List<ResultsUploadFileInfo> filesToUpload = new();
ResultsUploadFileInfo dequeueFile;
while (_resultsFileUploadQueue.TryDequeue(out dequeueFile))
{
filesToUpload.Add(dequeueFile);
// process at most 10 file upload.
// process at most 10 file uploads.
if (!runOnce && filesToUpload.Count > 10)
{
break;
@@ -459,19 +484,35 @@ namespace GitHub.Runner.Common
{
if (runOnce)
{
Trace.Info($"Uploading {filesToUpload.Count} summary files in one shot through results service.");
Trace.Info($"Uploading {filesToUpload.Count} file(s) in one shot through results service.");
}
int errorCount = 0;
foreach (var file in filesToUpload)
{
try
{
if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
{
await UploadSummaryFile(file);
}
else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase))
{
if (file.RecordId != _jobTimelineRecordId)
{
Trace.Info($"Got a step log file to send to results service.");
await UploadResultsStepLogFile(file);
}
else if (file.RecordId == _jobTimelineRecordId)
{
Trace.Info($"Got a job log file to send to results service.");
await UploadResultsJobLogFile(file);
}
}
}
catch (Exception ex)
{
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception during summary file upload to results. {ex.Message}" };
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception during file upload to results. {ex.Message}" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure;
var telemetryRecord = new TimelineRecord()
@@ -481,16 +522,13 @@ namespace GitHub.Runner.Common
telemetryRecord.Issues.Add(issue);
QueueTimelineRecordUpdate(_jobTimelineId, telemetryRecord);
Trace.Info("Catch exception during summary file upload to results, keep going since the process is best effort.");
Trace.Info("Catch exception during file upload to results, keep going since the process is best effort.");
Trace.Error(ex);
}
finally
{
errorCount++;
}
}
Trace.Info("Tried to upload {0} summary files to results, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
Trace.Info("Tried to upload {0} file(s) to results, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
}
if (runOnce)
@@ -499,7 +537,7 @@ namespace GitHub.Runner.Common
}
else
{
await Task.Delay(_delayForSummaryUploadDequeue);
await Task.Delay(_delayForResultsUploadDequeue);
}
}
}
@@ -776,16 +814,45 @@ namespace GitHub.Runner.Common
}
}
private async Task UploadSummaryFile(SummaryUploadFileInfo file)
private async Task UploadSummaryFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler summaryHandler = async (file) =>
{
await _jobServer.CreateStepSummaryAsync(file.PlanId, file.JobId, file.RecordId, file.Path, CancellationToken.None);
};
await UploadResultsFile(file, summaryHandler);
}
private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler stepLogHandler = async (file) =>
{
await _jobServer.CreateResultsStepLogAsync(file.PlanId, file.JobId, file.RecordId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
};
await UploadResultsFile(file, stepLogHandler);
}
private async Task UploadResultsJobLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of job log file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler jobLogHandler = async (file) =>
{
await _jobServer.CreateResultsJobLogAsync(file.PlanId, file.JobId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
};
await UploadResultsFile(file, jobLogHandler);
}
private async Task UploadResultsFile(ResultsUploadFileInfo file, ResultsFileUploadHandler uploadHandler)
{
bool uploadSucceed = false;
try
{
// Upload the step summary
Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
var cancellationTokenSource = new CancellationTokenSource();
await _jobServer.CreateStepSymmaryAsync(file.PlanId, file.JobId, file.StepId, file.Path, cancellationTokenSource.Token);
await uploadHandler(file);
uploadSucceed = true;
}
finally
@@ -798,7 +865,7 @@ namespace GitHub.Runner.Common
}
catch (Exception ex)
{
Trace.Info("Catch exception during delete success results uploaded summary file.");
Trace.Info("Exception encountered during deletion of a temporary file that was already successfully uploaded to results.");
Trace.Error(ex);
}
}
@@ -822,14 +889,18 @@ namespace GitHub.Runner.Common
public bool DeleteSource { get; set; }
}
internal class SummaryUploadFileInfo
internal class ResultsUploadFileInfo
{
public string Name { get; set; }
public string Type { get; set; }
public string Path { get; set; }
public string PlanId { get; set; }
public string JobId { get; set; }
public string StepId { get; set; }
public Guid RecordId { get; set; }
public bool DeleteSource { get; set; }
public bool Finalize { get; set; }
public bool FirstBlock { get; set; }
public long TotalLines { get; set; }
}

View File

@@ -21,6 +21,12 @@ namespace GitHub.Runner.Common
// 8 MB
public const int PageSize = 8 * 1024 * 1024;
// For Results
public static string BlocksFolder = "blocks";
// 2 MB
public const int BlockSize = 2 * 1024 * 1024;
private Guid _timelineId;
private Guid _timelineRecordId;
private FileStream _pageData;
@@ -32,6 +38,13 @@ namespace GitHub.Runner.Common
private string _pagesFolder;
private IJobServerQueue _jobServerQueue;
private string _resultsDataFileName;
private FileStream _resultsBlockData;
private StreamWriter _resultsBlockWriter;
private string _resultsBlockFolder;
private int _blockByteCount;
private int _blockCount;
public long TotalLines => _totalLines;
public override void Initialize(IHostContext hostContext)
@@ -39,8 +52,10 @@ namespace GitHub.Runner.Common
base.Initialize(hostContext);
_totalLines = 0;
_pagesFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), PagingFolder);
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
Directory.CreateDirectory(_pagesFolder);
_resultsBlockFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), BlocksFolder);
Directory.CreateDirectory(_resultsBlockFolder);
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
}
public void Setup(Guid timelineId, Guid timelineRecordId)
@@ -60,11 +75,17 @@ namespace GitHub.Runner.Common
// lazy creation on write
if (_pageWriter == null)
{
Create();
NewPage();
}
if (_resultsBlockWriter == null)
{
NewBlock();
}
string line = $"{DateTime.UtcNow.ToString("O")} {message}";
_pageWriter.WriteLine(line);
_resultsBlockWriter.WriteLine(line);
_totalLines++;
if (line.IndexOf('\n') != -1)
@@ -78,21 +99,25 @@ namespace GitHub.Runner.Common
}
}
_byteCount += System.Text.Encoding.UTF8.GetByteCount(line);
var bytes = System.Text.Encoding.UTF8.GetByteCount(line);
_byteCount += bytes;
_blockByteCount += bytes;
if (_byteCount >= PageSize)
{
NewPage();
}
if (_blockByteCount >= BlockSize)
{
NewBlock();
}
}
public void End()
{
EndPage();
}
private void Create()
{
NewPage();
EndBlock(true);
}
private void NewPage()
@@ -117,5 +142,27 @@ namespace GitHub.Runner.Common
_jobServerQueue.QueueFileUpload(_timelineId, _timelineRecordId, "DistributedTask.Core.Log", "CustomToolLog", _dataFileName, true);
}
}
private void NewBlock()
{
EndBlock(false);
_blockByteCount = 0;
_resultsDataFileName = Path.Combine(_resultsBlockFolder, $"{_timelineId}_{_timelineRecordId}.{++_blockCount}");
_resultsBlockData = new FileStream(_resultsDataFileName, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite);
_resultsBlockWriter = new StreamWriter(_resultsBlockData, System.Text.Encoding.UTF8);
}
private void EndBlock(bool finalize)
{
if (_resultsBlockWriter != null)
{
_resultsBlockWriter.Flush();
_resultsBlockData.Flush();
_resultsBlockWriter.Dispose();
_resultsBlockWriter = null;
_resultsBlockData = null;
_jobServerQueue.QueueResultsUpload(_timelineRecordId, "ResultsLog", _resultsDataFileName, "Results.Core.Log", deleteSource: true, finalize, firstBlock: _resultsDataFileName.EndsWith(".1"), totalLines: _totalLines);
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
@@ -7,6 +7,7 @@ using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi.RawClient;
namespace GitHub.Runner.Common
@@ -19,6 +20,8 @@ namespace GitHub.Runner.Common
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);
Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken token);
Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
}
public sealed class RunServer : RunnerService, IRunServer
@@ -64,5 +67,18 @@ namespace GitHub.Runner.Common
return RetryRequest(
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, cancellationToken), cancellationToken);
}
public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
{
CheckConnection();
var renewJobResponse = RetryRequest<RenewJobResponse>(
async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken);
if (renewJobResponse == null)
{
throw new TaskOrchestrationJobNotFoundException(jobId.ToString());
}
return renewJobResponse;
}
}
}

View File

@@ -0,0 +1,14 @@
namespace GitHub.Runner.Common.Util
{
using System;
using GitHub.DistributedTask.WebApi;
public static class MessageUtil
{
public static bool IsRunServiceJob(string messageType)
{
return string.Equals(messageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase);
}
}
}

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]

View File

@@ -7,6 +7,7 @@ using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
@@ -58,6 +59,8 @@ namespace GitHub.Runner.Listener
public event EventHandler<JobStatusEventArgs> JobStatus;
private bool _isRunServiceJob;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -86,6 +89,8 @@ namespace GitHub.Runner.Listener
{
Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
_isRunServiceJob = MessageUtil.IsRunServiceJob(jobRequestMessage.MessageType);
WorkerDispatcher currentDispatch = null;
if (_jobDispatchedQueue.Count > 0)
{
@@ -239,6 +244,13 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
Trace.Error($"We are not yet checking the state of jobrequest {jobDispatch.JobId} status. Cancel running worker right away.");
jobDispatch.WorkerCancellationTokenSource.Cancel();
return;
}
// based on the current design, server will only send one job for a given runner at a time.
// if the runner received a new job request while a previous job request is still running, this typically indicates two situations
// 1. a runner bug caused a server and runner mismatch on the state of the job request, e.g. the runner didn't renew the jobrequest
@@ -367,9 +379,11 @@ namespace GitHub.Runner.Listener
long requestId = message.RequestId;
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
Task renewJobRequest = RenewJobRequestAsync(message, systemConnection, _poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// wait till first renew succeed or job request is cancelled
// not even start worker if the first renew fail
@@ -508,7 +522,6 @@ namespace GitHub.Runner.Listener
// we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
@@ -531,11 +544,8 @@ namespace GitHub.Runner.Listener
detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
var jobServer = await InitializeJobServerAsync(systemConnection);
await LogWorkerProcessUnhandledException(jobServer, message, detailInfo);
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
@@ -675,9 +685,128 @@ namespace GitHub.Runner.Listener
}
}
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
// Renews the job-request lock for the lifetime of a job, routing to the
// Run Service (run-service jobs) or the classic runner server (all others).
internal async Task RenewJobRequestAsync(Pipelines.AgentJobRequestMessage message, ServiceEndpoint systemConnection, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
    if (!_isRunServiceJob)
    {
        // Classic flow: renew against the runner server using pool/request ids.
        await RenewJobRequestAsync(HostContext.GetService<IRunnerServer>(), poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, token);
        return;
    }

    // Run-service flow: renew against the Run Service using plan/job ids.
    var runServer = await GetRunServerAsync(systemConnection);
    await RenewJobRequestAsync(runServer, message.Plan.PlanId, message.JobId, firstJobRequestRenewed, token);
}
// Renews the Run Service job lock in a loop until the job finishes or renewal
// permanently fails. Completes firstJobRequestRenewed after the first success.
// Retries transient failures: 5 attempts for the first renew, and until the
// last known lock expiry (+5 min buffer) afterwards.
private async Task RenewJobRequestAsync(IRunServer runServer, Guid planId, Guid jobId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
    // Lock expiry reported by the most recent successful renewal.
    // BUGFIX: the original code dereferenced a TaskAgentJobRequest local that
    // was declared null and never assigned, so the retry path after a
    // successful first renew would throw NullReferenceException.
    DateTime? lockedUntil = null;
    int firstRenewRetryLimit = 5;
    int encounteringError = 0;

    // renew lock during job running.
    // stop renew only if cancellation token for lock renew task been signal or exception still happen after retry.
    while (!token.IsCancellationRequested)
    {
        try
        {
            var renewResponse = await runServer.RenewJobAsync(planId, jobId, token);
            lockedUntil = renewResponse.LockedUntil;
            Trace.Info($"Successfully renew job {jobId}, job is valid till {renewResponse.LockedUntil}");

            if (!firstJobRequestRenewed.Task.IsCompleted)
            {
                // fire first renew succeed event.
                firstJobRequestRenewed.TrySetResult(0);
            }

            if (encounteringError > 0)
            {
                encounteringError = 0;
                HostContext.WritePerfCounter("JobRenewRecovered");
            }

            // renew again after 60 sec delay
            await HostContext.Delay(TimeSpan.FromSeconds(60), token);
        }
        catch (TaskOrchestrationJobNotFoundException)
        {
            // no need for retry. the job is not valid anymore.
            Trace.Info($"TaskAgentJobNotFoundException received when renew job {jobId}, job is no longer valid, stop renew job request.");
            return;
        }
        catch (OperationCanceledException) when (token.IsCancellationRequested)
        {
            // OperationCanceledException may be caused by an HTTP timeout or by
            // the lock-renewal token source being cancelled.
            // Stop renew only on cancellation token fired.
            Trace.Info($"job renew has been cancelled, stop renew job {jobId}.");
            return;
        }
        catch (Exception ex)
        {
            Trace.Error($"Catch exception during renew runner job {jobId}.");
            Trace.Error(ex);
            encounteringError++;

            // Work out how much longer we are allowed to keep retrying.
            TimeSpan remainingTime = TimeSpan.Zero;
            if (!firstJobRequestRenewed.Task.IsCompleted)
            {
                // retry 5 times every 10 sec for the first renew
                if (firstRenewRetryLimit-- > 0)
                {
                    remainingTime = TimeSpan.FromSeconds(10);
                }
            }
            else if (lockedUntil.HasValue)
            {
                // retry till reach lockeduntil + 5 mins extra buffer.
                remainingTime = lockedUntil.Value + TimeSpan.FromMinutes(5) - DateTime.UtcNow;
            }

            if (remainingTime > TimeSpan.Zero)
            {
                TimeSpan delayTime;
                if (!firstJobRequestRenewed.Task.IsCompleted)
                {
                    Trace.Info($"Retrying lock renewal for job {jobId}. The first job renew request has failed.");
                    delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10));
                }
                else
                {
                    // lockedUntil is guaranteed non-null here: remainingTime > 0 on
                    // this path only when the lockedUntil branch above was taken.
                    Trace.Info($"Retrying lock renewal for job {jobId}. Job is valid until {lockedUntil.Value}.");
                    if (encounteringError > 5)
                    {
                        delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30));
                    }
                    else
                    {
                        delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
                    }
                }

                try
                {
                    // back-off before next retry.
                    await HostContext.Delay(delayTime, token);
                }
                catch (OperationCanceledException) when (token.IsCancellationRequested)
                {
                    Trace.Info($"job renew has been cancelled, stop renew job {jobId}.");
                }
            }
            else
            {
                Trace.Info($"Lock renewal has run out of retry, stop renew lock for job {jobId}.");
                HostContext.WritePerfCounter("JobRenewReachLimit");
                return;
            }
        }
    }
}
private async Task RenewJobRequestAsync(IRunnerServer runnerServer, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
TaskAgentJobRequest request = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
@@ -840,12 +969,10 @@ namespace GitHub.Runner.Listener
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection));
ArgUtil.NotNull(systemConnection, nameof(systemConnection));
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
var server = await InitializeJobServerAsync(systemConnection);
if (server is IJobServer jobServer)
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
var updatedRecords = new List<TimelineRecord>();
@@ -926,6 +1053,11 @@ namespace GitHub.Runner.Listener
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
}
}
else
{
Trace.Info("Job server does not support log upload yet.");
}
}
catch (Exception ex)
{
// Ignore any error during log upload since it's best effort
@@ -943,6 +1075,12 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
Trace.Verbose($"Skip FinishAgentRequest call from Listener because MessageType is {message.MessageType}");
return;
}
var runnerServer = HostContext.GetService<IRunnerServer>();
int completeJobRequestRetryLimit = 5;
List<Exception> exceptions = new();
@@ -979,7 +1117,9 @@ namespace GitHub.Runner.Listener
}
// log an error issue to job level timeline record
private async Task LogWorkerProcessUnhandledException(IJobServer jobServer, Pipelines.AgentJobRequestMessage message, string errorMessage)
private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string errorMessage)
{
if (server is IJobServer jobServer)
{
try
{
@@ -1025,9 +1165,17 @@ namespace GitHub.Runner.Listener
Trace.Error(ex);
}
}
else
{
Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", errorMessage);
return;
}
}
// raise job completed event to fail the job.
private async Task ForceFailJob(IJobServer jobServer, Pipelines.AgentJobRequestMessage message)
private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message)
{
if (server is IJobServer jobServer)
{
try
{
@@ -1040,6 +1188,47 @@ namespace GitHub.Runner.Listener
Trace.Error(ex);
}
}
else if (server is IRunServer runServer)
{
try
{
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
}
}
else
{
throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported.");
}
}
// Builds and connects the server client used for job-related calls: the Run
// Service for run-service jobs, otherwise the classic job server over VSS.
private async Task<IRunnerService> InitializeJobServerAsync(ServiceEndpoint systemConnection)
{
    if (_isRunServiceJob)
    {
        return await GetRunServerAsync(systemConnection);
    }

    var jobServer = HostContext.GetService<IJobServer>();
    var credential = VssUtil.GetVssCredential(systemConnection);
    var connection = VssUtil.CreateConnection(systemConnection.Url, credential);
    await jobServer.ConnectAsync(connection);
    return jobServer;
}
// Resolves an IRunServer from the host context and connects it against the
// given system connection's URL and credentials.
private async Task<IRunServer> GetRunServerAsync(ServiceEndpoint systemConnection)
{
    var credential = VssUtil.GetVssCredential(systemConnection);
    var server = HostContext.GetService<IRunServer>();
    await server.ConnectAsync(systemConnection.Url, credential);
    return server;
}
private class WorkerDispatcher : IDisposable
{

View File

@@ -245,6 +245,10 @@ namespace GitHub.Runner.Listener
_accessTokenRevoked = true;
throw;
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during get next message.");

View File

@@ -6,6 +6,7 @@ using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Listener
{
@@ -137,6 +138,12 @@ namespace GitHub.Runner.Listener
}
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
terminal.WriteError($"An error occured: {e.Message}");
trace.Error(e);
return Constants.Runner.ReturnCode.TerminatedError;
}
catch (Exception e)
{
terminal.WriteError($"An error occurred: {e.Message}");

View File

@@ -9,6 +9,7 @@ using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Listener.Check;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
@@ -502,7 +503,7 @@ namespace GitHub.Runner.Listener
}
}
// Broker flow
else if (string.Equals(message.MessageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase))
else if (MessageUtil.IsRunServiceJob(message.MessageType))
{
if (autoUpdateInProgress || runOnceJobReceived)
{

View File

@@ -164,7 +164,6 @@ namespace GitHub.Runner.Sdk
{
continue;
}
_noProxyList.Add(noProxyInfo);
}
}
@@ -207,6 +206,11 @@ namespace GitHub.Runner.Sdk
{
foreach (var noProxy in _noProxyList)
{
// bypass on wildcard no_proxy
if (string.Equals(noProxy.Host, "*", StringComparison.OrdinalIgnoreCase))
{
return true;
}
var matchHost = false;
var matchPort = false;

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
@@ -871,8 +871,7 @@ namespace GitHub.Runner.Worker
{
throw new FileNotFoundException($"Can't upload (name:{name}) file: {filePath}. File does not exist.");
}
_jobServerQueue.QueueSummaryUpload(stepRecordId, name, filePath, deleteSource: false);
_jobServerQueue.QueueResultsUpload(stepRecordId, name, filePath, ChecksAttachmentType.StepSummary, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0);
}
// Add OnMatcherChanged

View File

@@ -6,6 +6,7 @@ using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
@@ -19,7 +20,7 @@ namespace GitHub.Runner.Worker
[ServiceLocator(Default = typeof(JobRunner))]
public interface IJobRunner : IRunnerService
{
Task<TaskResult> RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken);
Task<TaskResult> RunAsync(AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken);
}
public sealed class JobRunner : RunnerService, IJobRunner
@@ -28,7 +29,7 @@ namespace GitHub.Runner.Worker
private RunnerSettings _runnerSettings;
private ITempDirectoryManager _tempDirectoryManager;
public async Task<TaskResult> RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken)
public async Task<TaskResult> RunAsync(AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken)
{
// Validate parameters.
Trace.Entering();
@@ -42,7 +43,7 @@ namespace GitHub.Runner.Worker
IRunnerService server = null;
ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (string.Equals(message.MessageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase))
if (MessageUtil.IsRunServiceJob(message.MessageType))
{
var runServer = HostContext.GetService<IRunServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);

View File

@@ -222,6 +222,9 @@
},
"job-outputs": {
"context": [
"matrix"
],
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string-runner-context"

View File

@@ -1,4 +1,4 @@
using GitHub.Services.Common;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using System;
using System.Runtime.Serialization;
@@ -100,6 +100,7 @@ namespace GitHub.DistributedTask.WebApi
public static readonly String Summary = "DistributedTask.Core.Summary";
public static readonly String FileAttachment = "DistributedTask.Core.FileAttachment";
public static readonly String DiagnosticLog = "DistributedTask.Core.DiagnosticLog";
public static readonly String ResultsLog = "Results.Core.Log";
}
[GenerateAllConstants]

View File

@@ -0,0 +1,15 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.Actions.RunService.WebApi
{
    // Request payload for the Run Service "renewjob" endpoint; identifies the
    // job whose request lock should be extended.
    [DataContract]
    public class RenewJobRequest
    {
        // Plan (workflow run) the job belongs to; serialized as "planId".
        [DataMember(Name = "planId", EmitDefaultValue = false)]
        public Guid PlanID { get; set; }

        // Job whose lock is being renewed; serialized as "jobId".
        [DataMember(Name = "jobId", EmitDefaultValue = false)]
        public Guid JobID { get; set; }
    }
}

View File

@@ -0,0 +1,16 @@
using System;
using System.Runtime.Serialization;
namespace Sdk.RSWebApi.Contracts
{
    // Response payload from the Run Service "renewjob" endpoint.
    [DataContract]
    public class RenewJobResponse
    {
        // Time until which the job-request lock remains valid after this renewal.
        // Settable only by the deserializer (internal set).
        [DataMember]
        public DateTime LockedUntil
        {
            get;
            internal set;
        }
    }
}

View File

@@ -8,6 +8,7 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi;
namespace GitHub.Actions.RunService.WebApi
@@ -103,5 +104,28 @@ namespace GitHub.Actions.RunService.WebApi
content: requestContent,
cancellationToken: cancellationToken);
}
// POSTs to the Run Service "renewjob" endpoint to extend the lock on a job.
public Task<RenewJobResponse> RenewJobAsync(
    Uri requestUri,
    Guid planId,
    Guid jobId,
    CancellationToken cancellationToken = default)
{
    var payload = new RenewJobRequest
    {
        PlanID = planId,
        JobID = jobId,
    };

    return SendAsync<RenewJobResponse>(
        HttpMethod.Post,
        new Uri(requestUri, "renewjob"),
        content: new ObjectContent<RenewJobRequest>(payload, new VssJsonMediaTypeFormatter(true)),
        cancellationToken: cancellationToken);
}
}
}

View File

@@ -46,7 +46,81 @@ namespace GitHub.Services.Results.Contracts
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class CreateStepSummaryMetadataResponse
public class GetSignedJobLogsURLRequest
{
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedJobLogsURLResponse
{
[DataMember]
public string LogsUrl;
[DataMember]
public string BlobStorageType;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedStepLogsURLRequest
{
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string StepBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedStepLogsURLResponse
{
[DataMember]
public string LogsUrl;
[DataMember]
public string BlobStorageType;
[DataMember]
public long SoftSizeLimit;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class JobLogsMetadataCreate
{
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string UploadedAt;
[DataMember]
public long LineCount;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class StepLogsMetadataCreate
{
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string StepBackendId;
[DataMember]
public string UploadedAt;
[DataMember]
public long LineCount;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class CreateMetadataResponse
{
[DataMember]
public bool Ok;

View File

@@ -24,68 +24,138 @@ namespace GitHub.Services.Results.Client
m_formatter = new JsonMediaTypeFormatter();
}
public async Task<GetSignedStepSummaryURLResponse> GetStepSummaryUploadUrlAsync(string planId, string jobId, string stepId, CancellationToken cancellationToken)
// Shared helper for the "get signed URL" Twirp calls: POSTs the given request
// body with a bearer token and deserializes the JSON response as T.
private async Task<T> GetResultsSignedURLResponse<R, T>(Uri uri, CancellationToken cancellationToken, R request)
{
    using (var message = new HttpRequestMessage(HttpMethod.Post, uri))
    {
        message.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
        message.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));

        using (HttpContent body = new ObjectContent<R>(request, m_formatter))
        {
            message.Content = body;
            using (var response = await SendAsync(message, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
            {
                return await ReadJsonContentAsync<T>(response, cancellationToken);
            }
        }
    }
}
private async Task<GetSignedStepSummaryURLResponse> GetStepSummaryUploadUrlAsync(string planId, string jobId, Guid stepId, CancellationToken cancellationToken)
{
var request = new GetSignedStepSummaryURLRequest()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId= stepId
StepBackendId = stepId.ToString()
};
var stepSummaryUploadRequest = new Uri(m_resultsServiceUrl, "twirp/results.services.receiver.Receiver/GetStepSummarySignedBlobURL");
var getStepSummarySignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetStepSummarySignedBlobURL);
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, stepSummaryUploadRequest))
return await GetResultsSignedURLResponse<GetSignedStepSummaryURLRequest, GetSignedStepSummaryURLResponse>(getStepSummarySignedBlobURLEndpoint, cancellationToken, request);
}
// Requests a signed blob URL for uploading a single step's log.
private async Task<GetSignedStepLogsURLResponse> GetStepLogUploadUrlAsync(string planId, string jobId, Guid stepId, CancellationToken cancellationToken)
{
    var endpoint = new Uri(m_resultsServiceUrl, Constants.GetStepLogsSignedBlobURL);
    var payload = new GetSignedStepLogsURLRequest()
    {
        WorkflowJobRunBackendId = jobId,
        WorkflowRunBackendId = planId,
        StepBackendId = stepId.ToString(),
    };
    return await GetResultsSignedURLResponse<GetSignedStepLogsURLRequest, GetSignedStepLogsURLResponse>(endpoint, cancellationToken, payload);
}
// Requests a signed blob URL for uploading the whole job's log.
private async Task<GetSignedJobLogsURLResponse> GetJobLogUploadUrlAsync(string planId, string jobId, CancellationToken cancellationToken)
{
    var endpoint = new Uri(m_resultsServiceUrl, Constants.GetJobLogsSignedBlobURL);
    var payload = new GetSignedJobLogsURLRequest()
    {
        WorkflowJobRunBackendId = jobId,
        WorkflowRunBackendId = planId,
    };
    return await GetResultsSignedURLResponse<GetSignedJobLogsURLRequest, GetSignedJobLogsURLResponse>(endpoint, cancellationToken, payload);
}
// Create metadata calls
private async Task CreateMetadata<R>(Uri uri, CancellationToken cancellationToken, R request, string timestamp)
{
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri))
{
requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
requestMessage.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));
using (HttpContent content = new ObjectContent<GetSignedStepSummaryURLRequest>(request, m_formatter))
using (HttpContent content = new ObjectContent<R>(request, m_formatter))
{
requestMessage.Content = content;
using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
{
return await ReadJsonContentAsync<GetSignedStepSummaryURLResponse>(response, cancellationToken);
var jsonResponse = await ReadJsonContentAsync<CreateMetadataResponse>(response, cancellationToken);
if (!jsonResponse.Ok)
{
throw new Exception($"Failed to mark {typeof(R).Name} upload as complete, status code: {response.StatusCode}, ok: {jsonResponse.Ok}, timestamp: {timestamp}");
}
}
}
}
}
private async Task StepSummaryUploadCompleteAsync(string planId, string jobId, string stepId, long size, CancellationToken cancellationToken)
private async Task StepSummaryUploadCompleteAsync(string planId, string jobId, Guid stepId, long size, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString("yyyy-MM-dd'T'HH:mm:ss.fffK");
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new StepSummaryMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId,
StepBackendId = stepId.ToString(),
Size = size,
UploadedAt = timestamp
};
var stepSummaryUploadCompleteRequest = new Uri(m_resultsServiceUrl, "twirp/results.services.receiver.Receiver/CreateStepSummaryMetadata");
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, stepSummaryUploadCompleteRequest))
{
requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
requestMessage.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));
using (HttpContent content = new ObjectContent<StepSummaryMetadataCreate>(request, m_formatter))
{
requestMessage.Content = content;
using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
{
var jsonResponse = await ReadJsonContentAsync<CreateStepSummaryMetadataResponse>(response, cancellationToken);
if (!jsonResponse.Ok)
{
throw new Exception($"Failed to mark step summary upload as complete, status code: {response.StatusCode}, ok: {jsonResponse.Ok}, size: {size}, timestamp: {timestamp}");
}
}
}
}
var createStepSummaryMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateStepSummaryMetadata);
await CreateMetadata<StepSummaryMetadataCreate>(createStepSummaryMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task<HttpResponseMessage> UploadFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
// Marks a step log upload as complete by posting its metadata
// (line count and upload timestamp) to the Results service.
private async Task StepLogUploadCompleteAsync(string planId, string jobId, Guid stepId, long lineCount, CancellationToken cancellationToken)
{
    var uploadedAt = DateTime.UtcNow.ToString(Constants.TimestampFormat);
    var metadata = new StepLogsMetadataCreate()
    {
        WorkflowRunBackendId = planId,
        WorkflowJobRunBackendId = jobId,
        StepBackendId = stepId.ToString(),
        UploadedAt = uploadedAt,
        LineCount = lineCount,
    };
    var endpoint = new Uri(m_resultsServiceUrl, Constants.CreateStepLogsMetadata);
    await CreateMetadata<StepLogsMetadataCreate>(endpoint, cancellationToken, metadata, uploadedAt);
}
// Marks the job log upload as complete by posting its metadata
// (line count and upload timestamp) to the Results service.
private async Task JobLogUploadCompleteAsync(string planId, string jobId, long lineCount, CancellationToken cancellationToken)
{
    var uploadedAt = DateTime.UtcNow.ToString(Constants.TimestampFormat);
    var metadata = new JobLogsMetadataCreate()
    {
        WorkflowRunBackendId = planId,
        WorkflowJobRunBackendId = jobId,
        UploadedAt = uploadedAt,
        LineCount = lineCount,
    };
    var endpoint = new Uri(m_resultsServiceUrl, Constants.CreateJobLogsMetadata);
    await CreateMetadata<JobLogsMetadataCreate>(endpoint, cancellationToken, metadata, uploadedAt);
}
private async Task<HttpResponseMessage> UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
{
// Upload the file to the url
var request = new HttpRequestMessage(HttpMethod.Put, url)
@@ -95,7 +165,7 @@ namespace GitHub.Services.Results.Client
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add("x-ms-blob-type", "BlockBlob");
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob);
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
@@ -108,8 +178,55 @@ namespace GitHub.Services.Results.Client
}
}
// Creates an empty append blob at the signed URL so that log content can
// subsequently be appended to it. Throws on a non-success status code.
private async Task<HttpResponseMessage> CreateAppendFileAsync(string url, string blobStorageType, CancellationToken cancellationToken)
{
    var request = new HttpRequestMessage(HttpMethod.Put, url)
    {
        // Empty body: this PUT only creates the blob; content is appended later.
        Content = new StringContent("")
    };
    if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
    {
        // Azure requires the blob type header to create an AppendBlob.
        request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob);
        request.Content.Headers.Add("Content-Length", "0");
    }
    using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
    {
        if (!response.IsSuccessStatusCode)
        {
            throw new Exception($"Failed to create append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
        }
        // NOTE(review): the response is returned from inside its own using block,
        // so the caller receives an already-disposed HttpResponseMessage. Callers
        // in this file ignore the return value, but this is a hazard if that
        // ever changes — confirm before relying on the returned object.
        return response;
    }
}
// Appends one chunk of file content to an existing append blob; when
// `finalize` is true the blob is also sealed so no further appends succeed.
// Throws on a non-success status code.
private async Task<HttpResponseMessage> UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken)
{
    // NOTE(review): appending "&comp=..." assumes the signed URL already carries
    // a query string (true for Azure SAS URLs) — confirm for other storage types.
    var comp = finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock";
    // Upload the file to the url
    var request = new HttpRequestMessage(HttpMethod.Put, url + comp)
    {
        Content = new StreamContent(file)
    };
    if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
    {
        request.Content.Headers.Add("Content-Length", fileSize.ToString());
        request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString());
    }
    using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
    {
        if (!response.IsSuccessStatusCode)
        {
            throw new Exception($"Failed to upload append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}, object: {response}, fileSize: {fileSize}");
        }
        // NOTE(review): as in CreateAppendFileAsync, the returned response is
        // disposed by the enclosing using before the caller can use it; current
        // callers discard the value.
        return response;
    }
}
// Handle file upload for step summary
public async Task UploadStepSummaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken)
public async Task UploadStepSummaryAsync(string planId, string jobId, Guid stepId, string file, CancellationToken cancellationToken)
{
// Get the upload url
var uploadUrlResponse = await GetStepSummaryUploadUrlAsync(planId, jobId, stepId, cancellationToken);
@@ -128,15 +245,97 @@ namespace GitHub.Services.Results.Client
// Upload the file
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
var response = await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
}
// Send step summary upload complete message
await StepSummaryUploadCompleteAsync(planId, jobId, stepId, fileSize, cancellationToken);
}
// Uploads one chunk of a step's log to Results blob storage. The first block
// creates the append blob; the final block seals it and reports completion
// metadata (line count) back to the service.
public async Task UploadResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
    var uploadUrlResponse = await GetStepLogUploadUrlAsync(planId, jobId, stepId, cancellationToken);
    if (uploadUrlResponse?.LogsUrl == null)
    {
        throw new Exception("Failed to get step log upload url");
    }

    // The very first block must create the append blob before content can be appended.
    if (firstBlock)
    {
        await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
    }

    // Append this chunk's content.
    var fileSize = new FileInfo(file).Length;
    using (var stream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
    {
        await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, stream, finalize, fileSize, cancellationToken);
    }

    // Only the final block reports completion metadata.
    if (finalize)
    {
        await StepLogUploadCompleteAsync(planId, jobId, stepId, lineCount, cancellationToken);
    }
}
// Uploads one chunk of the job log to Results blob storage. The first block
// creates the append blob; the final block seals it and reports completion
// metadata (line count) back to the service.
public async Task UploadResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
    var uploadUrlResponse = await GetJobLogUploadUrlAsync(planId, jobId, cancellationToken);
    if (uploadUrlResponse?.LogsUrl == null)
    {
        throw new Exception("Failed to get job log upload url");
    }

    // The very first block must create the append blob before content can be appended.
    if (firstBlock)
    {
        await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
    }

    // Append this chunk's content.
    var fileSize = new FileInfo(file).Length;
    using (var stream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
    {
        await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, stream, finalize, fileSize, cancellationToken);
    }

    // Only the final block sends the job log completion metadata.
    if (finalize)
    {
        await JobLogUploadCompleteAsync(planId, jobId, lineCount, cancellationToken);
    }
}
private MediaTypeFormatter m_formatter;
private Uri m_resultsServiceUrl;
private string m_token;
}
// Constants specific to results
public static class Constants
{
    // Timestamp format for upload metadata (ISO-8601 with milliseconds and offset).
    public static readonly string TimestampFormat = "yyyy-MM-dd'T'HH:mm:ss.fffK";

    // Base path of the Results receiver Twirp API; the endpoint paths below
    // are all relative to the results service URL.
    public static readonly string ResultsReceiverTwirpEndpoint = "twirp/results.services.receiver.Receiver/";
    public static readonly string GetStepSummarySignedBlobURL = ResultsReceiverTwirpEndpoint + "GetStepSummarySignedBlobURL";
    public static readonly string CreateStepSummaryMetadata = ResultsReceiverTwirpEndpoint + "CreateStepSummaryMetadata";
    public static readonly string GetStepLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetStepLogsSignedBlobURL";
    public static readonly string CreateStepLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateStepLogsMetadata";
    public static readonly string GetJobLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetJobLogsSignedBlobURL";
    public static readonly string CreateJobLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateJobLogsMetadata";

    // Azure Blob Storage headers and blob-type values used on signed-URL uploads.
    public static readonly string AzureBlobSealedHeader = "x-ms-blob-sealed";
    public static readonly string AzureBlobTypeHeader = "x-ms-blob-type";
    public static readonly string AzureBlockBlob = "BlockBlob";
    public static readonly string AzureAppendBlob = "AppendBlob";
}
}

View File

@@ -1,12 +1,17 @@
using System;
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Listener;
using GitHub.Runner.Listener.Configuration;
using GitHub.Services.WebApi;
using Moq;
using Sdk.RSWebApi.Contracts;
using Xunit;
using Pipelines = GitHub.DistributedTask.Pipelines;
@@ -18,6 +23,8 @@ namespace GitHub.Runner.Common.Tests.Listener
private Mock<IProcessChannel> _processChannel;
private Mock<IProcessInvoker> _processInvoker;
private Mock<IRunnerServer> _runnerServer;
private Mock<IRunServer> _runServer;
private Mock<IConfigurationStore> _configurationStore;
public JobDispatcherL0()
@@ -25,6 +32,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_processChannel = new Mock<IProcessChannel>();
_processInvoker = new Mock<IProcessInvoker>();
_runnerServer = new Mock<IRunnerServer>();
_runServer = new Mock<IRunServer>();
_configurationStore = new Mock<IConfigurationStore>();
}
@@ -139,7 +147,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully);
_runnerServer.Verify(x => x.RenewAgentRequestAsync(It.IsAny<int>(), It.IsAny<long>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Exactly(5));
@@ -197,7 +205,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -205,6 +213,75 @@ namespace GitHub.Runner.Common.Tests.Listener
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
// Verifies that a Run Service job stops renewing once RenewJobAsync throws
// TaskOrchestrationJobNotFoundException: four successful renewals, then the
// exception on the fifth call, after which no further renew attempts happen.
// Fixed: was `async void` (exceptions in async void tests are unobservable by
// xUnit) — now returns Task; also removed a duplicated reflection block that
// re-set _isRunServiceJob after EnableRunServiceJobForJobDispatcher already did.
public async Task DispatcherRenewJobOnRunServiceStopOnJobNotFoundExceptions()
{
    // Arrange
    using (var hc = new TestHostContext(this))
    {
        int poolId = 1;
        long requestId = 1000;
        int count = 0;
        var trace = hc.GetTrace(nameof(DispatcherRenewJobOnRunServiceStopOnJobNotFoundExceptions));
        TaskCompletionSource<int> firstJobRequestRenewed = new();
        CancellationTokenSource cancellationTokenSource = new();

        // LockedUntil has no public setter, so populate it through reflection.
        TaskAgentJobRequest request = new();
        PropertyInfo lockUntilProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
        Assert.NotNull(lockUntilProperty);
        lockUntilProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5));

        hc.SetSingleton<IRunServer>(_runServer.Object);
        hc.SetSingleton<IConfigurationStore>(_configurationStore.Object);
        _configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 });

        // Renew succeeds for calls 1-4, throws "job not found" on call 5,
        // and must never be invoked a 6th time.
        _ = _runServer.Setup(x => x.RenewJobAsync(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<CancellationToken>()))
            .Returns(() =>
            {
                count++;
                if (!firstJobRequestRenewed.Task.IsCompletedSuccessfully)
                {
                    trace.Info("First renew happens.");
                }

                if (count < 5)
                {
                    var response = new RenewJobResponse()
                    {
                        LockedUntil = request.LockedUntil.Value
                    };
                    return Task.FromResult<RenewJobResponse>(response);
                }
                else if (count == 5)
                {
                    // Safety net: cancel eventually so the test cannot hang if the
                    // dispatcher keeps retrying instead of stopping.
                    cancellationTokenSource.CancelAfter(10000);
                    throw new TaskOrchestrationJobNotFoundException("");
                }
                else
                {
                    throw new InvalidOperationException("Should not reach here.");
                }
            });

        var jobDispatcher = new JobDispatcher();
        jobDispatcher.Initialize(hc);
        EnableRunServiceJobForJobDispatcher(jobDispatcher);

        // Act
        await jobDispatcher.RenewJobRequestAsync(GetAgentJobRequestMessage(), GetServiceEndpoint(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);

        // Assert
        Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
        Assert.False(cancellationTokenSource.IsCancellationRequested);
        _runServer.Verify(x => x.RenewJobAsync(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<CancellationToken>()), Times.Exactly(5));
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
@@ -256,7 +333,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -312,8 +389,9 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
// Act
await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
// Assert
_configurationStore.Verify(x => x.SaveSettings(It.Is<RunnerSettings>(settings => settings.AgentName == newName)), Times.Once);
@@ -368,7 +446,7 @@ namespace GitHub.Runner.Common.Tests.Listener
jobDispatcher.Initialize(hc);
// Act
await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
// Assert
_configurationStore.Verify(x => x.SaveSettings(It.IsAny<RunnerSettings>()), Times.Never);
@@ -421,7 +499,7 @@ namespace GitHub.Runner.Common.Tests.Listener
jobDispatcher.Initialize(hc);
// Act
await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
// Assert
_configurationStore.Verify(x => x.SaveSettings(It.IsAny<RunnerSettings>()), Times.Never);
@@ -479,7 +557,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.True(cancellationTokenSource.IsCancellationRequested);
@@ -536,7 +614,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.False(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should failed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -600,7 +678,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -659,5 +737,78 @@ namespace GitHub.Runner.Common.Tests.Listener
Assert.True(jobDispatcher.RunOnceJobCompleted.Task.Result, "JobDispatcher should set task complete token to 'TRUE' for one time agent.");
}
}
// Flips the dispatcher's private _isRunServiceJob flag via reflection so it
// takes the Run Service renew code path in these tests.
private static void EnableRunServiceJobForJobDispatcher(JobDispatcher jobDispatcher)
{
    FieldInfo runServiceFlag = typeof(JobDispatcher)
        .GetField("_isRunServiceJob", BindingFlags.NonPublic | BindingFlags.Instance);
    runServiceFlag.SetValue(jobDispatcher, true);
}
// Builds a minimal OAuth service endpoint carrying a dummy access token,
// sufficient for JobDispatcher.RenewJobRequestAsync in tests.
private static ServiceEndpoint GetServiceEndpoint()
{
    var authorization = new EndpointAuthorization
    {
        Scheme = EndpointAuthorizationSchemes.OAuth
    };
    authorization.Parameters.Add("AccessToken", "token");

    return new ServiceEndpoint
    {
        Authorization = authorization
    };
}
// Builds a minimal AgentJobRequestMessage for exercising
// JobDispatcher.RenewJobRequestAsync. The inline argument notes below are
// inferred from the values passed — NOTE(review): confirm against the
// AgentJobRequestMessage constructor signature; only positional order is
// guaranteed by this code.
private static AgentJobRequestMessage GetAgentJobRequestMessage()
{
    var message = new AgentJobRequestMessage(
        new TaskOrchestrationPlanReference()
        {
            PlanType = "Build",
            PlanId = Guid.NewGuid(),
            Version = 1
        },
        new TimelineReference()
        {
            Id = Guid.NewGuid()
        },
        Guid.NewGuid(),   // presumably the job id — TODO confirm
        "jobDisplayName",
        "jobName",
        null,
        null,
        new List<TemplateToken>(),
        new Dictionary<string, VariableValue>()
        {
            {
                "variables",
                new VariableValue()
                {
                    IsSecret = false,
                    Value = "variables"
                }
            }
        },
        new List<MaskHint>()
        {
            new MaskHint()
            {
                Type = MaskType.Variable,
                Value = "maskHints"
            }
        },
        new JobResources(),
        new DictionaryContextData(),
        new WorkspaceOptions(),
        new List<JobStep>(),
        new List<string>()
        {
            "fileTable"
        },
        null,
        new List<TemplateToken>(),
        new ActionsEnvironmentReference("env")
    );
    return message;
}
}
}

View File

@@ -351,7 +351,7 @@ namespace GitHub.Runner.Common.Tests
Assert.False(proxy.IsBypassed(new Uri("https://actions.com")));
Assert.False(proxy.IsBypassed(new Uri("https://ggithub.com")));
Assert.False(proxy.IsBypassed(new Uri("https://github.comm")));
Assert.False(proxy.IsBypassed(new Uri("https://google.com")));
Assert.False(proxy.IsBypassed(new Uri("https://google.com"))); // no_proxy has '.google.com', specifying only subdomains bypass
Assert.False(proxy.IsBypassed(new Uri("https://example.com")));
Assert.False(proxy.IsBypassed(new Uri("http://example.com:333")));
Assert.False(proxy.IsBypassed(new Uri("http://192.168.0.123:123")));
@@ -374,6 +374,76 @@ namespace GitHub.Runner.Common.Tests
CleanProxyEnv();
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
// A standalone '*' entry in no_proxy (even surrounded by other hosts and
// whitespace) bypasses the proxy for every host.
public void BypassAllOnWildcardNoProxy()
{
    try
    {
        Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
        Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
        Environment.SetEnvironmentVariable("no_proxy", "example.com, * , example2.com");

        var proxy = new RunnerWebProxy();

        string[] urls =
        {
            "http://actions.com",
            "http://localhost",
            "http://127.0.0.1:8080",
            "https://actions.com",
            "https://localhost",
            "https://127.0.0.1:8080",
        };
        foreach (string url in urls)
        {
            Assert.True(proxy.IsBypassed(new Uri(url)));
        }
    }
    finally
    {
        CleanProxyEnv();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
// '*' is only special as a standalone no_proxy entry; "*.example.com" is not
// treated as a wildcard pattern, so neither the subdomain nor the apex domain
// is bypassed.
public void IgnoreWildcardInNoProxySubdomain()
{
    try
    {
        Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
        Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
        Environment.SetEnvironmentVariable("no_proxy", "*.example.com");

        var proxy = new RunnerWebProxy();

        bool subdomainBypassed = proxy.IsBypassed(new Uri("http://sub.example.com"));
        bool apexBypassed = proxy.IsBypassed(new Uri("http://example.com"));

        Assert.False(subdomainBypassed);
        Assert.False(apexBypassed);
    }
    finally
    {
        CleanProxyEnv();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
// The '*' wildcard still bypasses everything when other no_proxy entries are
// listed alongside it with no surrounding whitespace.
public void WildcardNoProxyWorksWhenOtherNoProxyAreAround()
{
    try
    {
        Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
        Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
        Environment.SetEnvironmentVariable("no_proxy", "example.com,*,example2.com");

        var proxy = new RunnerWebProxy();

        var urls = new[]
        {
            "http://actions.com",
            "http://localhost",
            "http://127.0.0.1:8080",
            "https://actions.com",
            "https://localhost",
            "https://127.0.0.1:8080",
        };
        Assert.All(urls, url => Assert.True(proxy.IsBypassed(new Uri(url))));
    }
    finally
    {
        CleanProxyEnv();
    }
}
[Fact]
[Trait("Level", "L0")]

View File

@@ -25,21 +25,21 @@ runs:
- run: exit ${{ inputs.exit-code }}
shell: bash
- run: echo "::set-output name=default::true"
- run: echo "default=true" >> $GITHUB_OUTPUT
id: default-conditional
shell: bash
- run: echo "::set-output name=success::true"
- run: echo "success=true" >> $GITHUB_OUTPUT
id: success-conditional
shell: bash
if: success()
- run: echo "::set-output name=failure::true"
- run: echo "failure=true" >> $GITHUB_OUTPUT
id: failure-conditional
shell: bash
if: failure()
- run: echo "::set-output name=always::true"
- run: echo "always=true" >> $GITHUB_OUTPUT
id: always-conditional
shell: bash
if: always()

View File

@@ -1 +1 @@
2.302.1
2.303.0