diff --git a/src/Runner.Common/Constants.cs b/src/Runner.Common/Constants.cs
index f7e04a565..2f5b69d84 100644
--- a/src/Runner.Common/Constants.cs
+++ b/src/Runner.Common/Constants.cs
@@ -164,6 +164,7 @@ namespace GitHub.Runner.Common
             public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/";
             public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
             public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
+            public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
             public static readonly string Node12DetectedAfterEndOfLife = "Node.js 12 actions are deprecated. Please update the following actions to use Node.js 16: {0}. For more information see: https://github.blog/changelog/2022-09-22-github-actions-all-actions-will-begin-running-on-node16-instead-of-node12/.";
         }
diff --git a/src/Runner.Common/JobServer.cs b/src/Runner.Common/JobServer.cs
index 4a37311ca..8a6c4a6a7 100644
--- a/src/Runner.Common/JobServer.cs
+++ b/src/Runner.Common/JobServer.cs
@@ -13,6 +13,8 @@ using GitHub.Runner.Sdk;
 using GitHub.Services.Common;
 using GitHub.Services.WebApi;
 using GitHub.Services.WebApi.Utilities.Internal;
+using GitHub.Services.Results.Client;
+using GitHub.Services.OAuth;
 
 namespace GitHub.Runner.Common
 {
@@ -22,11 +24,13 @@
         Task ConnectAsync(VssConnection jobConnection);
 
         void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
+        void InitializeResultsClient(Uri uri, string token);
 
         // logging and console
         Task AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
         Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
         Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
+        Task CreateStepSummaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken);
         Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
         Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
         Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
@@ -40,6 +44,7 @@
         private bool _hasConnection;
         private VssConnection _connection;
         private TaskHttpClient _taskClient;
+        private ResultsHttpClient _resultsClient;
         private ClientWebSocket _websocketClient;
         private ServiceEndpoint _serviceEndpoint;
@@ -143,6 +148,12 @@
             InitializeWebsocketClient(TimeSpan.Zero);
         }
 
+        public void InitializeResultsClient(Uri uri, string token)
+        {
+            var httpMessageHandler = HostContext.CreateHttpClientHandler();
+            this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
+        }
+
         public ValueTask DisposeAsync()
         {
             CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
@@ -305,6 +316,16 @@
             return _taskClient.CreateAttachmentAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, type, name, uploadStream, cancellationToken: cancellationToken);
         }
 
+        public Task CreateStepSummaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken)
+        {
+            if (_resultsClient != null)
+            {
+                return _resultsClient.UploadStepSummaryAsync(planId, jobId, stepId, file, cancellationToken: cancellationToken);
+            }
+            throw new InvalidOperationException("Results client is not initialized.");
+        }
+
         public Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken)
         {
             CheckConnection();
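The JobServer change above is deliberately thin: InitializeResultsClient builds a ResultsHttpClient from the results endpoint and token, and CreateStepSummaryAsync simply forwards to it, throwing if the client was never initialized. A minimal, hypothetical sketch of that flow with illustrative endpoint and ids (the real call sites are JobServerQueue.Start and JobServerQueue.UploadSummaryFile below):

    // Hypothetical wiring of the new IJobServer members; not part of the patch.
    async Task UploadOneSummaryAsync(IHostContext hostContext, string accessToken, CancellationToken token)
    {
        var jobServer = hostContext.GetService<IJobServer>();
        // The endpoint URI and token normally come from the job message / service endpoint authorization.
        jobServer.InitializeResultsClient(new Uri("https://results.example.githubapp.com/"), accessToken);
        // planId, jobId and stepId are the backend ids the results service expects, passed as strings.
        await jobServer.CreateStepSummaryAsync("plan-1234", "job-5678", "step-9abc",
                                               "/tmp/step_summary.md-scrubbed", token);
    }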
diff --git a/src/Runner.Common/JobServerQueue.cs b/src/Runner.Common/JobServerQueue.cs
index d84614c15..fb97ec732 100644
--- a/src/Runner.Common/JobServerQueue.cs
+++ b/src/Runner.Common/JobServerQueue.cs
@@ -20,6 +20,7 @@
         void Start(Pipelines.AgentJobRequestMessage jobRequest);
         void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
         void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
+        void QueueSummaryUpload(Guid timelineId, Guid timelineRecordId, string stepId, string name, string path, bool deleteSource);
         void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord);
     }
 
@@ -30,6 +31,7 @@
         private static readonly TimeSpan _delayForWebConsoleLineDequeue = TimeSpan.FromMilliseconds(500);
         private static readonly TimeSpan _delayForTimelineUpdateDequeue = TimeSpan.FromMilliseconds(500);
         private static readonly TimeSpan _delayForFileUploadDequeue = TimeSpan.FromMilliseconds(1000);
+        private static readonly TimeSpan _delayForSummaryUploadDequeue = TimeSpan.FromMilliseconds(1000);
 
         // Job message information
         private Guid _scopeIdentifier;
@@ -44,6 +46,8 @@
         // queue for file upload (log file or attachment)
         private readonly ConcurrentQueue<UploadFileInfo> _fileUploadQueue = new();
 
+        private readonly ConcurrentQueue<SummaryUploadFileInfo> _summaryFileUploadQueue = new();
+
         // queue for timeline or timeline record update (one queue per timeline)
         private readonly ConcurrentDictionary<Guid, ConcurrentQueue<TimelineRecord>> _timelineUpdateQueue = new();
 
@@ -56,6 +60,7 @@
         // Task for each queue's dequeue process
         private Task _webConsoleLineDequeueTask;
         private Task _fileUploadDequeueTask;
+        private Task _summaryUploadDequeueTask;
         private Task _timelineUpdateDequeueTask;
 
         // common
@@ -93,6 +98,20 @@
             _jobServer.InitializeWebsocketClient(serviceEndPoint);
 
+            // This code is usually wrapped by an instance of IExecutionContext which isn't available here.
+            jobRequest.Variables.TryGetValue("system.github.results_endpoint", out VariableValue resultsEndpointVariable);
+            var resultsReceiverEndpoint = resultsEndpointVariable?.Value;
+
+            if (serviceEndPoint?.Authorization != null &&
+                serviceEndPoint.Authorization.Parameters.TryGetValue("AccessToken", out var accessToken) &&
+                !string.IsNullOrEmpty(accessToken) &&
+                !string.IsNullOrEmpty(resultsReceiverEndpoint))
+            {
+                Trace.Info("Initializing results client");
+                _jobServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), accessToken);
+            }
+
             if (_queueInProcess)
             {
                 Trace.Info("No-opt, all queue process tasks are running.");
@@ -120,10 +139,13 @@
             Trace.Info("Start process file upload queue.");
             _fileUploadDequeueTask = ProcessFilesUploadQueueAsync();
 
+            Trace.Info("Start results file upload queue.");
+            _summaryUploadDequeueTask = ProcessSummaryUploadQueueAsync();
+
             Trace.Info("Start process timeline update queue.");
             _timelineUpdateDequeueTask = ProcessTimelinesUpdateQueueAsync();
 
-            _allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask };
+            _allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask, _summaryUploadDequeueTask };
             _queueInProcess = true;
         }
@@ -154,6 +176,10 @@
             await ProcessFilesUploadQueueAsync(runOnce: true);
             Trace.Info("File upload queue drained.");
 
+            Trace.Verbose("Draining results summary upload queue.");
+            await ProcessSummaryUploadQueueAsync(runOnce: true);
+            Trace.Info("Results summary upload queue drained.");
+
             // ProcessTimelinesUpdateQueueAsync() will throw exception during shutdown
             // if there is any timeline records that failed to update contains output variabls.
             Trace.Verbose("Draining timeline update queue.");
@@ -204,6 +230,28 @@
             _fileUploadQueue.Enqueue(newFile);
         }
 
+        public void QueueSummaryUpload(Guid timelineId, Guid timelineRecordId, string stepId, string name, string path, bool deleteSource)
+        {
+            ArgUtil.NotEmpty(timelineId, nameof(timelineId));
+            ArgUtil.NotEmpty(timelineRecordId, nameof(timelineRecordId));
+
+            // All parameters are validated and the file path exists by the time we get here.
+            var newFile = new SummaryUploadFileInfo()
+            {
+                TimelineId = timelineId,
+                TimelineRecordId = timelineRecordId,
+                Name = name,
+                Path = path,
+                PlanId = _planId.ToString(),
+                JobId = _jobTimelineRecordId.ToString(),
+                StepId = stepId,
+                DeleteSource = deleteSource
+            };
+
+            Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to record {1}", newFile.Path, timelineRecordId);
+            _summaryFileUploadQueue.Enqueue(newFile);
+        }
+
         public void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord)
         {
             ArgUtil.NotEmpty(timelineId, nameof(timelineId));
@@ -299,7 +347,7 @@
                 {
                     try
                     {
-                        // Give at most 60s for each request.
+                        // Give at most 60s for each request.
                         using (var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(60)))
                         {
                             await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, timeoutTokenSource.Token);
@@ -394,6 +442,60 @@
             }
         }
 
+        private async Task ProcessSummaryUploadQueueAsync(bool runOnce = false)
+        {
+            Trace.Info("Starting results-based upload queue...");
+
+            while (!_jobCompletionSource.Task.IsCompleted || runOnce)
+            {
+                List<SummaryUploadFileInfo> filesToUpload = new();
+                SummaryUploadFileInfo dequeueFile;
+                while (_summaryFileUploadQueue.TryDequeue(out dequeueFile))
+                {
+                    filesToUpload.Add(dequeueFile);
+                    // Process at most 10 file uploads per pass.
+                    if (!runOnce && filesToUpload.Count > 10)
+                    {
+                        break;
+                    }
+                }
+
+                if (filesToUpload.Count > 0)
+                {
+                    if (runOnce)
+                    {
+                        Trace.Info($"Uploading {filesToUpload.Count} summary files in one shot through results service.");
+                    }
+
+                    int errorCount = 0;
+                    foreach (var file in filesToUpload)
+                    {
+                        try
+                        {
+                            await UploadSummaryFile(file);
+                        }
+                        catch (Exception ex)
+                        {
+                            Trace.Info("Catch exception during summary file upload to results, keep going since the process is best effort.");
+                            Trace.Error(ex);
+                            errorCount++;
+                        }
+                    }
+
+                    Trace.Info("Tried to upload {0} summary files to results, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
+                }
+
+                if (runOnce)
+                {
+                    break;
+                }
+                else
+                {
+                    await Task.Delay(_delayForSummaryUploadDequeue);
+                }
+            }
+        }
+
         private async Task ProcessTimelinesUpdateQueueAsync(bool runOnce = false)
         {
             while (!_jobCompletionSource.Task.IsCompleted || runOnce)
             {
@@ -665,6 +767,35 @@
                 }
             }
         }
+
+        private async Task UploadSummaryFile(SummaryUploadFileInfo file)
+        {
+            bool uploadSucceed = false;
+            try
+            {
+                // Upload the step summary
+                Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
+                var cancellationTokenSource = new CancellationTokenSource();
+                await _jobServer.CreateStepSummaryAsync(file.PlanId, file.JobId, file.StepId, file.Path, cancellationTokenSource.Token);
+
+                uploadSucceed = true;
+            }
+            finally
+            {
+                if (uploadSucceed && file.DeleteSource)
+                {
+                    try
+                    {
+                        File.Delete(file.Path);
+                    }
+                    catch (Exception ex)
+                    {
+                        Trace.Info("Catch exception during deletion of a successfully uploaded summary file.");
+                        Trace.Error(ex);
+                    }
+                }
+            }
+        }
     }
 
     internal class PendingTimelineRecord
     {
@@ -683,6 +814,19 @@
         public bool DeleteSource { get; set; }
     }
 
+    internal class SummaryUploadFileInfo
+    {
+        public Guid TimelineId { get; set; }
+        public Guid TimelineRecordId { get; set; }
+        public string Name { get; set; }
+        public string Path { get; set; }
+        public string PlanId { get; set; }
+        public string JobId { get; set; }
+        public string StepId { get; set; }
+        public bool DeleteSource { get; set; }
+    }
+
+
     internal class ConsoleLineInfo
     {
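As with the other lazy queues, producers only enqueue; the background task, or the drain pass during ShutdownAsync, performs the actual upload. A hypothetical producer-side call (identifiers from the diff; all values illustrative), assuming a jobServerQueue whose Start() has already run for the current job:

    // Hypothetical producer-side usage of QueueSummaryUpload; not part of the patch.
    Guid mainTimelineId = Guid.NewGuid();  // in the runner: the job timeline id
    Guid stepRecordId = Guid.NewGuid();    // in the runner: the record of the step that wrote the summary
    jobServerQueue.QueueSummaryUpload(
        timelineId: mainTimelineId,
        timelineRecordId: stepRecordId,
        stepId: stepRecordId.ToString(),   // the results service takes the step backend id as a string
        name: stepRecordId.ToString(),
        path: "/runner/_work/_temp/_runner_file_commands/step_summary_abc-scrubbed", // illustrative path
        deleteSource: false);
    // Nothing is sent here; ProcessSummaryUploadQueueAsync uploads it on a later pass,
    // and ShutdownAsync drains anything still queued when the job completes.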
File does not exist."); + } + + _jobServerQueue.QueueSummaryUpload(_mainTimelineId, _record.Id, stepId, name, filePath, deleteSource: false); + } + // Add OnMatcherChanged public void Add(OnMatcherChanged handler) { @@ -1085,7 +1099,7 @@ namespace GitHub.Runner.Worker { if (contextData != null && contextData.TryGetValue(PipelineTemplateConstants.Vars, out var varsPipelineContextData) && - varsPipelineContextData != null && + varsPipelineContextData != null && varsPipelineContextData is DictionaryContextData varsContextData) { // Set debug variables only when StepDebug/RunnerDebug variables are not present. diff --git a/src/Runner.Worker/FileCommandManager.cs b/src/Runner.Worker/FileCommandManager.cs index 66362f681..600db078a 100644 --- a/src/Runner.Worker/FileCommandManager.cs +++ b/src/Runner.Worker/FileCommandManager.cs @@ -182,14 +182,6 @@ namespace GitHub.Runner.Worker return; } - if (fileSize > AttachmentSizeLimit) - { - context.Error(String.Format(Constants.Runner.UnsupportedSummarySize, AttachmentSizeLimit / 1024, fileSize / 1024)); - Trace.Info($"Step Summary file ({filePath}) is too large ({fileSize} bytes); skipping attachment upload"); - - return; - } - Trace.Verbose($"Step Summary file exists: {filePath} and has a file size of {fileSize} bytes"); var scrubbedFilePath = filePath + "-scrubbed"; @@ -204,13 +196,32 @@ namespace GitHub.Runner.Worker } } - var attachmentName = !context.IsEmbedded - ? context.Id.ToString() + var attachmentName = !context.IsEmbedded + ? context.Id.ToString() : context.EmbeddedId.ToString(); - Trace.Info($"Queueing file ({filePath}) for attachment upload ({attachmentName})"); - // Attachments must be added to the parent context (job), not the current context (step) - context.Root.QueueAttachFile(ChecksAttachmentType.StepSummary, attachmentName, scrubbedFilePath); + context.Global.Variables.TryGetValue("system.github.results_endpoint", out string resultsReceiverEndpoint); + if (resultsReceiverEndpoint != null) + { + Trace.Info($"Queueing results file ({filePath}) for attachment upload ({attachmentName})"); + var stepId = context.Id.ToString(); + // Attachments must be added to the parent context (job), not the current context (step) + context.Root.QueueSummaryFile(attachmentName, scrubbedFilePath, stepId); + } + else + { + if (fileSize > AttachmentSizeLimit) + { + context.Error(String.Format(Constants.Runner.UnsupportedSummarySize, AttachmentSizeLimit / 1024, fileSize / 1024)); + Trace.Info($"Step Summary file ({filePath}) is too large ({fileSize} bytes); skipping attachment upload"); + + return; + } + + Trace.Info($"Queueing file ({filePath}) for attachment upload ({attachmentName})"); + // Attachments must be added to the parent context (job), not the current context (step) + context.Root.QueueAttachFile(ChecksAttachmentType.StepSummary, attachmentName, scrubbedFilePath); + } } catch (Exception e) { diff --git a/src/Sdk/DTWebApi/WebApi/TaskAttachment.cs b/src/Sdk/DTWebApi/WebApi/TaskAttachment.cs index fac99a417..17027d126 100644 --- a/src/Sdk/DTWebApi/WebApi/TaskAttachment.cs +++ b/src/Sdk/DTWebApi/WebApi/TaskAttachment.cs @@ -27,7 +27,6 @@ namespace GitHub.DistributedTask.WebApi this.Type = type; this.Name = name; } - [DataMember] public String Type diff --git a/src/Sdk/WebApi/WebApi/Contracts.cs b/src/Sdk/WebApi/WebApi/Contracts.cs new file mode 100644 index 000000000..d240cc1e2 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts.cs @@ -0,0 +1,60 @@ +using System.Runtime.Serialization; +using Newtonsoft.Json; +using Newtonsoft.Json.Serialization; + 
diff --git a/src/Sdk/WebApi/WebApi/Contracts.cs b/src/Sdk/WebApi/WebApi/Contracts.cs
new file mode 100644
index 000000000..d240cc1e2
--- /dev/null
+++ b/src/Sdk/WebApi/WebApi/Contracts.cs
@@ -0,0 +1,60 @@
+using System.Runtime.Serialization;
+using Newtonsoft.Json;
+using Newtonsoft.Json.Serialization;
+
+namespace GitHub.Services.Results.Contracts
+{
+    [DataContract]
+    [JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
+    public class GetSignedStepSummaryURLRequest
+    {
+        [DataMember]
+        public string WorkflowJobRunBackendId;
+        [DataMember]
+        public string WorkflowRunBackendId;
+        [DataMember]
+        public string StepBackendId;
+    }
+
+    [DataContract]
+    [JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
+    public class GetSignedStepSummaryURLResponse
+    {
+        [DataMember]
+        public string SummaryUrl;
+        [DataMember]
+        public long SoftSizeLimit;
+        [DataMember]
+        public string BlobStorageType;
+    }
+
+    [DataContract]
+    [JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
+    public class StepSummaryMetadataCreate
+    {
+        [DataMember]
+        public string StepBackendId;
+        [DataMember]
+        public string WorkflowRunBackendId;
+        [DataMember]
+        public string WorkflowJobRunBackendId;
+        [DataMember]
+        public long Size;
+        [DataMember]
+        public string UploadedAt;
+    }
+
+    [DataContract]
+    [JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
+    public class CreateStepSummaryMetadataResponse
+    {
+        [DataMember]
+        public bool Ok;
+    }
+
+    public static class BlobStorageTypes
+    {
+        public static readonly string AzureBlobStorage = "BLOB_STORAGE_TYPE_AZURE";
+        public static readonly string Unspecified = "BLOB_STORAGE_TYPE_UNSPECIFIED";
+    }
+}
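The SnakeCaseNamingStrategy on these contracts means the PascalCase members serialize to snake_case keys in the Twirp JSON payloads. A small sketch with illustrative ids (not part of the patch):

    // What the request body looks like on the wire when serialized with Json.NET.
    using Newtonsoft.Json;
    using GitHub.Services.Results.Contracts;

    var request = new GetSignedStepSummaryURLRequest
    {
        WorkflowRunBackendId = "plan-1234",      // illustrative ids
        WorkflowJobRunBackendId = "job-5678",
        StepBackendId = "step-9abc"
    };
    string json = JsonConvert.SerializeObject(request);
    // => {"workflow_job_run_backend_id":"job-5678","workflow_run_backend_id":"plan-1234","step_backend_id":"step-9abc"}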
diff --git a/src/Sdk/WebApi/WebApi/ResultsHttpClient.cs b/src/Sdk/WebApi/WebApi/ResultsHttpClient.cs
new file mode 100644
index 000000000..674abb20d
--- /dev/null
+++ b/src/Sdk/WebApi/WebApi/ResultsHttpClient.cs
@@ -0,0 +1,138 @@
+using System;
+using System.IO;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using System.Threading;
+using System.Threading.Tasks;
+using GitHub.Services.Results.Contracts;
+using System.Net.Http.Formatting;
+using Sdk.WebApi.WebApi;
+
+namespace GitHub.Services.Results.Client
+{
+    public class ResultsHttpClient : RawHttpClientBase
+    {
+        public ResultsHttpClient(
+            Uri baseUrl,
+            HttpMessageHandler pipeline,
+            string token,
+            bool disposeHandler)
+            : base(baseUrl, pipeline, disposeHandler)
+        {
+            m_token = token;
+            m_resultsServiceUrl = baseUrl;
+            m_formatter = new JsonMediaTypeFormatter();
+        }
+
+        public async Task<GetSignedStepSummaryURLResponse> GetStepSummaryUploadUrlAsync(string planId, string jobId, string stepId, CancellationToken cancellationToken)
+        {
+            var request = new GetSignedStepSummaryURLRequest()
+            {
+                WorkflowJobRunBackendId = jobId,
+                WorkflowRunBackendId = planId,
+                StepBackendId = stepId
+            };
+
+            var stepSummaryUploadRequest = new Uri(m_resultsServiceUrl, "twirp/results.services.receiver.Receiver/GetStepSummarySignedBlobURL");
+
+            using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, stepSummaryUploadRequest))
+            {
+                requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
+                requestMessage.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));
+
+                using (HttpContent content = new ObjectContent<GetSignedStepSummaryURLRequest>(request, m_formatter))
+                {
+                    requestMessage.Content = content;
+                    using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
+                    {
+                        return await ReadJsonContentAsync<GetSignedStepSummaryURLResponse>(response, cancellationToken);
+                    }
+                }
+            }
+        }
+
+        private async Task StepSummaryUploadCompleteAsync(string planId, string jobId, string stepId, long size, CancellationToken cancellationToken)
+        {
+            var timestamp = DateTime.UtcNow.ToString("yyyy-MM-dd'T'HH:mm:ss.fffK");
+            var request = new StepSummaryMetadataCreate()
+            {
+                WorkflowJobRunBackendId = jobId,
+                WorkflowRunBackendId = planId,
+                StepBackendId = stepId,
+                Size = size,
+                UploadedAt = timestamp
+            };
+
+            var stepSummaryUploadCompleteRequest = new Uri(m_resultsServiceUrl, "twirp/results.services.receiver.Receiver/CreateStepSummaryMetadata");
+
+            using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, stepSummaryUploadCompleteRequest))
+            {
+                requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
+                requestMessage.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));
+
+                using (HttpContent content = new ObjectContent<StepSummaryMetadataCreate>(request, m_formatter))
+                {
+                    requestMessage.Content = content;
+                    using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
+                    {
+                        var jsonResponse = await ReadJsonContentAsync<CreateStepSummaryMetadataResponse>(response, cancellationToken);
+                        if (!jsonResponse.Ok)
+                        {
+                            throw new Exception($"Failed to mark step summary upload as complete, status code: {response.StatusCode}, ok: {jsonResponse.Ok}, size: {size}, timestamp: {timestamp}");
+                        }
+                    }
+                }
+            }
+        }
+
+        private async Task<HttpResponseMessage> UploadFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
+        {
+            // Upload the file to the url
+            var request = new HttpRequestMessage(HttpMethod.Put, url)
+            {
+                Content = new StreamContent(file)
+            };
+
+            if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
+            {
+                request.Content.Headers.Add("x-ms-blob-type", "BlockBlob");
+            }
+
+            using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
+            {
+                if (!response.IsSuccessStatusCode)
+                {
+                    throw new Exception($"Failed to upload file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
+                }
+                return response;
+            }
+        }
+
+        // Handle file upload for step summary
+        public async Task UploadStepSummaryAsync(string planId, string jobId, string stepId, string file, CancellationToken cancellationToken)
+        {
+            // Get the upload url
+            var uploadUrlResponse = await GetStepSummaryUploadUrlAsync(planId, jobId, stepId, cancellationToken);
+
+            // Do we want to throw an exception here or should we just be uploading/truncating the data
+            var fileSize = new FileInfo(file).Length;
+            if (fileSize > uploadUrlResponse.SoftSizeLimit)
+            {
+                throw new Exception($"File size is larger than the upload url allows, file size: {fileSize}, soft size limit: {uploadUrlResponse.SoftSizeLimit}");
+            }
+
+            // Upload the file
+            using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
+            {
+                var response = await UploadFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
+            }
+
+            // Send step summary upload complete message
+            await StepSummaryUploadCompleteAsync(planId, jobId, stepId, fileSize, cancellationToken);
+        }
+
+        private MediaTypeFormatter m_formatter;
+        private Uri m_resultsServiceUrl;
+        private string m_token;
+    }
+}
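End to end, the client implements a three-step protocol: request a signed URL, PUT the file to blob storage, then post the completion metadata. A hypothetical, self-contained usage sketch with illustrative endpoint and ids (not part of the patch):

    // Hypothetical end-to-end usage of ResultsHttpClient; all values are placeholders.
    using System;
    using System.Net.Http;
    using System.Threading;
    using System.Threading.Tasks;
    using GitHub.Services.Results.Client;

    class StepSummaryUploadExample
    {
        static async Task Main()
        {
            // In the runner these come from the job message; hard-coded here for illustration.
            var endpoint = new Uri("https://results.example.githubapp.com/");
            var token = Environment.GetEnvironmentVariable("RESULTS_TOKEN");

            var client = new ResultsHttpClient(endpoint, new HttpClientHandler(), token, disposeHandler: true);

            // planId/jobId/stepId map to workflow_run_backend_id / workflow_job_run_backend_id / step_backend_id.
            await client.UploadStepSummaryAsync(
                planId: "plan-1234",
                jobId: "job-5678",
                stepId: "step-9abc",
                file: "/tmp/step_summary.md",
                cancellationToken: CancellationToken.None);
            // Under the hood: POST GetStepSummarySignedBlobURL -> PUT the file to the signed URL
            // (with x-ms-blob-type for Azure) -> POST CreateStepSummaryMetadata to finalize.
        }
    }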