Compare commits

...

6 Commits

Author SHA1 Message Date
ruvceskistefan
3f6b4be9af Issue 1596: Runner throws null ref exception when new line after EOF is missing (#1687)
* Issue 1596: runner throws nullref exception when writing env var

* Adding tests for missing new line after EOF marker

* Changing newline to new line
2022-02-24 12:57:42 +00:00
Nikola Jokic
b3938017b5 Repaired hashFiles call so if error was thrown, it was returned to process invoker (#1678)
* hashFiles.ts added exit status on promise action

* generated layoutbin/hashfiles/index.js
2022-02-24 12:57:42 +00:00
ruvceskistefan
acb46a3e1e Issue 1261: inconsistency of outputs (both canceled and cancelled are used) (#1624)
* Issue 1261: inconsistency of outputs

* Changing cancelled to canceled in one error message
2022-02-24 12:57:42 +00:00
Tingluo Huang
5e90b3ebb5 Add SHA to useragent. (#1694) 2022-02-24 12:57:42 +00:00
Thomas Boop
493a0bd8fc Revert "revert node12 version due to fs.copyFileSync hang https://git… (#1651)
* Revert "revert node12 version due to fs.copyFileSync hang https://github.com/actions/runner/issues/1536 (#1537)"

bef164a12f

* check hashes before tests because tests rely on right values + update hashes

* fix tests

* use hc trace
2022-02-24 12:57:42 +00:00
Nikola Jokic
e5d19459a7 RunnerService.js added logic to fail on N attempts 2022-02-17 14:08:01 +01:00
23 changed files with 219 additions and 104 deletions

View File

@@ -50,13 +50,6 @@ jobs:
           ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
         working-directory: src

-      # Run tests
-      - name: L0
-        run: |
-          ${{ matrix.devScript }} test
-        working-directory: src
-        if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'
-
       # Check runtime/externals hash
       - name: Compute/Compare runtime and externals Hash
         shell: bash
@@ -80,6 +73,13 @@ jobs:
           DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
           EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}

+      # Run tests
+      - name: L0
+        run: |
+          ${{ matrix.devScript }} test
+        working-directory: src
+        if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'
+
       # Create runner package tar.gz/zip
       - name: Package Release
         if: github.event_name != 'pull_request'

View File

@@ -1 +1 @@
-6ca4a0e1c50b7079ead05321dcf5835c1c25f23dc632add8c1c4667d416d103e
+6ed30a2c1ee403a610d63e82bb230b9ba846a9c25cec9e4ea8672fb6ed4e1a51

View File

@@ -1 +1 @@
-b5951dc607d782d9c7571a7224e940eb0975bb23c54ff25c7afdbf959a417081
+711c30c51ec52c9b7a9a2eb399d6ab2ab5ee1dc72de11879f2f36f919f163d78

View File

@@ -1 +1 @@
-af819e92011cc9cbca90e8299f9f7651f2cf6bf45b42920f9a4ca22795486147
+a49479ca4b4988a06c097e8d22c51fd08a11c13f40807366236213d0e008cf6a

View File

@@ -1 +1 @@
-aa0e6bf4bfaabf48c962ea3b262dca042629ab332005f73d282faec908847036
+8e97df75230b843462a9b4c578ccec604ee4b4a1066120c85b04374317fa372b

View File

@@ -1 +1 @@
-40328cff2b8229f9b578f32739183bd8f6aab481c21dadc052b09f1c7e8e4665
+f75a671e5a188c76680739689aa75331a2c09d483dce9c80023518c48fd67a18

View File

@@ -53,3 +53,11 @@ async function run(): Promise<void> {
   }
 }
 run()
+  .then(out => {
+    console.log(out)
+    process.exit(0)
+  })
+  .catch(err => {
+    console.error(err)
+    process.exit(1)
+  })
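For context, the chained `.then`/`.catch` means a failure inside `run()` now ends the Node process with exit code 1 instead of leaving it to exit 0, so the runner's process invoker can detect the error. A minimal sketch of checking that by hand, using the generated script path from the commit message (the script is invoked here without the inputs the runner normally supplies, so a failure is expected; the point is the exit code):

```bash
# Hypothetical manual invocation of the generated script; the runner normally spawns it itself.
node layoutbin/hashfiles/index.js
echo "hashFiles exit code: $?"   # non-zero now signals the thrown error to the invoker
```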

View File

@@ -3,7 +3,7 @@ PACKAGERUNTIME=$1
 PRECACHE=$2

 NODE_URL=https://nodejs.org/dist
-NODE12_VERSION="12.13.1"
+NODE12_VERSION="12.22.7"
 NODE16_VERSION="16.13.0"

 get_abs_path() {
@@ -143,7 +143,7 @@ fi
 # Download the external tools for Linux PACKAGERUNTIMEs.
 if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
     acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir
-    acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
+    acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-v${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
     acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
 fi
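The second hunk adds the missing `v` prefix to the Alpine node12 archive name, so it follows the same naming scheme the node16 entries already use. A quick sanity check of the corrected expansion with the bumped version:

```bash
# Verify the corrected URL expansion (the filename now carries the "v" prefix, matching node16).
NODE12_VERSION="12.22.7"
echo "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-v${NODE12_VERSION}-alpine-x64.tar.gz"
```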

View File

@@ -3,94 +3,136 @@
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

var childProcess = require("child_process");
var path = require("path");
const { exit } = require("process");

var supported = ["linux", "darwin"];

if (supported.indexOf(process.platform) == -1) {
  console.log("Unsupported platform: " + process.platform);
  console.log("Supported platforms are: " + supported.toString());
  process.exit(1);
}

var stopping = false;
var listener = null;
var exitServiceAfterNFailures = Number(
  process.env.GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES
);

if (exitServiceAfterNFailures <= 0) {
  exitServiceAfterNFailures = NaN;
}

var consecutiveFailureCount = 0;

var gracefulShutdown = function (code) {
  console.log("Shutting down runner listener");
  stopping = true;
  if (listener) {
    console.log("Sending SIGINT to runner listener to stop");
    listener.kill("SIGINT");

    console.log("Sending SIGKILL to runner listener");
    setTimeout(() => listener.kill("SIGKILL"), 30000).unref();
  }
};

var runService = function () {
  var listenerExePath = path.join(__dirname, "../bin/Runner.Listener");
  var interactive = process.argv[2] === "interactive";

  if (!stopping) {
    try {
      if (interactive) {
        console.log("Starting Runner listener interactively");
        listener = childProcess.spawn(listenerExePath, ["run"], {
          env: process.env,
        });
      } else {
        console.log("Starting Runner listener with startup type: service");
        listener = childProcess.spawn(
          listenerExePath,
          ["run", "--startuptype", "service"],
          { env: process.env }
        );
      }

      console.log(`Started listener process, pid: ${listener.pid}`);

      listener.stdout.on("data", (data) => {
        if (data.toString("utf8").includes("Listening for Jobs")) {
          consecutiveFailureCount = 0;
        }
        process.stdout.write(data.toString("utf8"));
      });

      listener.stderr.on("data", (data) => {
        process.stdout.write(data.toString("utf8"));
      });

      listener.on("error", (err) => {
        console.log(`Runner listener fail to start with error ${err.message}`);
      });

      listener.on("close", (code) => {
        console.log(`Runner listener exited with error code ${code}`);

        if (code === 0) {
          console.log(
            "Runner listener exit with 0 return code, stop the service, no retry needed."
          );
          stopping = true;
        } else if (code === 1) {
          console.log(
            "Runner listener exit with terminated error, stop the service, no retry needed."
          );
          stopping = true;
        } else if (code === 2) {
          console.log(
            "Runner listener exit with retryable error, re-launch runner in 5 seconds."
          );
          consecutiveFailureCount = 0;
        } else if (code === 3 || code === 4) {
          console.log(
            "Runner listener exit because of updating, re-launch runner in 5 seconds."
          );
          consecutiveFailureCount = 0;
        } else {
          var messagePrefix = "Runner listener exit with undefined return code";
          consecutiveFailureCount++;
          if (
            !isNaN(exitServiceAfterNFailures) &&
            consecutiveFailureCount >= exitServiceAfterNFailures
          ) {
            console.error(
              `${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
            );
            gracefulShutdown(5);
            return;
          } else {
            console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
          }
        }

        if (!stopping) {
          setTimeout(runService, 5000);
        }
      });
    } catch (ex) {
      console.log(ex);
    }
  }
};

runService();
console.log("Started running service");

process.on("SIGINT", () => {
  gracefulShutdown(0);
});

process.on("SIGTERM", () => {
  gracefulShutdown(0);
});
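The new retry cap is read once at startup from `GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES`; unset, zero, or negative values are coerced to `NaN`, which disables the cap, and the failure counter resets whenever the listener reports "Listening for Jobs" or exits with a retryable/update code. A minimal sketch of enabling it, assuming the service wrapper script passes its environment through to RunnerService.js:

```bash
# Assumption: ./bin/runsvc.sh (or your service unit) launches RunnerService.js with this environment.
export GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES=5   # stop the service after 5 consecutive listener failures
./bin/runsvc.sh
# Leaving the variable unset (or <= 0) keeps the previous behavior: retry every 5 seconds indefinitely.
```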

View File

@@ -1621,7 +1621,15 @@ function run() {
         }
     });
 }
-run();
+run()
+  .then(out => {
+    console.log(out);
+    process.exit(0);
+  })
+  .catch(err => {
+    console.error(err);
+    process.exit(1);
+  });

 /***/ }),

View File

@@ -216,6 +216,8 @@ namespace GitHub.Runner.Common
                 _userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture)));
                 _userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture)));
             }
+
+            _userAgents.Add(new ProductInfoHeaderValue("CommitSHA", BuildConstants.Source.CommitHash));
         }

         public string GetDirectory(WellKnownDirectory directory)

View File

@@ -285,7 +285,7 @@ namespace GitHub.Runner.Listener
                     {
                         // at this point, the job execution might encounter some dead lock and even not able to be cancelled.
                         // no need to localize the exception string should never happen.
-                        throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be canceled within 45 seconds.");
+                        throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be cancelled within 45 seconds.");
                     }
                 }
                 else
@@ -363,7 +363,7 @@ namespace GitHub.Runner.Listener
                 Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
                 Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);

-                // wait till first renew succeed or job request is canceled
+                // wait till first renew succeed or job request is cancelled
                 // not even start worker if the first renew fail
                 await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
@@ -704,7 +704,7 @@ namespace GitHub.Runner.Listener
             {
                 // OperationCanceledException may caused by http timeout or _lockRenewalTokenSource.Cance();
                 // Stop renew only on cancellation token fired.
-                Trace.Info($"job renew has been canceled, stop renew job request {requestId}.");
+                Trace.Info($"job renew has been cancelled, stop renew job request {requestId}.");
                 return;
             }
             catch (Exception ex)
@@ -762,7 +762,7 @@ namespace GitHub.Runner.Listener
                 }
                 catch (OperationCanceledException) when (token.IsCancellationRequested)
                 {
-                    Trace.Info($"job renew has been canceled, stop renew job request {requestId}.");
+                    Trace.Info($"job renew has been cancelled, stop renew job request {requestId}.");
                 }
             }
             else

View File

@@ -504,7 +504,7 @@ namespace GitHub.Runner.Listener
             }
             catch (OperationCanceledException) when (token.IsCancellationRequested)
             {
-                Trace.Info($"Runner download has been canceled.");
+                Trace.Info($"Runner download has been cancelled.");
                 throw;
             }
             catch (Exception ex)

View File

@@ -166,7 +166,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
                 }
                 else
                 {
-                    // delete the index.lock file left by previous canceled build or any operation cause git.exe crash last time.
+                    // delete the index.lock file left by previous cancelled build or any operation cause git.exe crash last time.
                     string lockFile = Path.Combine(targetPath, ".git\\index.lock");
                     if (File.Exists(lockFile))
                     {
@@ -181,7 +181,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
                     }
                 }

-                // delete the shallow.lock file left by previous canceled build or any operation cause git.exe crash last time.
+                // delete the shallow.lock file left by previous cancelled build or any operation cause git.exe crash last time.
                 string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock");
                 if (File.Exists(shallowLockFile))
                 {

View File

@@ -150,7 +150,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
                 }
                 else
                 {
-                    // delete the index.lock file left by previous canceled build or any operation cause git.exe crash last time.
+                    // delete the index.lock file left by previous cancelled build or any operation cause git.exe crash last time.
                     string lockFile = Path.Combine(targetPath, ".git\\index.lock");
                     if (File.Exists(lockFile))
                     {
@@ -165,7 +165,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
                     }
                 }

-                // delete the shallow.lock file left by previous canceled build or any operation cause git.exe crash last time.
+                // delete the shallow.lock file left by previous cancelled build or any operation cause git.exe crash last time.
                 string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock");
                 if (File.Exists(shallowLockFile))
                 {

View File

@@ -108,7 +108,7 @@ namespace GitHub.Runner.Sdk
             }

             // Create a new token source for the parallel query. The parallel query should be
-            // canceled after the first error is encountered. Otherwise the number of exceptions
+            // cancelled after the first error is encountered. Otherwise the number of exceptions
             // could get out of control for a large directory with access denied on every file.
             using (var tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken))
             {

View File

@@ -654,7 +654,7 @@ namespace GitHub.Runner.Worker
                     actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
                     break;
                 }
-                catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is canceled.
+                catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is cancelled.
                 {
                     // UnresolvableActionDownloadInfoException is a 422 client error, don't retry
                     // Some possible cases are:

View File

@@ -181,6 +181,10 @@ namespace GitHub.Runner.Worker
                     {
                         throw new Exception($"Invalid environment variable value. Matching delimiter not found '{delimiter}'");
                     }
+                    if (newline == null)
+                    {
+                        throw new Exception($"Invalid environment variable value. EOF marker missing new line.");
+                    }
                     endIndex = index - newline.Length;
                     tempLine = ReadLine(text, ref index, out newline);
                 }

View File

@@ -299,7 +299,7 @@ namespace GitHub.Runner.Worker.Handlers
                 // Register job cancellation call back only if job cancellation token not been fire before each step run
                 if (!ExecutionContext.Root.CancellationToken.IsCancellationRequested)
                 {
-                    // Test the condition again. The job was canceled after the condition was originally evaluated.
+                    // Test the condition again. The job was cancelled after the condition was originally evaluated.
                     jobCancelRegister = ExecutionContext.Root.CancellationToken.Register(() =>
                     {
                         // Mark job as cancelled
@@ -399,7 +399,7 @@ namespace GitHub.Runner.Worker.Handlers
                     jobCancelRegister = null;
                 }
             }

-            // Check failed or canceled
+            // Check failed or cancelled
             if (step.ExecutionContext.Result == TaskResult.Failed || step.ExecutionContext.Result == TaskResult.Canceled)
             {
                 Trace.Info($"Update job result with current composite step result '{step.ExecutionContext.Result}'.");

View File

@@ -131,9 +131,9 @@ namespace GitHub.Runner.Worker
             }
             catch (OperationCanceledException ex) when (jobContext.CancellationToken.IsCancellationRequested)
             {
-                // set the job to canceled
+                // set the job to cancelled
                 // don't log error issue to job ExecutionContext, since server owns the job level issue
-                Trace.Error($"Job is canceled during initialize.");
+                Trace.Error($"Job is cancelled during initialize.");
                 Trace.Error($"Caught exception: {ex}");
                 return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Canceled);
             }

View File

@@ -130,7 +130,7 @@ namespace GitHub.Runner.Worker
                 // Register job cancellation call back only if job cancellation token not been fire before each step run
                 if (!jobContext.CancellationToken.IsCancellationRequested)
                 {
-                    // Test the condition again. The job was canceled after the condition was originally evaluated.
+                    // Test the condition again. The job was cancelled after the condition was originally evaluated.
                     jobCancelRegister = jobContext.CancellationToken.Register(() =>
                     {
                         // Mark job as cancelled

View File

@@ -774,8 +774,15 @@ namespace GitHub.Runner.Common.Tests.Listener
                     var traceFile = Path.GetTempFileName();
                     File.Copy(hc.TraceFileName, traceFile, true);

+                    if (File.ReadAllText(traceFile).Contains("Use trimmed (runtime+externals) package"))
+                    {
                     Assert.Contains("Something wrong with the trimmed runner package, failback to use the full package for runner updates", File.ReadAllText(traceFile));
+                    }
+                    else
+                    {
+                        hc.GetTrace().Warning("Skipping the 'TestSelfUpdateAsync_FallbackToFullPackage' test, as the `externals` or `runtime` hashes have been updated");
+                    }
                 }
             }
             finally
             {

View File

@@ -302,6 +302,50 @@ namespace GitHub.Runner.Common.Tests.Worker
             }
         }

+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public void SetEnvFileCommand_Heredoc_MissingNewLine()
+        {
+            using (var hostContext = Setup())
+            {
+                var envFile = Path.Combine(_rootDirectory, "heredoc");
+                var content = new List<string>
+                {
+                    "MY_ENV<<EOF",
+                    "line one",
+                    "line two",
+                    "line three",
+                    "EOF",
+                };
+                WriteContent(envFile, content, " ");
+                var ex = Assert.Throws<Exception>(() => _setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null));
+                Assert.Contains("Matching delimiter not found", ex.Message);
+            }
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public void SetEnvFileCommand_Heredoc_MissingNewLineMultipleLinesEnv()
+        {
+            using (var hostContext = Setup())
+            {
+                var envFile = Path.Combine(_rootDirectory, "heredoc");
+                var content = new List<string>
+                {
+                    "MY_ENV<<EOF",
+                    @"line one
+line two
+line three",
+                    "EOF",
+                };
+                WriteContent(envFile, content, " ");
+                var ex = Assert.Throws<Exception>(() => _setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null));
+                Assert.Contains("EOF marker missing new line", ex.Message);
+            }
+        }
+
 #if OS_WINDOWS
         [Fact]
         [Trait("Level", "L0")]