Mirror of https://github.com/actions/runner.git, synced 2025-12-10 20:36:49 +00:00

Compare commits: v2.313.0 ... fhammerl/f (6 commits)

| SHA1 |
|---|
| 3f6b4be9af |
| b3938017b5 |
| acb46a3e1e |
| 5e90b3ebb5 |
| 493a0bd8fc |
| e5d19459a7 |

.github/workflows/build.yml (vendored): 14 changed lines

```diff
@@ -50,13 +50,6 @@ jobs:
           ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
         working-directory: src

-      # Run tests
-      - name: L0
-        run: |
-          ${{ matrix.devScript }} test
-        working-directory: src
-        if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'
-
       # Check runtime/externals hash
       - name: Compute/Compare runtime and externals Hash
         shell: bash
@@ -80,6 +73,13 @@ jobs:
          DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
          EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}

+      # Run tests
+      - name: L0
+        run: |
+          ${{ matrix.devScript }} test
+        working-directory: src
+        if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'
+
       # Create runner package tar.gz/zip
       - name: Package Release
         if: github.event_name != 'pull_request'
```

src/Misc/contentHash/externals/linux-arm (vendored): 2 changed lines

```diff
@@ -1 +1 @@
-6ca4a0e1c50b7079ead05321dcf5835c1c25f23dc632add8c1c4667d416d103e
+6ed30a2c1ee403a610d63e82bb230b9ba846a9c25cec9e4ea8672fb6ed4e1a51
```

src/Misc/contentHash/externals/linux-arm64 (vendored): 2 changed lines

```diff
@@ -1 +1 @@
-b5951dc607d782d9c7571a7224e940eb0975bb23c54ff25c7afdbf959a417081
+711c30c51ec52c9b7a9a2eb399d6ab2ab5ee1dc72de11879f2f36f919f163d78
```

src/Misc/contentHash/externals/linux-x64 (vendored): 2 changed lines

```diff
@@ -1 +1 @@
-af819e92011cc9cbca90e8299f9f7651f2cf6bf45b42920f9a4ca22795486147
+a49479ca4b4988a06c097e8d22c51fd08a11c13f40807366236213d0e008cf6a
```

src/Misc/contentHash/externals/osx-x64 (vendored): 2 changed lines

```diff
@@ -1 +1 @@
-aa0e6bf4bfaabf48c962ea3b262dca042629ab332005f73d282faec908847036
+8e97df75230b843462a9b4c578ccec604ee4b4a1066120c85b04374317fa372b
```

src/Misc/contentHash/externals/win-x64 (vendored): 2 changed lines

```diff
@@ -1 +1 @@
-40328cff2b8229f9b578f32739183bd8f6aab481c21dadc052b09f1c7e8e4665
+f75a671e5a188c76680739689aa75331a2c09d483dce9c80023518c48fd67a18
```
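
The five vendored contentHash files above pin a SHA-256 for each platform's trimmed externals layout; they change in this comparison because the bundled Node 12 runtime moves to 12.22.7 (see the externals.sh hunk below). As a rough illustration only, and not necessarily the exact scheme the build scripts use, a directory content hash of this shape can be computed like this:

```typescript
// Illustrative sketch: a deterministic SHA-256 over every file in a directory,
// similar in spirit to the vendored contentHash files above. This is an assumed
// scheme for illustration, not the runner build's actual hashing code.
import * as crypto from "crypto";
import * as fs from "fs";
import * as path from "path";

function walk(dir: string): string[] {
  // Collect files depth-first, sorted by name, so the ordering is stable.
  const out: string[] = [];
  const entries = fs
    .readdirSync(dir, { withFileTypes: true })
    .sort((a, b) => a.name.localeCompare(b.name));
  for (const entry of entries) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) out.push(...walk(full));
    else out.push(full);
  }
  return out;
}

function contentHash(root: string): string {
  const hash = crypto.createHash("sha256");
  for (const file of walk(root)) {
    hash.update(path.relative(root, file)); // include the path so renames change the hash
    hash.update(fs.readFileSync(file));     // include the file bytes
  }
  return hash.digest("hex");
}

// Path taken from the workflow's hashFiles pattern above; adjust as needed.
console.log(contentHash("_layout_trims/externals"));
```

Hashing relative paths together with file bytes means both renames and content edits invalidate the pinned value.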

```diff
@@ -53,3 +53,11 @@ async function run(): Promise<void> {
 }

 run()
+  .then(out => {
+    console.log(out)
+    process.exit(0)
+  })
+  .catch(err => {
+    console.error(err)
+    process.exit(1)
+  })
```
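
This hunk makes the script's exit status explicit: success and failure now map to exit codes 0 and 1, rather than depending on how the Node runtime handles an unhandled promise rejection. A minimal, self-contained sketch of the same pattern (the `run()` body here is a placeholder, not the runner's real implementation):

```typescript
// Placeholder run(): the real one is in the hunk above; only the .then/.catch
// termination pattern is the point of this sketch.
async function run(): Promise<string> {
  if (process.argv.includes("--fail")) {
    throw new Error("simulated failure");
  }
  return "simulated result";
}

run()
  .then(out => {
    console.log(out);
    process.exit(0); // success becomes an explicit exit code 0
  })
  .catch(err => {
    console.error(err);
    process.exit(1); // any rejection becomes a reliable non-zero exit code
  });
```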

```diff
@@ -3,7 +3,7 @@ PACKAGERUNTIME=$1
 PRECACHE=$2

 NODE_URL=https://nodejs.org/dist
-NODE12_VERSION="12.13.1"
+NODE12_VERSION="12.22.7"
 NODE16_VERSION="16.13.0"

 get_abs_path() {
@@ -143,7 +143,7 @@ fi
 # Download the external tools for Linux PACKAGERUNTIMEs.
 if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
     acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir
-    acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
+    acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-v${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
     acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
     acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
 fi
```
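
Besides bumping `NODE12_VERSION` to 12.22.7, this hunk fixes the alpine download path, which was missing the `v` prefix in the archive name. A TypeScript rendering of the two URL templates, purely to make the one-character difference visible (the actual download is done by `acquireExternalTool` in the shell script, not by this sketch):

```typescript
// Illustration only: the URL templates from the externals.sh hunk above.
const NODE12_VERSION = "12.22.7";
const base = `https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64`;

const before = `${base}/node-${NODE12_VERSION}-alpine-x64.tar.gz`;  // no "v" before the version
const after = `${base}/node-v${NODE12_VERSION}-alpine-x64.tar.gz`;  // the form the script now requests

console.log(before);
console.log(after);
```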

```diff
@@ -3,94 +3,136 @@
 // Licensed under the MIT license. See LICENSE file in the project root for full license information.

 var childProcess = require("child_process");
-var path = require("path")
+var path = require("path");
+const { exit } = require("process");

-var supported = ['linux', 'darwin']
+var supported = ["linux", "darwin"];

 if (supported.indexOf(process.platform) == -1) {
-    console.log('Unsupported platform: ' + process.platform);
-    console.log('Supported platforms are: ' + supported.toString());
-    process.exit(1);
+  console.log("Unsupported platform: " + process.platform);
+  console.log("Supported platforms are: " + supported.toString());
+  process.exit(1);
 }

 var stopping = false;
 var listener = null;
+var exitServiceAfterNFailures = Number(
+  process.env.GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES
+);

-var runService = function () {
-    var listenerExePath = path.join(__dirname, '../bin/Runner.Listener');
-    var interactive = process.argv[2] === "interactive";
-
-    if (!stopping) {
-        try {
-            if (interactive) {
-                console.log('Starting Runner listener interactively');
-                listener = childProcess.spawn(listenerExePath, ['run'], { env: process.env });
-            } else {
-                console.log('Starting Runner listener with startup type: service');
-                listener = childProcess.spawn(listenerExePath, ['run', '--startuptype', 'service'], { env: process.env });
-            }
-
-            console.log(`Started listener process, pid: ${listener.pid}`);
-
-            listener.stdout.on('data', (data) => {
-                process.stdout.write(data.toString('utf8'));
-            });
-
-            listener.stderr.on('data', (data) => {
-                process.stdout.write(data.toString('utf8'));
-            });
-
-            listener.on("error", (err) => {
-                console.log(`Runner listener fail to start with error ${err.message}`);
-            });
-
-            listener.on('close', (code) => {
-                console.log(`Runner listener exited with error code ${code}`);
-
-                if (code === 0) {
-                    console.log('Runner listener exit with 0 return code, stop the service, no retry needed.');
-                    stopping = true;
-                } else if (code === 1) {
-                    console.log('Runner listener exit with terminated error, stop the service, no retry needed.');
-                    stopping = true;
-                } else if (code === 2) {
-                    console.log('Runner listener exit with retryable error, re-launch runner in 5 seconds.');
-                } else if (code === 3) {
-                    console.log('Runner listener exit because of updating, re-launch runner in 5 seconds.');
-                } else {
-                    console.log('Runner listener exit with undefined return code, re-launch runner in 5 seconds.');
-                }
-
-                if (!stopping) {
-                    setTimeout(runService, 5000);
-                }
-            });
-
-        } catch (ex) {
-            console.log(ex);
-        }
-    }
+if (exitServiceAfterNFailures <= 0) {
+  exitServiceAfterNFailures = NaN;
 }

-runService();
-console.log('Started running service');
+var consecutiveFailureCount = 0;

 var gracefulShutdown = function (code) {
-    console.log('Shutting down runner listener');
-    stopping = true;
-    if (listener) {
-        console.log('Sending SIGINT to runner listener to stop');
-        listener.kill('SIGINT');
-
-        console.log('Sending SIGKILL to runner listener');
-        setTimeout(() => listener.kill('SIGKILL'), 30000).unref();
-    }
-}
+  console.log("Shutting down runner listener");
+  stopping = true;
+  if (listener) {
+    console.log("Sending SIGINT to runner listener to stop");
+    listener.kill("SIGINT");
+
+    console.log("Sending SIGKILL to runner listener");
+    setTimeout(() => listener.kill("SIGKILL"), 30000).unref();
+  }
+};
+
+var runService = function () {
+  var listenerExePath = path.join(__dirname, "../bin/Runner.Listener");
+  var interactive = process.argv[2] === "interactive";
+
+  if (!stopping) {
+    try {
+      if (interactive) {
+        console.log("Starting Runner listener interactively");
+        listener = childProcess.spawn(listenerExePath, ["run"], {
+          env: process.env,
+        });
+      } else {
+        console.log("Starting Runner listener with startup type: service");
+        listener = childProcess.spawn(
+          listenerExePath,
+          ["run", "--startuptype", "service"],
+          { env: process.env }
+        );
+      }
+
+      console.log(`Started listener process, pid: ${listener.pid}`);
+
+      listener.stdout.on("data", (data) => {
+        if (data.toString("utf8").includes("Listening for Jobs")) {
+          consecutiveFailureCount = 0;
+        }
+        process.stdout.write(data.toString("utf8"));
+      });
+
+      listener.stderr.on("data", (data) => {
+        process.stdout.write(data.toString("utf8"));
+      });
+
+      listener.on("error", (err) => {
+        console.log(`Runner listener fail to start with error ${err.message}`);
+      });
+
+      listener.on("close", (code) => {
+        console.log(`Runner listener exited with error code ${code}`);
+
+        if (code === 0) {
+          console.log(
+            "Runner listener exit with 0 return code, stop the service, no retry needed."
+          );
+          stopping = true;
+        } else if (code === 1) {
+          console.log(
+            "Runner listener exit with terminated error, stop the service, no retry needed."
+          );
+          stopping = true;
+        } else if (code === 2) {
+          console.log(
+            "Runner listener exit with retryable error, re-launch runner in 5 seconds."
+          );
+          consecutiveFailureCount = 0;
+        } else if (code === 3 || code === 4) {
+          console.log(
+            "Runner listener exit because of updating, re-launch runner in 5 seconds."
+          );
+          consecutiveFailureCount = 0;
+        } else {
+          var messagePrefix = "Runner listener exit with undefined return code";
+          consecutiveFailureCount++;
+          if (
+            !isNaN(exitServiceAfterNFailures) &&
+            consecutiveFailureCount >= exitServiceAfterNFailures
+          ) {
+            console.error(
+              `${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
+            );
+            gracefulShutdown(5);
+            return;
+          } else {
+            console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
+          }
+        }
+
+        if (!stopping) {
+          setTimeout(runService, 5000);
+        }
+      });
+    } catch (ex) {
+      console.log(ex);
+    }
+  }
+};
+
+runService();
+console.log("Started running service");

-process.on('SIGINT', () => {
+process.on("SIGINT", () => {
   gracefulShutdown(0);
 });

-process.on('SIGTERM', () => {
+process.on("SIGTERM", () => {
   gracefulShutdown(0);
 });
```
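
The service wrapper above now exits after a configurable number of consecutive unexplained listener failures, controlled by `GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES`. The gate relies on `Number()`/`NaN` semantics: an unset, non-numeric, or non-positive value disables the limit, because every comparison against `NaN` is false. A standalone sketch of just that parsing and check (not the wrapper itself):

```typescript
// Standalone sketch of the env-var parsing used by the service wrapper above.
function parseExitAfterNFailures(raw: string | undefined): number {
  let n = Number(raw); // undefined -> NaN, "abc" -> NaN, "5" -> 5
  if (n <= 0) {
    n = NaN;           // 0 and negative values also disable the limit
  }
  return n;
}

function shouldExit(consecutiveFailures: number, limit: number): boolean {
  // NaN makes this false for any failure count, i.e. "never exit".
  return !isNaN(limit) && consecutiveFailures >= limit;
}

console.log(shouldExit(3, parseExitAfterNFailures(undefined))); // false: no limit set
console.log(shouldExit(3, parseExitAfterNFailures("0")));       // false: disabled
console.log(shouldExit(3, parseExitAfterNFailures("3")));       // true: limit reached
```

The counter itself resets whenever the listener prints "Listening for Jobs" on stdout or exits with a retryable or update code, so only back-to-back unexplained failures count toward the limit.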

```diff
@@ -1621,7 +1621,15 @@ function run() {
         }
     });
 }
-run();
+run()
+  .then(out => {
+    console.log(out);
+    process.exit(0);
+  })
+  .catch(err => {
+    console.error(err);
+    process.exit(1);
+  });


 /***/ }),
```

```diff
@@ -216,6 +216,8 @@ namespace GitHub.Runner.Common
                 _userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture)));
                 _userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture)));
             }
+
+            _userAgents.Add(new ProductInfoHeaderValue("CommitSHA", BuildConstants.Source.CommitHash));
         }

         public string GetDirectory(WellKnownDirectory directory)
```

```diff
@@ -285,7 +285,7 @@ namespace GitHub.Runner.Listener
                     {
                         // at this point, the job execution might encounter some dead lock and even not able to be cancelled.
                         // no need to localize the exception string should never happen.
-                        throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be canceled within 45 seconds.");
+                        throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be cancelled within 45 seconds.");
                     }
                 }
                 else
@@ -363,7 +363,7 @@ namespace GitHub.Runner.Listener
                     Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
                     Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);

-                    // wait till first renew succeed or job request is canceled
+                    // wait till first renew succeed or job request is cancelled
                     // not even start worker if the first renew fail
                     await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));

@@ -704,7 +704,7 @@ namespace GitHub.Runner.Listener
            {
                 // OperationCanceledException may caused by http timeout or _lockRenewalTokenSource.Cance();
                 // Stop renew only on cancellation token fired.
-                Trace.Info($"job renew has been canceled, stop renew job request {requestId}.");
+                Trace.Info($"job renew has been cancelled, stop renew job request {requestId}.");
                 return;
             }
             catch (Exception ex)
@@ -762,7 +762,7 @@ namespace GitHub.Runner.Listener
                 }
                 catch (OperationCanceledException) when (token.IsCancellationRequested)
                 {
-                    Trace.Info($"job renew has been canceled, stop renew job request {requestId}.");
+                    Trace.Info($"job renew has been cancelled, stop renew job request {requestId}.");
                 }
             }
             else
```

```diff
@@ -504,7 +504,7 @@ namespace GitHub.Runner.Listener
             }
             catch (OperationCanceledException) when (token.IsCancellationRequested)
             {
-                Trace.Info($"Runner download has been canceled.");
+                Trace.Info($"Runner download has been cancelled.");
                 throw;
             }
             catch (Exception ex)
```

```diff
@@ -166,7 +166,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
             }
             else
             {
-                // delete the index.lock file left by previous canceled build or any operation cause git.exe crash last time.
+                // delete the index.lock file left by previous cancelled build or any operation cause git.exe crash last time.
                 string lockFile = Path.Combine(targetPath, ".git\\index.lock");
                 if (File.Exists(lockFile))
                 {
@@ -181,7 +181,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
                 }
             }

-            // delete the shallow.lock file left by previous canceled build or any operation cause git.exe crash last time.
+            // delete the shallow.lock file left by previous cancelled build or any operation cause git.exe crash last time.
             string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock");
             if (File.Exists(shallowLockFile))
             {
```

```diff
@@ -150,7 +150,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
             }
             else
             {
-                // delete the index.lock file left by previous canceled build or any operation cause git.exe crash last time.
+                // delete the index.lock file left by previous cancelled build or any operation cause git.exe crash last time.
                 string lockFile = Path.Combine(targetPath, ".git\\index.lock");
                 if (File.Exists(lockFile))
                 {
@@ -165,7 +165,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
                 }
             }

-            // delete the shallow.lock file left by previous canceled build or any operation cause git.exe crash last time.
+            // delete the shallow.lock file left by previous cancelled build or any operation cause git.exe crash last time.
             string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock");
             if (File.Exists(shallowLockFile))
             {
```

```diff
@@ -108,7 +108,7 @@ namespace GitHub.Runner.Sdk
             }

             // Create a new token source for the parallel query. The parallel query should be
-            // canceled after the first error is encountered. Otherwise the number of exceptions
+            // cancelled after the first error is encountered. Otherwise the number of exceptions
             // could get out of control for a large directory with access denied on every file.
             using (var tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken))
             {
```

```diff
@@ -654,7 +654,7 @@ namespace GitHub.Runner.Worker
                     actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
                     break;
                 }
-                catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is canceled.
+                catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is cancelled.
                 {
                     // UnresolvableActionDownloadInfoException is a 422 client error, don't retry
                     // Some possible cases are:
```

```diff
@@ -181,6 +181,10 @@ namespace GitHub.Runner.Worker
                 {
                     throw new Exception($"Invalid environment variable value. Matching delimiter not found '{delimiter}'");
                 }
+                if (newline == null)
+                {
+                    throw new Exception($"Invalid environment variable value. EOF marker missing new line.");
+                }
                 endIndex = index - newline.Length;
                 tempLine = ReadLine(text, ref index, out newline);
             }
```
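
The added `newline == null` guard turns a malformed heredoc-style value in an env file into an explicit error instead of a miscomputed offset. For context, this is the `NAME<<DELIMITER ... DELIMITER` format that `$GITHUB_ENV` accepts for multiline values; the sketch below only illustrates the expected file shape (written in TypeScript for consistency with the other examples here; the real parser is the C# code above):

```typescript
// Illustration of a well-formed $GITHUB_ENV heredoc entry. If the pieces are not
// separated by real newlines (the malformed case exercised by the new L0 tests
// further down), the parser now reports "Matching delimiter not found" or
// "EOF marker missing new line" instead of silently producing a wrong value.
import * as fs from "fs";

const delimiter = "EOF";
const entry = [
  `MY_ENV<<${delimiter}`,
  "line one",
  "line two",
  "line three",
  delimiter,
].join("\n") + "\n";

// GITHUB_ENV points at the runner's env file inside a job; fall back to a local
// file so this sketch also runs outside of Actions.
fs.appendFileSync(process.env.GITHUB_ENV ?? "github_env.txt", entry);
```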

```diff
@@ -299,7 +299,7 @@ namespace GitHub.Runner.Worker.Handlers
                 // Register job cancellation call back only if job cancellation token not been fire before each step run
                 if (!ExecutionContext.Root.CancellationToken.IsCancellationRequested)
                 {
-                    // Test the condition again. The job was canceled after the condition was originally evaluated.
+                    // Test the condition again. The job was cancelled after the condition was originally evaluated.
                     jobCancelRegister = ExecutionContext.Root.CancellationToken.Register(() =>
                     {
                         // Mark job as cancelled
@@ -399,7 +399,7 @@ namespace GitHub.Runner.Worker.Handlers
                     jobCancelRegister = null;
                 }
             }
-            // Check failed or canceled
+            // Check failed or cancelled
             if (step.ExecutionContext.Result == TaskResult.Failed || step.ExecutionContext.Result == TaskResult.Canceled)
             {
                 Trace.Info($"Update job result with current composite step result '{step.ExecutionContext.Result}'.");
```

```diff
@@ -131,9 +131,9 @@ namespace GitHub.Runner.Worker
             }
             catch (OperationCanceledException ex) when (jobContext.CancellationToken.IsCancellationRequested)
             {
-                // set the job to canceled
+                // set the job to cancelled
                 // don't log error issue to job ExecutionContext, since server owns the job level issue
-                Trace.Error($"Job is canceled during initialize.");
+                Trace.Error($"Job is cancelled during initialize.");
                 Trace.Error($"Caught exception: {ex}");
                 return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Canceled);
             }
```

```diff
@@ -130,7 +130,7 @@ namespace GitHub.Runner.Worker
             // Register job cancellation call back only if job cancellation token not been fire before each step run
             if (!jobContext.CancellationToken.IsCancellationRequested)
             {
-                // Test the condition again. The job was canceled after the condition was originally evaluated.
+                // Test the condition again. The job was cancelled after the condition was originally evaluated.
                 jobCancelRegister = jobContext.CancellationToken.Register(() =>
                 {
                     // Mark job as cancelled
```

```diff
@@ -774,7 +774,14 @@ namespace GitHub.Runner.Common.Tests.Listener

                     var traceFile = Path.GetTempFileName();
                     File.Copy(hc.TraceFileName, traceFile, true);
-                    Assert.Contains("Something wrong with the trimmed runner package, failback to use the full package for runner updates", File.ReadAllText(traceFile));
+                    if (File.ReadAllText(traceFile).Contains("Use trimmed (runtime+externals) package"))
+                    {
+                        Assert.Contains("Something wrong with the trimmed runner package, failback to use the full package for runner updates", File.ReadAllText(traceFile));
+                    }
+                    else
+                    {
+                        hc.GetTrace().Warning("Skipping the 'TestSelfUpdateAsync_FallbackToFullPackage' test, as the `externals` or `runtime` hashes have been updated");
+                    }
                 }
             }
             finally
```

```diff
@@ -302,6 +302,50 @@ namespace GitHub.Runner.Common.Tests.Worker
                 }
             }

+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public void SetEnvFileCommand_Heredoc_MissingNewLine()
+        {
+            using (var hostContext = Setup())
+            {
+                var envFile = Path.Combine(_rootDirectory, "heredoc");
+                var content = new List<string>
+                {
+                    "MY_ENV<<EOF",
+                    "line one",
+                    "line two",
+                    "line three",
+                    "EOF",
+                };
+                WriteContent(envFile, content, " ");
+                var ex = Assert.Throws<Exception>(() => _setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null));
+                Assert.Contains("Matching delimiter not found", ex.Message);
+            }
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public void SetEnvFileCommand_Heredoc_MissingNewLineMultipleLinesEnv()
+        {
+            using (var hostContext = Setup())
+            {
+                var envFile = Path.Combine(_rootDirectory, "heredoc");
+                var content = new List<string>
+                {
+                    "MY_ENV<<EOF",
+                    @"line one
+line two
+line three",
+                    "EOF",
+                };
+                WriteContent(envFile, content, " ");
+                var ex = Assert.Throws<Exception>(() => _setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null));
+                Assert.Contains("EOF marker missing new line", ex.Message);
+            }
+        }
+
 #if OS_WINDOWS
         [Fact]
         [Trait("Level", "L0")]
```