Compare commits

...

15 Commits

Author SHA1 Message Date
Salman Muin Kayser Chishti
18f53d3c9e Include workflows in write-all when feature flag is enabled
The write-all permission level should include workflows:write when the
AllowWorkflowsPermission feature flag is enabled, matching the behavior
of other gated permissions like copilot-requests. Previously workflows
was unconditionally excluded from write-all. This aligns with the ADR
decision that write-all means permissive access.
2026-04-11 08:49:41 +01:00
Salman Muin Kayser Chishti
273538003e Add workflows permission scope to WorkflowParser
Add 'workflows' as a recognized permission scope for GITHUB_TOKEN,
gated behind AllowWorkflowsPermission feature flag.

Changes:
- Permissions.cs: Add Workflows property, copy constructor, comparison
  key mapping. Excluded from write-all/read-all bulk constructors.
- WorkflowTemplateConverter.cs: Parse 'workflows' permission with
  feature flag guard. Read downgrades to NoAccess (write-only scope).
- WorkflowFeatures.cs: Add AllowWorkflowsPermission flag, default false.
2026-04-10 23:33:55 +01:00
Salman Chishti
4a587ada27 feat: add job.workflow_* typed accessors to JobContext (#4335) 2026-04-10 19:39:33 +01:00
Copilot
182a433782 Bump System.Formats.Asn1, Cryptography.Pkcs, ProtectedData, ServiceController, CodePages, Threading.Channels, @actions/glob, @typescript-eslint/parser, lint-staged, picomatch (#4333)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: Salman Chishti <salmanmkc@GitHub.com>
2026-04-10 12:40:28 +01:00
Salman Chishti
8d35e710da fix: only show changed versions in node upgrade PR description (#4332) 2026-04-10 11:34:08 +01:00
dependabot[bot]
2bcc65e864 Bump typescript from 5.9.3 to 6.0.2 in /src/Misc/expressionFunc/hashFiles (#4329)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Salman Chishti <salmanmkc@GitHub.com>
2026-04-10 09:01:34 +01:00
dependabot[bot]
1ba5fdfd88 Bump actions/github-script from 8 to 9 (#4331)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-10 08:54:56 +01:00
dependabot[bot]
580116c18b Bump @typescript-eslint/eslint-plugin from 8.57.2 to 8.58.1 in /src/Misc/expressionFunc/hashFiles (#4327)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Salman Chishti <salmanmkc@GitHub.com>
2026-04-08 07:44:38 +00:00
github-actions[bot]
c9a1751d87 Update Docker to v29.3.1 and Buildx to v0.33.0 (#4324)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Salman Chishti <salmanmkc@GitHub.com>
2026-04-08 08:40:32 +01:00
Francesco Renzi
7711dc53e2 Add devtunnel connection for debugger jobs (#4317) 2026-04-07 12:51:33 +00:00
dependabot[bot]
df507886cb Bump brace-expansion in /src/Misc/expressionFunc/hashFiles (#4318)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Salman Chishti <salmanmkc@GitHub.com>
2026-04-01 13:19:42 +01:00
Tingluo Huang
5c6dd47e76 Add support for Bearer token in action archive downloads (#4321) 2026-03-31 17:51:01 -04:00
Stefan Penner
7ff994b932 Batch and deduplicate action resolution across composite depths (#4296)
Co-authored-by: Stefan Penner <spenner@linkedin.com>
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-31 13:28:43 -04:00
github-actions[bot]
b9275b59cf chore: update Node versions (#4319)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2026-03-30 12:58:17 +00:00
eric sciple
f0c228635e Remove AllowCaseFunction feature flag (#4316) 2026-03-27 11:45:42 -05:00
44 changed files with 2354 additions and 1021 deletions

View File

@@ -99,7 +99,7 @@ jobs:
- name: Get latest runner version
id: latest_runner
uses: actions/github-script@v8
uses: actions/github-script@v9
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |

View File

@@ -26,7 +26,7 @@ jobs:
- name: Compute image version
id: image
uses: actions/github-script@v8
uses: actions/github-script@v9
with:
script: |
const fs = require('fs');

View File

@@ -159,18 +159,36 @@ jobs:
git config --global user.name "github-actions[bot]"
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
# Build version summary for commit message and PR body (only include changed versions)
COMMIT_VERSIONS=""
PR_VERSION_LINES=""
if [ "${{ steps.node-versions.outputs.needs_update20 }}" == "true" ]; then
COMMIT_VERSIONS="20: $NODE20_VERSION"
PR_VERSION_LINES="- Node 20: ${{ steps.node-versions.outputs.current_node20 }} → $NODE20_VERSION"
fi
if [ "${{ steps.node-versions.outputs.needs_update24 }}" == "true" ]; then
if [ -n "$COMMIT_VERSIONS" ]; then
COMMIT_VERSIONS="$COMMIT_VERSIONS, 24: $NODE24_VERSION"
else
COMMIT_VERSIONS="24: $NODE24_VERSION"
fi
PR_VERSION_LINES="${PR_VERSION_LINES:+$PR_VERSION_LINES
}- Node 24: ${{ steps.node-versions.outputs.current_node24 }} → $NODE24_VERSION"
fi
# Create branch and commit changes
branch_name="chore/update-node"
git checkout -b "$branch_name"
git commit -a -m "chore: update Node versions (20: $NODE20_VERSION, 24: $NODE24_VERSION)"
git commit -a -m "chore: update Node versions ($COMMIT_VERSIONS)"
git push --force origin "$branch_name"
# Create PR body using here-doc for proper formatting
cat > pr_body.txt << EOF
Automated Node.js version update:
- Node 20: ${{ steps.node-versions.outputs.current_node20 }} → $NODE20_VERSION
- Node 24: ${{ steps.node-versions.outputs.current_node24 }} → $NODE24_VERSION
$PR_VERSION_LINES
This update ensures we're using the latest stable Node.js versions for security and performance improvements.

View File

@@ -16,7 +16,7 @@ jobs:
# Make sure ./releaseVersion match ./src/runnerversion
# Query GitHub release ensure version is not used
- name: Check version
uses: actions/github-script@v8
uses: actions/github-script@v9
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
@@ -171,7 +171,7 @@ jobs:
# Create ReleaseNote file
- name: Create ReleaseNote
id: releaseNote
uses: actions/github-script@v8
uses: actions/github-script@v9
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
@@ -300,7 +300,7 @@ jobs:
- name: Compute image version
id: image
uses: actions/github-script@v8
uses: actions/github-script@v9
with:
script: |
const fs = require('fs');

View File

@@ -5,8 +5,8 @@ ARG TARGETOS
ARG TARGETARCH
ARG RUNNER_VERSION
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.7.0
ARG DOCKER_VERSION=29.3.0
ARG BUILDX_VERSION=0.32.1
ARG DOCKER_VERSION=29.3.1
ARG BUILDX_VERSION=0.33.0
RUN apt update -y && apt install curl unzip -y

File diff suppressed because it is too large Load Diff

View File

@@ -32,20 +32,20 @@
"author": "GitHub Actions",
"license": "MIT",
"dependencies": {
"@actions/glob": "^0.4.0"
"@actions/glob": "^0.6.1"
},
"devDependencies": {
"@stylistic/eslint-plugin": "^5.10.0",
"@types/node": "^22.0.0",
"@typescript-eslint/eslint-plugin": "^8.57.2",
"@typescript-eslint/parser": "^8.0.0",
"@typescript-eslint/eslint-plugin": "^8.58.1",
"@typescript-eslint/parser": "^8.58.1",
"@vercel/ncc": "^0.38.3",
"eslint": "^8.47.0",
"eslint-plugin-github": "^4.10.2",
"eslint-plugin-prettier": "^5.0.0",
"husky": "^9.1.7",
"lint-staged": "^15.5.0",
"lint-staged": "^16.4.0",
"prettier": "^3.0.3",
"typescript": "^5.9.3"
"typescript": "^6.0.2"
}
}

View File

@@ -6,8 +6,8 @@ NODE_URL=https://nodejs.org/dist
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
# When you update Node versions you must also create a new release of alpine_nodejs at that updated version.
# Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started
NODE20_VERSION="20.20.1"
NODE24_VERSION="24.14.0"
NODE20_VERSION="20.20.2"
NODE24_VERSION="24.14.1"
get_abs_path() {
# exploits the fact that pwd will print abs path when no args

View File

@@ -177,6 +177,8 @@ namespace GitHub.Runner.Common
public static readonly string SetOrchestrationIdEnvForActions = "actions_set_orchestration_id_env_for_actions";
public static readonly string SendJobLevelAnnotations = "actions_send_job_level_annotations";
public static readonly string EmitCompositeMarkers = "actions_runner_emit_composite_markers";
public static readonly string BatchActionResolution = "actions_batch_action_resolution";
public static readonly string UseBearerTokenForCodeload = "actions_use_bearer_token_for_codeload";
}
// Node version migration related constants

View File

@@ -17,9 +17,9 @@
<ItemGroup>
<PackageReference Include="Microsoft.Win32.Registry" Version="5.0.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="8.0.0" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageReference Include="System.Threading.Channels" Version="8.0.0" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="10.0.3" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="10.0.3" />
<PackageReference Include="System.Threading.Channels" Version="10.0.3" />
</ItemGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">

View File

@@ -12,6 +12,13 @@ namespace GitHub.Runner.Common
private ISecretMasker _secretMasker;
private TraceSource _traceSource;
/// <summary>
/// The underlying <see cref="System.Diagnostics.TraceSource"/> for this instance.
/// Useful when third-party libraries require a <see cref="System.Diagnostics.TraceSource"/>
/// to route their diagnostics into the runner's log infrastructure.
/// </summary>
public TraceSource Source => _traceSource;
public Tracing(string name, ISecretMasker secretMasker, SourceSwitch sourceSwitch, HostTraceListener traceListener, StdoutTraceListener stdoutTraceListener = null)
{
ArgUtil.NotNull(secretMasker, nameof(secretMasker));

View File

@@ -22,8 +22,8 @@
<PackageReference Include="Microsoft.Win32.Registry" Version="5.0.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="System.IO.FileSystem.AccessControl" Version="5.0.0" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="8.0.0" />
<PackageReference Include="System.ServiceProcess.ServiceController" Version="8.0.1" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="10.0.3" />
<PackageReference Include="System.ServiceProcess.ServiceController" Version="10.0.3" />
</ItemGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">

View File

@@ -15,9 +15,9 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="10.0.3" />
<PackageReference Include="Microsoft.Win32.Registry" Version="5.0.0" />
<PackageReference Include="System.Threading.Channels" Version="8.0.0" />
<PackageReference Include="System.Threading.Channels" Version="10.0.3" />
</ItemGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">

View File

@@ -79,6 +79,13 @@ namespace GitHub.Runner.Worker
PreStepTracker = new Dictionary<Guid, IActionRunner>()
};
var containerSetupSteps = new List<JobExtensionRunner>();
var batchActionResolution = (executionContext.Global.Variables.GetBoolean(Constants.Runner.Features.BatchActionResolution) ?? false)
|| StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION"));
// Stack-local cache: same action (owner/repo@ref) is resolved only once,
// even if it appears at multiple depths in a composite tree.
var resolvedDownloadInfos = batchActionResolution
? new Dictionary<string, WebApi.ActionDownloadInfo>(StringComparer.Ordinal)
: null;
var depth = 0;
// We are running at the start of a job
if (rootStepId == default(Guid))
@@ -105,7 +112,9 @@ namespace GitHub.Runner.Worker
PrepareActionsState result = new PrepareActionsState();
try
{
result = await PrepareActionsRecursiveAsync(executionContext, state, actions, depth, rootStepId);
result = batchActionResolution
? await PrepareActionsRecursiveAsync(executionContext, state, actions, resolvedDownloadInfos, depth, rootStepId)
: await PrepareActionsRecursiveLegacyAsync(executionContext, state, actions, depth, rootStepId);
}
catch (FailedToResolveActionDownloadInfoException ex)
{
@@ -169,7 +178,192 @@ namespace GitHub.Runner.Worker
return new PrepareResult(containerSetupSteps, result.PreStepTracker);
}
private async Task<PrepareActionsState> PrepareActionsRecursiveAsync(IExecutionContext executionContext, PrepareActionsState state, IEnumerable<Pipelines.ActionStep> actions, Int32 depth = 0, Guid parentStepId = default(Guid))
private async Task<PrepareActionsState> PrepareActionsRecursiveAsync(IExecutionContext executionContext, PrepareActionsState state, IEnumerable<Pipelines.ActionStep> actions, Dictionary<string, WebApi.ActionDownloadInfo> resolvedDownloadInfos, Int32 depth = 0, Guid parentStepId = default(Guid))
{
ArgUtil.NotNull(executionContext, nameof(executionContext));
if (depth > Constants.CompositeActionsMaxDepth)
{
throw new Exception($"Composite action depth exceeded max depth {Constants.CompositeActionsMaxDepth}");
}
var repositoryActions = new List<Pipelines.ActionStep>();
foreach (var action in actions)
{
if (action.Reference.Type == Pipelines.ActionSourceType.ContainerRegistry)
{
ArgUtil.NotNull(action, nameof(action));
var containerReference = action.Reference as Pipelines.ContainerRegistryReference;
ArgUtil.NotNull(containerReference, nameof(containerReference));
ArgUtil.NotNullOrEmpty(containerReference.Image, nameof(containerReference.Image));
if (!state.ImagesToPull.ContainsKey(containerReference.Image))
{
state.ImagesToPull[containerReference.Image] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'");
state.ImagesToPull[containerReference.Image].Add(action.Id);
}
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository)
{
repositoryActions.Add(action);
}
}
if (repositoryActions.Count > 0)
{
// Resolve download info, skipping any actions already cached.
await ResolveNewActionsAsync(executionContext, repositoryActions, resolvedDownloadInfos);
// Download each action.
foreach (var action in repositoryActions)
{
var lookupKey = GetDownloadInfoLookupKey(action);
if (string.IsNullOrEmpty(lookupKey))
{
continue;
}
if (!resolvedDownloadInfos.TryGetValue(lookupKey, out var downloadInfo))
{
throw new Exception($"Missing download info for {lookupKey}");
}
await DownloadRepositoryActionAsync(executionContext, downloadInfo);
}
// Parse action.yml and collect composite sub-actions for batched
// resolution below. Pre/post step registration is deferred until
// after recursion so that HasPre/HasPost reflect the full subtree.
var nextLevel = new List<(Pipelines.ActionStep action, Guid parentId)>();
foreach (var action in repositoryActions)
{
var setupInfo = PrepareRepositoryActionAsync(executionContext, action);
if (setupInfo != null && setupInfo.Container != null)
{
if (!string.IsNullOrEmpty(setupInfo.Container.Image))
{
if (!state.ImagesToPull.ContainsKey(setupInfo.Container.Image))
{
state.ImagesToPull[setupInfo.Container.Image] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.Container.ActionRepository}' needs to pull image '{setupInfo.Container.Image}'");
state.ImagesToPull[setupInfo.Container.Image].Add(action.Id);
}
else
{
ArgUtil.NotNullOrEmpty(setupInfo.Container.ActionRepository, nameof(setupInfo.Container.ActionRepository));
if (!state.ImagesToBuild.ContainsKey(setupInfo.Container.ActionRepository))
{
state.ImagesToBuild[setupInfo.Container.ActionRepository] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.Container.ActionRepository}' needs to build image '{setupInfo.Container.Dockerfile}'");
state.ImagesToBuild[setupInfo.Container.ActionRepository].Add(action.Id);
state.ImagesToBuildInfo[setupInfo.Container.ActionRepository] = setupInfo.Container;
}
}
else if (setupInfo != null && setupInfo.Steps != null && setupInfo.Steps.Count > 0)
{
foreach (var step in setupInfo.Steps)
{
nextLevel.Add((step, action.Id));
}
}
}
// Resolve all next-level sub-actions in one batch API call,
// then recurse per parent (which hits the cache, not the API).
if (nextLevel.Count > 0)
{
var nextLevelRepoActions = nextLevel
.Where(x => x.action.Reference.Type == Pipelines.ActionSourceType.Repository)
.Select(x => x.action)
.ToList();
await ResolveNewActionsAsync(executionContext, nextLevelRepoActions, resolvedDownloadInfos);
foreach (var group in nextLevel.GroupBy(x => x.parentId))
{
var groupActions = group.Select(x => x.action).ToList();
state = await PrepareActionsRecursiveAsync(executionContext, state, groupActions, resolvedDownloadInfos, depth + 1, group.Key);
}
}
// Register pre/post steps after recursion so that HasPre/HasPost
// are correct (they depend on _cachedEmbeddedPreSteps/PostSteps
// being populated by the recursive calls above).
foreach (var action in repositoryActions)
{
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
var definition = LoadAction(executionContext, action);
if (definition.Data.Execution.HasPre)
{
Trace.Info($"Add 'pre' execution for {action.Id}");
// Root Step
if (depth < 1)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = action;
actionRunner.Stage = ActionRunStage.Pre;
actionRunner.Condition = definition.Data.Execution.InitCondition;
state.PreStepTracker[action.Id] = actionRunner;
}
// Embedded Step
else
{
if (!_cachedEmbeddedPreSteps.ContainsKey(parentStepId))
{
_cachedEmbeddedPreSteps[parentStepId] = new List<Pipelines.ActionStep>();
}
// Clone action so we can modify the condition without affecting the original
var clonedAction = action.Clone() as Pipelines.ActionStep;
clonedAction.Condition = definition.Data.Execution.InitCondition;
_cachedEmbeddedPreSteps[parentStepId].Add(clonedAction);
}
}
if (definition.Data.Execution.HasPost && depth > 0)
{
if (!_cachedEmbeddedPostSteps.ContainsKey(parentStepId))
{
// If we haven't done so already, add the parent to the post steps
_cachedEmbeddedPostSteps[parentStepId] = new Stack<Pipelines.ActionStep>();
}
// Clone action so we can modify the condition without affecting the original
var clonedAction = action.Clone() as Pipelines.ActionStep;
clonedAction.Condition = definition.Data.Execution.CleanupCondition;
_cachedEmbeddedPostSteps[parentStepId].Push(clonedAction);
}
}
else if (depth > 0)
{
// if we're in a composite action and haven't loaded the local action yet
// we assume it has a post step
if (!_cachedEmbeddedPostSteps.ContainsKey(parentStepId))
{
// If we haven't done so already, add the parent to the post steps
_cachedEmbeddedPostSteps[parentStepId] = new Stack<Pipelines.ActionStep>();
}
// Clone action so we can modify the condition without affecting the original
var clonedAction = action.Clone() as Pipelines.ActionStep;
_cachedEmbeddedPostSteps[parentStepId].Push(clonedAction);
}
}
}
return state;
}
/// <summary>
/// Legacy (non-batched) action resolution. Each composite resolves its
/// sub-actions individually, with no cross-depth deduplication.
/// Used when the BatchActionResolution feature flag is disabled.
/// </summary>
private async Task<PrepareActionsState> PrepareActionsRecursiveLegacyAsync(IExecutionContext executionContext, PrepareActionsState state, IEnumerable<Pipelines.ActionStep> actions, Int32 depth = 0, Guid parentStepId = default(Guid))
{
ArgUtil.NotNull(executionContext, nameof(executionContext));
if (depth > Constants.CompositeActionsMaxDepth)
@@ -255,7 +449,7 @@ namespace GitHub.Runner.Worker
}
else if (setupInfo != null && setupInfo.Steps != null && setupInfo.Steps.Count > 0)
{
state = await PrepareActionsRecursiveAsync(executionContext, state, setupInfo.Steps, depth + 1, action.Id);
state = await PrepareActionsRecursiveLegacyAsync(executionContext, state, setupInfo.Steps, depth + 1, action.Id);
}
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
@@ -762,6 +956,33 @@ namespace GitHub.Runner.Worker
return actionDownloadInfos.Actions;
}
/// <summary>
/// Resolves download info for any of <paramref name="actions"/> not already present in
/// <paramref name="resolvedDownloadInfos"/>, adding the results to that cache in place.
/// Results are cached for reuse at deeper recursion levels, so each distinct lookup key
/// is resolved against the service at most once per job.
/// </summary>
/// <param name="executionContext">Execution context passed through to the resolution call.</param>
/// <param name="actions">Candidate action steps; already-cached entries and in-batch duplicates are skipped.</param>
/// <param name="resolvedDownloadInfos">Cache keyed by the action's download-info lookup key; mutated by this method.</param>
private async Task ResolveNewActionsAsync(IExecutionContext executionContext, List<Pipelines.ActionStep> actions, Dictionary<string, WebApi.ActionDownloadInfo> resolvedDownloadInfos)
{
    var actionsToResolve = new List<Pipelines.ActionStep>();
    // Tracks keys queued in THIS call, so duplicates within the same batch are
    // resolved only once (the cache dictionary only covers earlier calls).
    var pendingKeys = new HashSet<string>(StringComparer.Ordinal);
    foreach (var action in actions)
    {
        var lookupKey = GetDownloadInfoLookupKey(action);
        // Skip actions with no lookup key (GetDownloadInfoLookupKey returned null/empty),
        // actions already resolved on a previous pass, and in-batch duplicates.
        if (!string.IsNullOrEmpty(lookupKey) && !resolvedDownloadInfos.ContainsKey(lookupKey) && pendingKeys.Add(lookupKey))
        {
            actionsToResolve.Add(action);
        }
    }
    if (actionsToResolve.Count > 0)
    {
        // One batched service call for everything new at this level; merge the
        // results back into the shared cache for later lookups and recursion.
        var downloadInfos = await GetDownloadInfoAsync(executionContext, actionsToResolve);
        foreach (var kvp in downloadInfos)
        {
            resolvedDownloadInfos[kvp.Key] = kvp.Value;
        }
    }
}
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo)
{
Trace.Entering();
@@ -1146,16 +1367,29 @@ namespace GitHub.Runner.Worker
return $"{repositoryReference.Name}@{repositoryReference.Ref}";
}
private AuthenticationHeaderValue CreateAuthHeader(string token)
private AuthenticationHeaderValue CreateAuthHeader(IExecutionContext executionContext, string downloadUrl, string token)
{
if (string.IsNullOrEmpty(token))
{
return null;
}
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
HostContext.SecretMasker.AddValue(base64EncodingToken);
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
if (executionContext.Global.Variables.GetBoolean(Constants.Runner.Features.UseBearerTokenForCodeload) == true &&
Uri.TryCreate(downloadUrl, UriKind.Absolute, out var parsedUrl) &&
!string.IsNullOrEmpty(parsedUrl?.Host) &&
!string.IsNullOrEmpty(parsedUrl?.PathAndQuery) &&
(parsedUrl.Host.StartsWith("codeload.", StringComparison.OrdinalIgnoreCase) || parsedUrl.PathAndQuery.StartsWith("/_codeload/", StringComparison.OrdinalIgnoreCase)))
{
Trace.Info("Using Bearer token for action archive download directly to codeload.");
return new AuthenticationHeaderValue("Bearer", token);
}
else
{
Trace.Info("Using Basic token for action archive download.");
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
HostContext.SecretMasker.AddValue(base64EncodingToken);
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
}
private async Task DownloadRepositoryArchive(IExecutionContext executionContext, string downloadUrl, string downloadAuthToken, string archiveFile)
@@ -1180,7 +1414,7 @@ namespace GitHub.Runner.Worker
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadAuthToken);
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(executionContext, downloadUrl, downloadAuthToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
using (var response = await httpClient.GetAsync(downloadUrl))

View File

@@ -316,7 +316,6 @@ namespace GitHub.Runner.Worker
Schema = _actionManifestSchema,
// TODO: Switch to real tracewriter for cutover
TraceWriter = new GitHub.Actions.WorkflowParser.ObjectTemplating.EmptyTraceWriter(),
AllowCaseFunction = false,
};
// Expression values from execution context

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
@@ -315,7 +315,6 @@ namespace GitHub.Runner.Worker
maxBytes: 10 * 1024 * 1024),
Schema = _actionManifestSchema,
TraceWriter = executionContext.ToTemplateTraceWriter(),
AllowCaseFunction = false,
};
// Expression values from execution context

View File

@@ -1,7 +1,10 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http.Headers;
using System.Net.Sockets;
using System.Text;
using System.Threading;
@@ -9,6 +12,9 @@ using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using Microsoft.DevTunnels.Connections;
using Microsoft.DevTunnels.Contracts;
using Microsoft.DevTunnels.Management;
using Newtonsoft.Json;
namespace GitHub.Runner.Worker.Dap
@@ -30,10 +36,10 @@ namespace GitHub.Runner.Worker.Dap
/// </summary>
public sealed class DapDebugger : RunnerService, IDapDebugger
{
private const int _defaultPort = 4711;
private const int _defaultTimeoutMinutes = 15;
private const string _portEnvironmentVariable = "ACTIONS_RUNNER_DAP_PORT";
private const string _timeoutEnvironmentVariable = "ACTIONS_RUNNER_DAP_CONNECTION_TIMEOUT";
private const int _defaultTunnelConnectTimeoutSeconds = 30;
private const string _tunnelConnectTimeoutSeconds = "ACTIONS_RUNNER_DAP_TUNNEL_CONNECT_TIMEOUT_SECONDS";
private const string _contentLengthHeader = "Content-Length: ";
private const int _maxMessageSize = 10 * 1024 * 1024; // 10 MB
private const int _maxHeaderLineLength = 8192; // 8 KB
@@ -58,6 +64,16 @@ namespace GitHub.Runner.Worker.Dap
private CancellationTokenRegistration? _cancellationRegistration;
private bool _isFirstStep = true;
// Dev Tunnel relay host for remote debugging
private TunnelRelayTunnelHost _tunnelRelayHost;
// Cancellation source for the connection loop, cancelled in StopAsync
// so AcceptTcpClientAsync unblocks cleanly without relying on listener disposal.
private CancellationTokenSource _loopCts;
// When true, skip tunnel relay startup (unit tests only)
internal bool SkipTunnelRelay { get; set; }
// Synchronization for step execution
private TaskCompletionSource<DapCommand> _commandTcs;
private readonly object _stateLock = new object();
@@ -101,22 +117,38 @@ namespace GitHub.Runner.Worker.Dap
Trace.Info("DapDebugger initialized");
}
public Task StartAsync(IExecutionContext jobContext)
public async Task StartAsync(IExecutionContext jobContext)
{
ArgUtil.NotNull(jobContext, nameof(jobContext));
var port = ResolvePort();
var debuggerConfig = jobContext.Global.Debugger;
Trace.Info($"Starting DAP debugger on port {port}");
if (!debuggerConfig.HasValidTunnel)
{
throw new ArgumentException(
"Debugger requires valid tunnel configuration (tunnelId, clusterId, hostToken, port).");
}
Trace.Info($"Starting DAP debugger on port {debuggerConfig.Tunnel.Port}");
_jobContext = jobContext;
_readyTcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
_listener = new TcpListener(IPAddress.Loopback, port);
_listener = new TcpListener(IPAddress.Loopback, debuggerConfig.Tunnel.Port);
_listener.Start();
Trace.Info($"DAP debugger listening on {_listener.LocalEndpoint}");
// Start Dev Tunnel relay so remote clients reach the local DAP port.
// The relay is torn down explicitly in StopAsync (after the DAP session
// is closed) so we do NOT pass the job cancellation token here — that
// would race with the DAP shutdown and drop the transport mid-protocol.
if (!SkipTunnelRelay)
{
await StartTunnelRelayAsync(debuggerConfig);
}
_state = DapSessionState.WaitingForConnection;
_connectionLoopTask = ConnectionLoopAsync(jobContext.CancellationToken);
_loopCts = CancellationTokenSource.CreateLinkedTokenSource(jobContext.CancellationToken);
_connectionLoopTask = ConnectionLoopAsync(_loopCts.Token);
_cancellationRegistration = jobContext.CancellationToken.Register(() =>
{
@@ -125,8 +157,44 @@ namespace GitHub.Runner.Worker.Dap
_commandTcs?.TrySetResult(DapCommand.Disconnect);
});
Trace.Info($"DAP debugger started on port {port}");
return Task.CompletedTask;
Trace.Info($"DAP debugger started on port {debuggerConfig.Tunnel.Port}");
}
/// <summary>
/// Starts the Dev Tunnel relay host that forwards remote debug clients to the local
/// DAP listener port. Builds a management client authenticated with the job's host
/// token, describes the tunnel (id, cluster, host access token, single port), then
/// connects the relay host within a configurable timeout.
/// </summary>
/// <param name="config">Debugger configuration carrying the tunnel id, cluster id, host token, and port.</param>
private async Task StartTunnelRelayAsync(DebuggerConfig config)
{
    Trace.Info($"Starting Dev Tunnel relay (tunnel={config.Tunnel.TunnelId}, cluster={config.Tunnel.ClusterId})");
    var userAgents = HostContext.UserAgents.ToArray();
    var httpHandler = HostContext.CreateHttpClientHandler();
    // NOTE(review): redirects are disabled on the handler — presumably to avoid
    // re-sending the tunnel auth header to a redirect target; confirm intent.
    httpHandler.AllowAutoRedirect = false;
    // The token callback supplies the host token with the "tunnel" auth scheme on
    // every management request; tunnelServiceUri null means the client default.
    var managementClient = new TunnelManagementClient(
        userAgents,
        () => Task.FromResult<AuthenticationHeaderValue>(new AuthenticationHeaderValue("tunnel", config.Tunnel.HostToken)),
        tunnelServiceUri: null,
        httpHandler);
    var tunnel = new Tunnel
    {
        TunnelId = config.Tunnel.TunnelId,
        ClusterId = config.Tunnel.ClusterId,
        // Host-scope access token lets this process act as the tunnel host.
        AccessTokens = new Dictionary<string, string>
        {
            [TunnelAccessScopes.Host] = config.Tunnel.HostToken
        },
        // Expose exactly the DAP listener port through the tunnel.
        Ports = new[]
        {
            new TunnelPort { PortNumber = config.Tunnel.Port }
        },
    };
    // Route the relay SDK's diagnostics into the runner's trace infrastructure.
    _tunnelRelayHost = new TunnelRelayTunnelHost(managementClient, HostContext.GetTrace("DevTunnelRelay").Source);
    // Bound the connect attempt so a misconfigured/unreachable relay fails fast
    // instead of hanging job startup.
    var tunnelConnectTimeoutSeconds = ResolveTunnelConnectTimeout();
    using var connectCts = new CancellationTokenSource(TimeSpan.FromSeconds(tunnelConnectTimeoutSeconds));
    Trace.Info($"Connecting to Dev Tunnel relay (timeout: {tunnelConnectTimeoutSeconds}s)");
    await _tunnelRelayHost.ConnectAsync(tunnel, connectCts.Token);
    Trace.Info("Dev Tunnel relay started");
}
public async Task WaitUntilReadyAsync()
@@ -180,31 +248,55 @@ namespace GitHub.Runner.Worker.Dap
_cancellationRegistration = null;
}
if (_state != DapSessionState.NotStarted)
try
{
try
if (_listener != null || _tunnelRelayHost != null || _connectionLoopTask != null)
{
Trace.Info("Stopping DAP debugger");
CleanupConnection();
try { _listener?.Stop(); }
catch { /* best effort */ }
if (_connectionLoopTask != null)
{
try
{
await Task.WhenAny(_connectionLoopTask, Task.Delay(5000));
}
catch { /* best effort */ }
}
}
catch (Exception ex)
// Tear down Dev Tunnel relay FIRST — it may hold connections to the
// local port and must be fully disposed before we release the listener,
// otherwise the next worker can't bind the same port.
if (_tunnelRelayHost != null)
{
Trace.Error("Error stopping DAP debugger");
Trace.Error(ex);
Trace.Info("Stopping Dev Tunnel relay");
var disposeTask = _tunnelRelayHost.DisposeAsync().AsTask();
if (await Task.WhenAny(disposeTask, Task.Delay(10_000)) != disposeTask)
{
Trace.Warning("Dev Tunnel relay dispose timed out after 10s");
}
else
{
Trace.Info("Dev Tunnel relay stopped");
}
_tunnelRelayHost = null;
}
CleanupConnection();
// Cancel the connection loop first so AcceptTcpClientAsync unblocks
// cleanly, then stop the listener once nothing is using it.
try { _loopCts?.Cancel(); }
catch { /* best effort */ }
try { _listener?.Stop(); }
catch { /* best effort */ }
if (_connectionLoopTask != null)
{
try
{
await Task.WhenAny(_connectionLoopTask, Task.Delay(5000));
}
catch { /* best effort */ }
}
}
catch (Exception ex)
{
Trace.Error("Error stopping DAP debugger");
Trace.Error(ex);
}
lock (_stateLock)
@@ -221,6 +313,8 @@ namespace GitHub.Runner.Worker.Dap
_stream = null;
_readyTcs = null;
_connectionLoopTask = null;
_loopCts?.Dispose();
_loopCts = null;
}
public async Task OnStepStartingAsync(IStep step)
@@ -398,12 +492,7 @@ namespace GitHub.Runner.Worker.Dap
try
{
Trace.Info("Waiting for debug client connection...");
_client = await _listener.AcceptTcpClientAsync();
if (cancellationToken.IsCancellationRequested)
{
break;
}
_client = await _listener.AcceptTcpClientAsync(cancellationToken);
_stream = _client.GetStream();
var remoteEndPoint = _client.Client.RemoteEndPoint;
@@ -418,6 +507,10 @@ namespace GitHub.Runner.Worker.Dap
HandleClientDisconnected();
CleanupConnection();
}
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
{
break;
}
catch (Exception ex)
{
CleanupConnection();
@@ -427,6 +520,13 @@ namespace GitHub.Runner.Worker.Dap
break;
}
// If the listener has been stopped, don't retry.
if (_listener == null || !_listener.Server.IsBound)
{
Trace.Info("Listener stopped, exiting connection loop");
break;
}
Trace.Error("Debugger connection error");
Trace.Error(ex);
@@ -1272,18 +1372,6 @@ namespace GitHub.Runner.Worker.Dap
};
}
internal int ResolvePort()
{
    // Resolve the DAP listener port: honor an environment-variable override
    // when it parses to a non-privileged port, otherwise fall back to the default.
    var raw = Environment.GetEnvironmentVariable(_portEnvironmentVariable);
    if (string.IsNullOrEmpty(raw) || !int.TryParse(raw, out var customPort))
    {
        return _defaultPort;
    }

    // Reject privileged (<= 1024) and out-of-range values.
    if (customPort <= 1024 || customPort > 65535)
    {
        return _defaultPort;
    }

    Trace.Info($"Using custom DAP port {customPort} from {_portEnvironmentVariable}");
    return customPort;
}
internal int ResolveTimeout()
{
var timeoutEnv = Environment.GetEnvironmentVariable(_timeoutEnvironmentVariable);
@@ -1295,5 +1383,17 @@ namespace GitHub.Runner.Worker.Dap
return _defaultTimeoutMinutes;
}
internal int ResolveTunnelConnectTimeout()
{
    // Resolve the tunnel connect timeout (seconds): accept any positive integer
    // supplied via the environment variable, otherwise use the built-in default.
    var configured = Environment.GetEnvironmentVariable(_tunnelConnectTimeoutSeconds);
    if (string.IsNullOrEmpty(configured) || !int.TryParse(configured, out var customTimeout) || customTimeout <= 0)
    {
        return _defaultTunnelConnectTimeoutSeconds;
    }

    Trace.Info($"Using custom tunnel connect timeout {customTimeout}s from {_tunnelConnectTimeoutSeconds}");
    return customTimeout;
}
}
}

View File

@@ -0,0 +1,33 @@
using GitHub.DistributedTask.Pipelines;

namespace GitHub.Runner.Worker.Dap
{
    /// <summary>
    /// Immutable runtime configuration for the job debugger, built once from the
    /// acquire response and owned by <see cref="GlobalContext"/>.
    /// </summary>
    public sealed class DebuggerConfig
    {
        public DebuggerConfig(bool enabled, DebuggerTunnelInfo tunnel)
        {
            Enabled = enabled;
            Tunnel = tunnel;
        }

        /// <summary>True when the debugger was enabled for this job.</summary>
        public bool Enabled { get; }

        /// <summary>
        /// Dev Tunnel details for remote debugging; required when
        /// <see cref="Enabled"/> is true.
        /// </summary>
        public DebuggerTunnelInfo Tunnel { get; }

        /// <summary>
        /// True when every tunnel field is present and the port falls in the
        /// non-privileged range (1024-65535).
        /// </summary>
        public bool HasValidTunnel
        {
            get
            {
                if (Tunnel == null)
                {
                    return false;
                }

                var hasIdentity = !string.IsNullOrEmpty(Tunnel.TunnelId)
                    && !string.IsNullOrEmpty(Tunnel.ClusterId)
                    && !string.IsNullOrEmpty(Tunnel.HostToken);
                return hasIdentity && Tunnel.Port >= 1024 && Tunnel.Port <= 65535;
            }
        }
    }
}

View File

@@ -892,15 +892,12 @@ namespace GitHub.Runner.Worker
Trace.Info("Initializing Job context");
var jobContext = new JobContext();
if (Global.Variables.GetBoolean(Constants.Runner.Features.AddCheckRunIdToJobContext) ?? false)
ExpressionValues.TryGetValue("job", out var jobDictionary);
if (jobDictionary != null)
{
ExpressionValues.TryGetValue("job", out var jobDictionary);
if (jobDictionary != null)
foreach (var pair in jobDictionary.AssertDictionary("job"))
{
foreach (var pair in jobDictionary.AssertDictionary("job"))
{
jobContext[pair.Key] = pair.Value;
}
jobContext[pair.Key] = pair.Value;
}
}
ExpressionValues["job"] = jobContext;
@@ -970,7 +967,7 @@ namespace GitHub.Runner.Worker
Global.WriteDebug = Global.Variables.Step_Debug ?? false;
// Debugger enabled flag (from acquire response).
Global.EnableDebugger = message.EnableDebugger;
Global.Debugger = new Dap.DebuggerConfig(message.EnableDebugger, message.DebuggerTunnel);
// Hook up JobServerQueueThrottling event, we will log warning on server tarpit.
_jobServerQueue.JobServerQueueThrottling += JobServerQueueThrottling_EventReceived;

View File

@@ -4,6 +4,7 @@ using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Worker.Container;
using GitHub.Runner.Worker.Dap;
using Newtonsoft.Json.Linq;
using Sdk.RSWebApi.Contracts;
@@ -27,7 +28,7 @@ namespace GitHub.Runner.Worker
public StepsContext StepsContext { get; set; }
public Variables Variables { get; set; }
public bool WriteDebug { get; set; }
public bool EnableDebugger { get; set; }
public DebuggerConfig Debugger { get; set; }
public string InfrastructureFailureCategory { get; set; }
public JObject ContainerHookState { get; set; }
public bool HasTemplateEvaluatorMismatch { get; set; }

View File

@@ -82,5 +82,69 @@ namespace GitHub.Runner.Worker
}
}
}
// Typed accessor for the "workflow_ref" entry of the job context.
// Getter returns null when the key is absent or holds a non-string value;
// setter stores a StringContextData, or null to clear the entry.
public string WorkflowRef
{
    get
    {
        return this.TryGetValue("workflow_ref", out var data) && data is StringContextData s
            ? s.Value
            : null;
    }
    set
    {
        this["workflow_ref"] = value is null ? null : new StringContextData(value);
    }
}
// Typed accessor for the "workflow_sha" entry of the job context.
// Getter returns null when the key is absent or holds a non-string value;
// setter stores a StringContextData, or null to clear the entry.
public string WorkflowSha
{
    get
    {
        return this.TryGetValue("workflow_sha", out var data) && data is StringContextData s
            ? s.Value
            : null;
    }
    set
    {
        this["workflow_sha"] = value is null ? null : new StringContextData(value);
    }
}
// Typed accessor for the "workflow_repository" entry of the job context.
// Getter returns null when the key is absent or holds a non-string value;
// setter stores a StringContextData, or null to clear the entry.
public string WorkflowRepository
{
    get
    {
        return this.TryGetValue("workflow_repository", out var data) && data is StringContextData s
            ? s.Value
            : null;
    }
    set
    {
        this["workflow_repository"] = value is null ? null : new StringContextData(value);
    }
}
// Typed accessor for the "workflow_file_path" entry of the job context.
// Getter returns null when the key is absent or holds a non-string value;
// setter stores a StringContextData, or null to clear the entry.
public string WorkflowFilePath
{
    get
    {
        return this.TryGetValue("workflow_file_path", out var data) && data is StringContextData s
            ? s.Value
            : null;
    }
    set
    {
        this["workflow_file_path"] = value is null ? null : new StringContextData(value);
    }
}
}
}

View File

@@ -182,7 +182,7 @@ namespace GitHub.Runner.Worker
_tempDirectoryManager.InitializeTempDirectory(jobContext);
// Setup the debugger
if (jobContext.Global.EnableDebugger)
if (jobContext.Global.Debugger?.Enabled == true)
{
Trace.Info("Debugger enabled for this job run");

View File

@@ -19,10 +19,11 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="8.0.0" />
<PackageReference Include="System.ServiceProcess.ServiceController" Version="8.0.1" />
<PackageReference Include="System.Threading.Channels" Version="8.0.0" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="10.0.3" />
<PackageReference Include="System.ServiceProcess.ServiceController" Version="10.0.3" />
<PackageReference Include="System.Threading.Channels" Version="10.0.3" />
<PackageReference Include="YamlDotNet.Signed" Version="5.3.0" />
<PackageReference Include="Microsoft.DevTunnels.Connections" Version="1.3.16" />
</ItemGroup>
<ItemGroup>

View File

@@ -17,10 +17,9 @@ namespace GitHub.DistributedTask.Expressions2
String expression,
ITraceWriter trace,
IEnumerable<INamedValueInfo> namedValues,
IEnumerable<IFunctionInfo> functions,
Boolean allowCaseFunction = true)
IEnumerable<IFunctionInfo> functions)
{
var context = new ParseContext(expression, trace, namedValues, functions, allowCaseFunction: allowCaseFunction);
var context = new ParseContext(expression, trace, namedValues, functions);
context.Trace.Info($"Parsing expression: <{expression}>");
return CreateTree(context);
}
@@ -416,12 +415,6 @@ namespace GitHub.DistributedTask.Expressions2
String name,
out IFunctionInfo functionInfo)
{
if (String.Equals(name, "case", StringComparison.OrdinalIgnoreCase) && !context.AllowCaseFunction)
{
functionInfo = null;
return false;
}
return ExpressionConstants.WellKnownFunctions.TryGetValue(name, out functionInfo) ||
context.ExtensionFunctions.TryGetValue(name, out functionInfo);
}
@@ -429,7 +422,6 @@ namespace GitHub.DistributedTask.Expressions2
private sealed class ParseContext
{
public Boolean AllowUnknownKeywords;
public Boolean AllowCaseFunction;
public readonly String Expression;
public readonly Dictionary<String, IFunctionInfo> ExtensionFunctions = new Dictionary<String, IFunctionInfo>(StringComparer.OrdinalIgnoreCase);
public readonly Dictionary<String, INamedValueInfo> ExtensionNamedValues = new Dictionary<String, INamedValueInfo>(StringComparer.OrdinalIgnoreCase);
@@ -445,8 +437,7 @@ namespace GitHub.DistributedTask.Expressions2
ITraceWriter trace,
IEnumerable<INamedValueInfo> namedValues,
IEnumerable<IFunctionInfo> functions,
Boolean allowUnknownKeywords = false,
Boolean allowCaseFunction = true)
Boolean allowUnknownKeywords = false)
{
Expression = expression ?? String.Empty;
if (Expression.Length > ExpressionConstants.MaxLength)
@@ -467,7 +458,6 @@ namespace GitHub.DistributedTask.Expressions2
LexicalAnalyzer = new LexicalAnalyzer(Expression);
AllowUnknownKeywords = allowUnknownKeywords;
AllowCaseFunction = allowCaseFunction;
}
private class NoOperationTraceWriter : ITraceWriter

View File

@@ -86,12 +86,6 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal ITraceWriter TraceWriter { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the case expression function is allowed.
/// Defaults to true. Set to false to disable the case function.
/// </summary>
internal Boolean AllowCaseFunction { get; set; } = true;
private IDictionary<String, Int32> FileIds
{
get

View File

@@ -57,7 +57,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -94,7 +94,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -123,7 +123,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -152,7 +152,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,

View File

@@ -260,6 +260,13 @@ namespace GitHub.DistributedTask.Pipelines
set;
}
[DataMember(EmitDefaultValue = false)]
public DebuggerTunnelInfo DebuggerTunnel
{
get;
set;
}
/// <summary>
/// Gets the collection of variables associated with the current context.
/// </summary>

View File

@@ -0,0 +1,24 @@
using System.Runtime.Serialization;

namespace GitHub.DistributedTask.Pipelines
{
    /// <summary>
    /// Dev Tunnel information the runner needs to host the debugger tunnel.
    /// Matches the run-service <c>DebuggerTunnel</c> contract.
    /// </summary>
    [DataContract]
    public sealed class DebuggerTunnelInfo
    {
        /// <summary>Identifier of the Dev Tunnel. Omitted from the wire payload when null.</summary>
        [DataMember(EmitDefaultValue = false)]
        public string TunnelId { get; set; }

        /// <summary>Cluster the tunnel belongs to (e.g. "use2") — presumably a service-side routing key; confirm against the run-service contract.</summary>
        [DataMember(EmitDefaultValue = false)]
        public string ClusterId { get; set; }

        /// <summary>Token used when hosting the tunnel — NOTE(review): assumed to be a host-scoped access token; verify against the run-service contract.</summary>
        [DataMember(EmitDefaultValue = false)]
        public string HostToken { get; set; }

        /// <summary>Tunnel port. ushort, so serialized values are constrained to 0-65535; omitted from the payload when 0.</summary>
        [DataMember(EmitDefaultValue = false)]
        public ushort Port { get; set; }
    }
}

View File

@@ -681,7 +681,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
var node = default(ExpressionNode);
try
{
node = expressionParser.CreateTree(condition, null, namedValues, functions, allowCaseFunction: context.AllowCaseFunction) as ExpressionNode;
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
}
catch (Exception ex)
{

View File

@@ -1,4 +1,4 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
@@ -17,10 +17,9 @@ namespace GitHub.Actions.Expressions
String expression,
ITraceWriter trace,
IEnumerable<INamedValueInfo> namedValues,
IEnumerable<IFunctionInfo> functions,
Boolean allowCaseFunction = true)
IEnumerable<IFunctionInfo> functions)
{
var context = new ParseContext(expression, trace, namedValues, functions, allowCaseFunction: allowCaseFunction);
var context = new ParseContext(expression, trace, namedValues, functions);
context.Trace.Info($"Parsing expression: <{expression}>");
return CreateTree(context);
}
@@ -322,7 +321,7 @@ namespace GitHub.Actions.Expressions
context.Operators.Pop();
}
var functionOperands = PopOperands(context, parameterCount);
// Node already exists on the operand stack
function = (Function)context.Operands.Peek();
@@ -416,12 +415,6 @@ namespace GitHub.Actions.Expressions
String name,
out IFunctionInfo functionInfo)
{
if (String.Equals(name, "case", StringComparison.OrdinalIgnoreCase) && !context.AllowCaseFunction)
{
functionInfo = null;
return false;
}
return ExpressionConstants.WellKnownFunctions.TryGetValue(name, out functionInfo) ||
context.ExtensionFunctions.TryGetValue(name, out functionInfo);
}
@@ -429,7 +422,6 @@ namespace GitHub.Actions.Expressions
private sealed class ParseContext
{
public Boolean AllowUnknownKeywords;
public Boolean AllowCaseFunction;
public readonly String Expression;
public readonly Dictionary<String, IFunctionInfo> ExtensionFunctions = new Dictionary<String, IFunctionInfo>(StringComparer.OrdinalIgnoreCase);
public readonly Dictionary<String, INamedValueInfo> ExtensionNamedValues = new Dictionary<String, INamedValueInfo>(StringComparer.OrdinalIgnoreCase);
@@ -445,8 +437,7 @@ namespace GitHub.Actions.Expressions
ITraceWriter trace,
IEnumerable<INamedValueInfo> namedValues,
IEnumerable<IFunctionInfo> functions,
Boolean allowUnknownKeywords = false,
Boolean allowCaseFunction = true)
Boolean allowUnknownKeywords = false)
{
Expression = expression ?? String.Empty;
if (Expression.Length > ExpressionConstants.MaxLength)
@@ -467,7 +458,6 @@ namespace GitHub.Actions.Expressions
LexicalAnalyzer = new LexicalAnalyzer(Expression);
AllowUnknownKeywords = allowUnknownKeywords;
AllowCaseFunction = allowCaseFunction;
}
private class NoOperationTraceWriter : ITraceWriter

View File

@@ -23,14 +23,14 @@
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="Microsoft.AspNet.WebApi.Client" Version="6.0.0" />
<PackageReference Include="System.Security.Cryptography.Cng" Version="5.0.0" />
<PackageReference Include="System.Security.Cryptography.Pkcs" Version="10.0.2" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="8.0.0" />
<PackageReference Include="System.Security.Cryptography.Pkcs" Version="10.0.3" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="10.0.3" />
<PackageReference Include="Minimatch" Version="2.0.0" />
<PackageReference Include="YamlDotNet.Signed" Version="5.3.0" />
<PackageReference Include="System.Net.Http" Version="4.3.4" />
<PackageReference Include="System.Text.RegularExpressions" Version="4.3.1" />
<PackageReference Include="System.Private.Uri" Version="4.3.2" />
<PackageReference Include="System.Formats.Asn1" Version="10.0.2" />
<PackageReference Include="System.Formats.Asn1" Version="10.0.3" />
</ItemGroup>
<ItemGroup>

View File

@@ -32,7 +32,7 @@ namespace GitHub.Actions.WorkflowParser.Conversion
return;
}
var effectiveMax = explicitMax ?? CreatePermissionsFromPolicy(context, permissionsPolicy, includeIdToken: isTrusted, includeModels: context.GetFeatures().AllowModelsPermission);
var effectiveMax = explicitMax ?? CreatePermissionsFromPolicy(context, permissionsPolicy, includeIdToken: isTrusted, includeModels: context.GetFeatures().AllowModelsPermission, includeWorkflows: context.GetFeatures().AllowWorkflowsPermission);
if (requested.ViolatesMaxPermissions(effectiveMax, out var permissionLevelViolations))
{
@@ -59,7 +59,8 @@ namespace GitHub.Actions.WorkflowParser.Conversion
TemplateContext context,
string permissionsPolicy,
bool includeIdToken,
bool includeModels)
bool includeModels,
bool includeWorkflows)
{
switch (permissionsPolicy)
{
@@ -70,7 +71,7 @@ namespace GitHub.Actions.WorkflowParser.Conversion
Packages = PermissionLevel.Read,
};
case WorkflowConstants.PermissionsPolicy.Write:
return new Permissions(PermissionLevel.Write, includeIdToken: includeIdToken, includeAttestations: true, includeModels: includeModels);
return new Permissions(PermissionLevel.Write, includeIdToken: includeIdToken, includeAttestations: true, includeModels: includeModels, includeWorkflows: includeWorkflows);
default:
throw new ArgumentException($"Unexpected permission policy: '{permissionsPolicy}'");
}

View File

@@ -1828,7 +1828,7 @@ namespace GitHub.Actions.WorkflowParser.Conversion
var node = default(ExpressionNode);
try
{
node = expressionParser.CreateTree(condition, null, namedValues, functions, allowCaseFunction: context.AllowCaseFunction) as ExpressionNode;
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
}
catch (Exception ex)
{
@@ -1877,7 +1877,7 @@ namespace GitHub.Actions.WorkflowParser.Conversion
permissionsStr.AssertUnexpectedValue(permissionsStr.Value);
break;
}
return new Permissions(permissionLevel, includeIdToken: true, includeAttestations: true, includeModels: context.GetFeatures().AllowModelsPermission);
return new Permissions(permissionLevel, includeIdToken: true, includeAttestations: true, includeModels: context.GetFeatures().AllowModelsPermission, includeWorkflows: context.GetFeatures().AllowWorkflowsPermission);
}
var mapping = token.AssertMapping("permissions");
@@ -1957,6 +1957,24 @@ namespace GitHub.Actions.WorkflowParser.Conversion
context.Error(key, $"The permission 'models' is not allowed");
}
break;
case "workflows":
if (context.GetFeatures().AllowWorkflowsPermission)
{
// Workflows only supports write; downgrade read to none
if (permissionLevel == PermissionLevel.Read)
{
permissions.Workflows = PermissionLevel.NoAccess;
}
else
{
permissions.Workflows = permissionLevel;
}
}
else
{
context.Error(key, $"The permission 'workflows' is not allowed");
}
break;
default:
break;
}

View File

@@ -1,4 +1,4 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
@@ -113,12 +113,6 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating
/// </summary>
internal Boolean StrictJsonParsing { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the case expression function is allowed.
/// Defaults to true. Set to false to disable the case function.
/// </summary>
internal Boolean AllowCaseFunction { get; set; } = true;
internal ITraceWriter TraceWriter { get; set; }
private IDictionary<String, Int32> FileIds

View File

@@ -1,4 +1,4 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
@@ -55,7 +55,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -93,7 +93,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -123,7 +123,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -153,7 +153,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -289,4 +289,4 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
return result;
}
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Actions.WorkflowParser.Conversion;
@@ -17,7 +17,7 @@ namespace GitHub.Actions.WorkflowParser
public Permissions(Permissions copy)
{
Actions = copy.Actions;
ArtifactMetadata = copy.ArtifactMetadata;
ArtifactMetadata = copy.ArtifactMetadata;
Attestations = copy.Attestations;
Checks = copy.Checks;
Contents = copy.Contents;
@@ -32,16 +32,18 @@ namespace GitHub.Actions.WorkflowParser
SecurityEvents = copy.SecurityEvents;
IdToken = copy.IdToken;
Models = copy.Models;
Workflows = copy.Workflows;
}
public Permissions(
PermissionLevel permissionLevel,
bool includeIdToken,
bool includeAttestations,
bool includeModels)
bool includeModels,
bool includeWorkflows = false)
{
Actions = permissionLevel;
ArtifactMetadata = permissionLevel;
ArtifactMetadata = permissionLevel;
Attestations = includeAttestations ? permissionLevel : PermissionLevel.NoAccess;
Checks = permissionLevel;
Contents = permissionLevel;
@@ -56,8 +58,12 @@ namespace GitHub.Actions.WorkflowParser
SecurityEvents = permissionLevel;
IdToken = includeIdToken ? permissionLevel : PermissionLevel.NoAccess;
// Models must not have higher permissions than Read
Models = includeModels
? (permissionLevel == PermissionLevel.Write ? PermissionLevel.Read : permissionLevel)
Models = includeModels
? (permissionLevel == PermissionLevel.Write ? PermissionLevel.Read : permissionLevel)
: PermissionLevel.NoAccess;
// Workflows is write-only, so only grant it when permissionLevel is Write
Workflows = includeWorkflows && permissionLevel == PermissionLevel.Write
? PermissionLevel.Write
: PermissionLevel.NoAccess;
}
@@ -81,6 +87,7 @@ namespace GitHub.Actions.WorkflowParser
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("security-events", (left.SecurityEvents, right.SecurityEvents)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("id-token", (left.IdToken, right.IdToken)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("models", (left.Models, right.Models)),
new KeyValuePair<string, (PermissionLevel, PermissionLevel)>("workflows", (left.Workflows, right.Workflows)),
};
}
@@ -196,6 +203,13 @@ namespace GitHub.Actions.WorkflowParser
set;
}
[DataMember(Name = "workflows", EmitDefaultValue = false)]
public PermissionLevel Workflows
{
get;
set;
}
public Permissions Clone()
{
return new Permissions(this);

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
@@ -41,6 +41,13 @@ namespace GitHub.Actions.WorkflowParser
[DataMember(EmitDefaultValue = false)]
public bool AllowModelsPermission { get; set; }
/// <summary>
/// Gets or sets a value indicating whether users may use the "workflows" permission.
/// Used during parsing only.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public bool AllowWorkflowsPermission { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the expression function fromJson performs strict JSON parsing.
/// Used during evaluation only.
@@ -67,6 +74,7 @@ namespace GitHub.Actions.WorkflowParser
Snapshot = false, // Default to false since this feature is still in an experimental phase
StrictJsonParsing = false, // Default to false since this is temporary for telemetry purposes only
AllowModelsPermission = false, // Default to false since we want this to be disabled for all non-production environments
AllowWorkflowsPermission = false, // Default to false; gated by feature flag for controlled rollout
AllowServiceContainerCommand = false, // Default to false since this feature is gated by actions_service_container_command
};
}

View File

@@ -1,4 +1,4 @@
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.ObjectTemplating;
using System;
@@ -9,7 +9,7 @@ namespace GitHub.Runner.Common.Tests.Sdk
{
/// <summary>
/// Regression tests for ExpressionParser.CreateTree to verify that
/// allowCaseFunction does not accidentally set allowUnknownKeywords.
/// the case function does not accidentally set allowUnknownKeywords.
/// </summary>
public sealed class ExpressionParserL0
{
@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common.Tests.Sdk
[Trait("Category", "Sdk")]
public void CreateTree_RejectsUnrecognizedNamedValue()
{
// Regression: allowCaseFunction was passed positionally into
// Regression: the case function parameter was passed positionally into
// the allowUnknownKeywords parameter, causing all named values
// to be silently accepted.
var parser = new ExpressionParser();
@@ -52,7 +52,7 @@ namespace GitHub.Runner.Common.Tests.Sdk
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Sdk")]
public void CreateTree_CaseFunctionWorks_WhenAllowed()
public void CreateTree_CaseFunctionWorks()
{
var parser = new ExpressionParser();
var namedValues = new List<INamedValueInfo>
@@ -60,35 +60,17 @@ namespace GitHub.Runner.Common.Tests.Sdk
new NamedValueInfo<ContextValueNode>("github"),
};
var node = parser.CreateTree("case(github.event_name, 'push', 'Push Event')", null, namedValues, null, allowCaseFunction: true);
var node = parser.CreateTree("case(github.event_name, 'push', 'Push Event')", null, namedValues, null);
Assert.NotNull(node);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Sdk")]
public void CreateTree_CaseFunctionRejected_WhenDisallowed()
{
var parser = new ExpressionParser();
var namedValues = new List<INamedValueInfo>
{
new NamedValueInfo<ContextValueNode>("github"),
};
var ex = Assert.Throws<ParseException>(() =>
parser.CreateTree("case(github.event_name, 'push', 'Push Event')", null, namedValues, null, allowCaseFunction: false));
Assert.Contains("Unrecognized function", ex.Message);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Sdk")]
public void CreateTree_CaseFunctionDoesNotAffectUnknownKeywords()
{
// The key regression test: with allowCaseFunction=true (default),
// unrecognized named values must still be rejected.
// The key regression test: unrecognized named values must still be rejected.
var parser = new ExpressionParser();
var namedValues = new List<INamedValueInfo>
{
@@ -96,7 +78,7 @@ namespace GitHub.Runner.Common.Tests.Sdk
};
var ex = Assert.Throws<ParseException>(() =>
parser.CreateTree("github.ref", null, namedValues, null, allowCaseFunction: true));
parser.CreateTree("github.ref", null, namedValues, null));
Assert.Contains("Unrecognized named-value", ex.Message);
}

View File

@@ -69,6 +69,56 @@ public sealed class AgentJobRequestMessageL0
Assert.False(recoveredMessage.EnableDebugger, "EnableDebugger should be false when JSON contains 'EnableDebugger': false");
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void VerifyDebuggerTunnelDeserialization_WithTunnel()
{
    // Arrange: payload with a fully-populated DebuggerTunnel object.
    // DebuggerTunnelInfo must be registered as a known type for the serializer.
    var settings = new DataContractJsonSerializerSettings
    {
        KnownTypes = new[] { typeof(DebuggerTunnelInfo) }
    };
    var serializer = new DataContractJsonSerializer(typeof(AgentJobRequestMessage), settings);
    var json = DoubleQuotify(
        "{'EnableDebugger': true, 'DebuggerTunnel': {'TunnelId': 'tun-123', 'ClusterId': 'use2', 'HostToken': 'tok-abc', 'Port': 4711}}");

    // Act: deserialize the JSON payload.
    AgentJobRequestMessage recoveredMessage;
    using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)))
    {
        recoveredMessage = serializer.ReadObject(stream) as AgentJobRequestMessage;
    }

    // Assert: every tunnel field survives deserialization intact.
    Assert.NotNull(recoveredMessage);
    Assert.True(recoveredMessage.EnableDebugger);
    Assert.NotNull(recoveredMessage.DebuggerTunnel);
    Assert.Equal("tun-123", recoveredMessage.DebuggerTunnel.TunnelId);
    Assert.Equal("use2", recoveredMessage.DebuggerTunnel.ClusterId);
    Assert.Equal("tok-abc", recoveredMessage.DebuggerTunnel.HostToken);
    Assert.Equal(4711, recoveredMessage.DebuggerTunnel.Port);
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void VerifyDebuggerTunnelDeserialization_WithoutTunnel()
{
    // Arrange: payload enables the debugger but carries no tunnel details.
    var serializer = new DataContractJsonSerializer(typeof(AgentJobRequestMessage));
    var json = DoubleQuotify("{'EnableDebugger': true}");

    // Act: deserialize the JSON payload.
    AgentJobRequestMessage recoveredMessage;
    using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)))
    {
        recoveredMessage = serializer.ReadObject(stream) as AgentJobRequestMessage;
    }

    // Assert: DebuggerTunnel stays null when absent from the JSON.
    Assert.NotNull(recoveredMessage);
    Assert.True(recoveredMessage.EnableDebugger);
    Assert.Null(recoveredMessage.DebuggerTunnel);
}
private static string DoubleQuotify(string text)
{
return text.Replace('\'', '"');

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.IO;
using System.IO.Compression;
using System.Net;
@@ -1254,6 +1255,659 @@ runs:
}
#endif
// =================================================================
// Tests for batched action resolution optimization
// =================================================================
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task PrepareActions_BatchesResolutionAcrossCompositeActions()
{
    // Verifies that when multiple composite actions at the same depth
    // reference sub-actions, those sub-actions are resolved in a single
    // batched API call rather than one call per composite.
    //
    // Action tree:
    //   CompositePrestep (composite) → [Node action, CompositePrestep2 (composite)]
    //   CompositePrestep2 (composite) → [Node action, Docker action]
    //
    // Without batching: 3 API calls (depth 0, depth 1 for CompositePrestep, depth 2 for CompositePrestep2)
    // With batching: still 3 calls at most, but the key is that depth-1
    // sub-actions from all composites at depth 0 are batched into 1 call.
    // And the same action appearing at multiple depths triggers only 1 resolve.
    Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
    try
    {
        //Arrange
        Setup();
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);

        // Record every resolve call so we can assert on batching behavior.
        var resolveCallCount = 0;
        var resolvedActions = new List<ActionReferenceList>();
        _jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
            .Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
            {
                resolveCallCount++;
                resolvedActions.Add(actions);
                var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
                foreach (var action in actions.Actions)
                {
                    var key = $"{action.NameWithOwner}@{action.Ref}";
                    result.Actions[key] = new ActionDownloadInfo
                    {
                        NameWithOwner = action.NameWithOwner,
                        Ref = action.Ref,
                        ResolvedNameWithOwner = action.NameWithOwner,
                        ResolvedSha = $"{action.Ref}-sha",
                        TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
                        ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
                    };
                }
                return Task.FromResult(result);
            });

        var actionId = Guid.NewGuid();
        var actions = new List<Pipelines.ActionStep>
        {
            new Pipelines.ActionStep()
            {
                Name = "action",
                Id = actionId,
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "CompositePrestep",
                    RepositoryType = "GitHub"
                }
            }
        };

        //Act
        var result = await _actionManager.PrepareActionsAsync(_ec.Object, actions);

        //Assert
        // The composite tree is:
        //   depth 0: CompositePrestep
        //   depth 1: Node@RepositoryActionWithWrapperActionfile_Node + CompositePrestep2
        //   depth 2: Node@RepositoryActionWithWrapperActionfile_Node + Docker@RepositoryActionWithWrapperActionfile_Docker
        //
        // With batching:
        //   Call 1 (depth 0, resolve): CompositePrestep
        //   Call 2 (depth 0→1, pre-resolve): Node + CompositePrestep2 in one batch
        //   Call 3 (depth 1→2, pre-resolve): Docker only (Node already cached from call 2)
        Assert.Equal(3, resolveCallCount);

        // Call 1: depth 0 resolve — just the top-level composite
        var call1Keys = resolvedActions[0].Actions.Select(a => $"{a.NameWithOwner}@{a.Ref}").OrderBy(k => k).ToList();
        Assert.Equal(new[] { "TingluoHuang/runner_L0@CompositePrestep" }, call1Keys);

        // Call 2: depth 0→1 pre-resolve — batch both children of CompositePrestep
        var call2Keys = resolvedActions[1].Actions.Select(a => $"{a.NameWithOwner}@{a.Ref}").OrderBy(k => k).ToList();
        Assert.Equal(new[] { "TingluoHuang/runner_L0@CompositePrestep2", "TingluoHuang/runner_L0@RepositoryActionWithWrapperActionfile_Node" }, call2Keys);

        // Call 3: depth 1→2 pre-resolve — only Docker (Node was cached in call 2)
        var call3Keys = resolvedActions[2].Actions.Select(a => $"{a.NameWithOwner}@{a.Ref}").OrderBy(k => k).ToList();
        Assert.Equal(new[] { "TingluoHuang/runner_L0@RepositoryActionWithWrapperActionfile_Docker" }, call3Keys);

        // Verify all actions were downloaded
        Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "CompositePrestep.completed")));
        Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed")));
        Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "CompositePrestep2.completed")));
        Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed")));

        // Verify pre-step tracking still works correctly
        Assert.Equal(1, result.PreStepTracker.Count);
    }
    finally
    {
        Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task PrepareActions_DeduplicatesResolutionAcrossDepthLevels()
{
    // Verifies that an action appearing at multiple depths in the
    // composite tree is only resolved once (not re-resolved at each level).
    //
    // CompositePrestep uses Node action at depth 1.
    // CompositePrestep2 (also at depth 1) uses the SAME Node action at depth 2.
    // The Node action should only be resolved once total.
    Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
    try
    {
        //Arrange
        Setup();
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);

        // Accumulate every owner@ref key the server is asked to resolve,
        // across all calls, so duplicates across depths are visible.
        var allResolvedKeys = new List<string>();
        _jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
            .Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
            {
                var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
                foreach (var action in actions.Actions)
                {
                    var key = $"{action.NameWithOwner}@{action.Ref}";
                    allResolvedKeys.Add(key);
                    result.Actions[key] = new ActionDownloadInfo
                    {
                        NameWithOwner = action.NameWithOwner,
                        Ref = action.Ref,
                        ResolvedNameWithOwner = action.NameWithOwner,
                        ResolvedSha = $"{action.Ref}-sha",
                        TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
                        ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
                    };
                }
                return Task.FromResult(result);
            });

        var actionId = Guid.NewGuid();
        var actions = new List<Pipelines.ActionStep>
        {
            new Pipelines.ActionStep()
            {
                Name = "action",
                Id = actionId,
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "CompositePrestep",
                    RepositoryType = "GitHub"
                }
            }
        };

        //Act
        await _actionManager.PrepareActionsAsync(_ec.Object, actions);

        //Assert
        // TingluoHuang/runner_L0@RepositoryActionWithWrapperActionfile_Node appears
        // at both depth 1 (sub-step of CompositePrestep) and depth 2 (sub-step of
        // CompositePrestep2). With deduplication it should only be resolved once.
        var nodeActionKey = "TingluoHuang/runner_L0@RepositoryActionWithWrapperActionfile_Node";
        var nodeResolveCount = allResolvedKeys.Count(k => k == nodeActionKey);
        Assert.Equal(1, nodeResolveCount);

        // Verify the total number of unique actions resolved matches the tree
        var uniqueKeys = new HashSet<string>(allResolvedKeys);
        // Expected unique actions: CompositePrestep, Node, CompositePrestep2, Docker = 4
        Assert.Equal(4, uniqueKeys.Count);
    }
    finally
    {
        Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task PrepareActions_MultipleTopLevelActions_BatchesResolution()
{
    // Verifies that multiple independent actions at depth 0 are
    // resolved in a single API call.
    Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
    try
    {
        //Arrange
        Setup();
        // Node action has pre+post, needs IActionRunner instances
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);

        // Capture the size of the first resolve batch — with batching both
        // top-level actions should arrive in that one call.
        var resolveCallCount = 0;
        var firstCallActionCount = 0;
        _jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
            .Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
            {
                resolveCallCount++;
                if (resolveCallCount == 1)
                {
                    firstCallActionCount = actions.Actions.Count;
                }
                var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
                foreach (var action in actions.Actions)
                {
                    var key = $"{action.NameWithOwner}@{action.Ref}";
                    result.Actions[key] = new ActionDownloadInfo
                    {
                        NameWithOwner = action.NameWithOwner,
                        Ref = action.Ref,
                        ResolvedNameWithOwner = action.NameWithOwner,
                        ResolvedSha = $"{action.Ref}-sha",
                        TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
                        ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
                    };
                }
                return Task.FromResult(result);
            });

        var actions = new List<Pipelines.ActionStep>
        {
            new Pipelines.ActionStep()
            {
                Name = "action1",
                Id = Guid.NewGuid(),
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "RepositoryActionWithWrapperActionfile_Node",
                    RepositoryType = "GitHub"
                }
            },
            new Pipelines.ActionStep()
            {
                Name = "action2",
                Id = Guid.NewGuid(),
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "RepositoryActionWithWrapperActionfile_Docker",
                    RepositoryType = "GitHub"
                }
            }
        };

        //Act
        await _actionManager.PrepareActionsAsync(_ec.Object, actions);

        //Assert
        // Both actions are at depth 0 — should be resolved in a single batch call
        Assert.Equal(1, resolveCallCount);
        Assert.Equal(2, firstCallActionCount);

        // Verify both were downloaded
        Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed")));
        Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed")));
    }
    finally
    {
        Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
        Teardown();
    }
}
#if OS_LINUX
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task PrepareActions_NestedCompositeContainers_BatchedResolution()
{
    // Verifies batching with nested composite actions that reference
    // container actions (Linux-only since containers require Linux).
    //
    // CompositeContainerNested (composite):
    //   → repositoryactionwithdockerfile (Dockerfile)
    //   → CompositeContainerNested2 (composite):
    //       → repositoryactionwithdockerfile (Dockerfile, same as above)
    //       → notpullorbuildimagesmultipletimes1 (Dockerfile)
    Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
    try
    {
        //Arrange
        Setup();

        var resolveCallCount = 0;
        _jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
            .Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
            {
                resolveCallCount++;
                var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
                foreach (var action in actions.Actions)
                {
                    var key = $"{action.NameWithOwner}@{action.Ref}";
                    result.Actions[key] = new ActionDownloadInfo
                    {
                        NameWithOwner = action.NameWithOwner,
                        Ref = action.Ref,
                        ResolvedNameWithOwner = action.NameWithOwner,
                        ResolvedSha = $"{action.Ref}-sha",
                        TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
                        ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
                    };
                }
                return Task.FromResult(result);
            });

        var actionId = Guid.NewGuid();
        var actions = new List<Pipelines.ActionStep>
        {
            new Pipelines.ActionStep()
            {
                Name = "action",
                Id = actionId,
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "CompositeContainerNested",
                    RepositoryType = "GitHub"
                }
            }
        };

        //Act
        var result = await _actionManager.PrepareActionsAsync(_ec.Object, actions);

        //Assert
        // Tree has 3 depth levels with 5 unique actions.
        // With batching, should need at most 3 resolve calls (one per depth level).
        Assert.True(resolveCallCount <= 3, $"Expected at most 3 resolve calls but got {resolveCallCount}");

        // repositoryactionwithdockerfile appears at both depth 1 and depth 2.
        // Container setup should still work correctly — 2 unique Docker images.
        Assert.Equal(2, result.ContainerSetupSteps.Count);
    }
    finally
    {
        Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
        Teardown();
    }
}
#endif
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task PrepareActions_ParallelDownloads_MultipleUniqueActions()
{
    // Verifies that multiple unique top-level actions are downloaded via
    // DownloadActionsInParallelAsync (the parallel code path), and that
    // all actions are correctly resolved and downloaded.
    Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
    try
    {
        //Arrange
        Setup();
        // Node action has pre step, and CompositePrestep recurses into
        // sub-actions that also need IActionRunner instances
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);

        // Incremented with Interlocked because the parallel download path may
        // issue resolve calls from concurrent tasks.
        var resolveCallCount = 0;
        _jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
            .Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
            {
                Interlocked.Increment(ref resolveCallCount);
                var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
                foreach (var action in actions.Actions)
                {
                    var key = $"{action.NameWithOwner}@{action.Ref}";
                    result.Actions[key] = new ActionDownloadInfo
                    {
                        NameWithOwner = action.NameWithOwner,
                        Ref = action.Ref,
                        ResolvedNameWithOwner = action.NameWithOwner,
                        ResolvedSha = $"{action.Ref}-sha",
                        TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
                        ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
                    };
                }
                return Task.FromResult(result);
            });

        var actions = new List<Pipelines.ActionStep>
        {
            new Pipelines.ActionStep()
            {
                Name = "action1",
                Id = Guid.NewGuid(),
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "RepositoryActionWithWrapperActionfile_Node",
                    RepositoryType = "GitHub"
                }
            },
            new Pipelines.ActionStep()
            {
                Name = "action2",
                Id = Guid.NewGuid(),
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "RepositoryActionWithWrapperActionfile_Docker",
                    RepositoryType = "GitHub"
                }
            },
            new Pipelines.ActionStep()
            {
                Name = "action3",
                Id = Guid.NewGuid(),
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "CompositePrestep",
                    RepositoryType = "GitHub"
                }
            }
        };

        //Act
        await _actionManager.PrepareActionsAsync(_ec.Object, actions);

        //Assert
        // 3 unique actions at depth 0 → triggers DownloadActionsInParallelAsync
        // (parallel path used when uniqueDownloads.Count > 1)
        var nodeCompleted = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed");
        var dockerCompleted = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed");
        var compositeCompleted = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "CompositePrestep.completed");
        Assert.True(File.Exists(nodeCompleted), $"Expected watermark at {nodeCompleted}");
        Assert.True(File.Exists(dockerCompleted), $"Expected watermark at {dockerCompleted}");
        Assert.True(File.Exists(compositeCompleted), $"Expected watermark at {compositeCompleted}");

        // All depth-0 actions resolved in a single batch call.
        // Composite sub-actions may add 1-2 more calls.
        Assert.True(resolveCallCount >= 1, "Expected at least 1 resolve call");
    }
    finally
    {
        Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task PrepareActions_DownloadsNextLevelActionsBeforeRecursing()
{
    // Verifies that depth-1 actions are downloaded before the depth-2
    // pre-resolve fires. We detect this by snapshotting watermark state
    // inside the 3rd ResolveActionDownloadInfoAsync callback (which is
    // the depth-2 pre-resolve). If pre-download works, depth-1 watermarks
    // already exist at that point.
    //
    // Action tree:
    //   CompositePrestep (composite) → [Node, CompositePrestep2 (composite)]
    //   CompositePrestep2 (composite) → [Node, Docker]
    //
    // Without pre-download: downloads happen during recursion (serial per depth)
    // With pre-download: depth 1 actions (Node + CompositePrestep2) are
    // downloaded in parallel before recursing, so recursion is a no-op
    // for downloads.
    Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
    try
    {
        //Arrange
        Setup();
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);

        // Track watermark state at the time of each resolve call.
        // If pre-download works, when the 3rd resolve fires (depth 2
        // pre-resolve for Docker), the depth-1 actions (Node +
        // CompositePrestep2) should already have watermarks on disk.
        var resolveCallCount = 0;
        var watermarksAtResolve3 = new Dictionary<string, bool>();
        _jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
            .Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
            {
                resolveCallCount++;
                if (resolveCallCount == 3)
                {
                    // At the time of the 3rd resolve, check if depth-1 actions
                    // are already downloaded (pre-download should have done this)
                    var actionsDir2 = _hc.GetDirectory(WellKnownDirectory.Actions);
                    watermarksAtResolve3["Node"] = File.Exists(Path.Combine(actionsDir2, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed"));
                    watermarksAtResolve3["CompositePrestep2"] = File.Exists(Path.Combine(actionsDir2, "TingluoHuang/runner_L0", "CompositePrestep2.completed"));
                }
                var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
                foreach (var action in actions.Actions)
                {
                    var key = $"{action.NameWithOwner}@{action.Ref}";
                    result.Actions[key] = new ActionDownloadInfo
                    {
                        NameWithOwner = action.NameWithOwner,
                        Ref = action.Ref,
                        ResolvedNameWithOwner = action.NameWithOwner,
                        ResolvedSha = $"{action.Ref}-sha",
                        TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
                        ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
                    };
                }
                return Task.FromResult(result);
            });

        var actionId = Guid.NewGuid();
        var actions = new List<Pipelines.ActionStep>
        {
            new Pipelines.ActionStep()
            {
                Name = "action",
                Id = actionId,
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "CompositePrestep",
                    RepositoryType = "GitHub"
                }
            }
        };

        //Act
        var result = await _actionManager.PrepareActionsAsync(_ec.Object, actions);

        //Assert
        // All actions should be downloaded (watermarks exist)
        var actionsDir = _hc.GetDirectory(WellKnownDirectory.Actions);
        Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "CompositePrestep.completed")));
        Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed")));
        Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "CompositePrestep2.completed")));
        Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed")));

        // 3 resolve calls total
        Assert.Equal(3, resolveCallCount);

        // The key assertion: at the time of the 3rd resolve call
        // (pre-resolve for depth 2), the depth-1 actions should
        // ALREADY be downloaded thanks to pre-download.
        // Without pre-download, these watermarks wouldn't exist yet
        // because depth-1 downloads would only happen during recursion.
        Assert.True(watermarksAtResolve3["Node"],
            "Node action should be pre-downloaded before depth 2 pre-resolve");
        Assert.True(watermarksAtResolve3["CompositePrestep2"],
            "CompositePrestep2 should be pre-downloaded before depth 2 pre-resolve");
    }
    finally
    {
        Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task PrepareActions_ParallelDownloadsAtSameDepth()
{
    // Verifies that multiple unique actions at the same depth are
    // downloaded concurrently (Task.WhenAll) rather than sequentially.
    // We detect this by checking that all watermarks exist after a
    // single PrepareActionsAsync call with multiple top-level actions.
    Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
    try
    {
        //Arrange
        Setup();
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);

        _jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
            .Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
            {
                var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
                foreach (var action in actions.Actions)
                {
                    var key = $"{action.NameWithOwner}@{action.Ref}";
                    result.Actions[key] = new ActionDownloadInfo
                    {
                        NameWithOwner = action.NameWithOwner,
                        Ref = action.Ref,
                        ResolvedNameWithOwner = action.NameWithOwner,
                        ResolvedSha = $"{action.Ref}-sha",
                        TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
                        ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
                    };
                }
                return Task.FromResult(result);
            });

        var actions = new List<Pipelines.ActionStep>
        {
            new Pipelines.ActionStep()
            {
                Name = "action1",
                Id = Guid.NewGuid(),
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "RepositoryActionWithWrapperActionfile_Node",
                    RepositoryType = "GitHub"
                }
            },
            new Pipelines.ActionStep()
            {
                Name = "action2",
                Id = Guid.NewGuid(),
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "RepositoryActionWithWrapperActionfile_Docker",
                    RepositoryType = "GitHub"
                }
            }
        };

        //Act
        await _actionManager.PrepareActionsAsync(_ec.Object, actions);

        //Assert - both downloaded (parallel path used when > 1 unique download)
        var actionsDir = _hc.GetDirectory(WellKnownDirectory.Actions);
        Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed")));
        Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed")));
    }
    finally
    {
        Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]

View File

@@ -504,7 +504,7 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}
[Fact]
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_Node24Action()
@@ -1006,6 +1006,45 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Setup(x => x.AddIssue(It.IsAny<Issue>(), It.IsAny<ExecutionContextLogOptions>())).Callback((Issue issue, ExecutionContextLogOptions logOptions) => { _hc.GetTrace().Info($"[{issue.Type}]{logOptions.LogMessageOverride ?? issue.Message}"); });
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
// Verifies that an expression function call (case(...)) used as an action
// input's default value is actually evaluated by EvaluateDefaultInput
// rather than passed through as a literal string.
public void Evaluate_Default_Input_Case_Function()
{
try
{
//Arrange
Setup();
var actionManifest = new ActionManifestManager();
actionManifest.Initialize(_hc);
// Populate the expression contexts the evaluator reads; only 'github'
// carries data here — the rest are empty placeholder dictionaries.
_ec.Object.ExpressionValues["github"] = new LegacyContextData.DictionaryContextData
{
{ "ref", new LegacyContextData.StringContextData("refs/heads/main") },
};
_ec.Object.ExpressionValues["strategy"] = new LegacyContextData.DictionaryContextData();
_ec.Object.ExpressionValues["matrix"] = new LegacyContextData.DictionaryContextData();
_ec.Object.ExpressionValues["steps"] = new LegacyContextData.DictionaryContextData();
_ec.Object.ExpressionValues["job"] = new LegacyContextData.DictionaryContextData();
_ec.Object.ExpressionValues["runner"] = new LegacyContextData.DictionaryContextData();
_ec.Object.ExpressionValues["env"] = new LegacyContextData.DictionaryContextData();
// hashFiles is registered as an available expression function (1-255 args).
_ec.Object.ExpressionFunctions.Add(new LegacyExpressions.FunctionInfo<GitHub.Runner.Worker.Expressions.HashFilesFunction>("hashFiles", 1, 255));
// Act — evaluate a case() expression as a default input value.
// The feature flag is set, so this should succeed.
// NOTE(review): presumably Setup() enables the relevant feature flag — confirm.
var token = new BasicExpressionToken(null, null, null, "case(true, 'matched', 'default')");
var result = actionManifest.EvaluateDefaultInput(_ec.Object, "testInput", token);
// Assert — case() should evaluate successfully
// case(true, 'matched', 'default') must pick the first branch.
Assert.Equal("matched", result);
}
finally
{
// Always dispose the test host context, even if evaluation throws.
Teardown();
}
}
private void Teardown()
{
_hc?.Dispose();

View File

@@ -16,8 +16,8 @@ namespace GitHub.Runner.Common.Tests.Worker
{
public sealed class DapDebuggerL0
{
private const string PortEnvironmentVariable = "ACTIONS_RUNNER_DAP_PORT";
private const string TimeoutEnvironmentVariable = "ACTIONS_RUNNER_DAP_CONNECTION_TIMEOUT";
private const string TunnelConnectTimeoutVariable = "ACTIONS_RUNNER_DAP_TUNNEL_CONNECT_TIMEOUT_SECONDS";
private DapDebugger _debugger;
private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
@@ -25,6 +25,7 @@ namespace GitHub.Runner.Common.Tests.Worker
var hc = new TestHostContext(this, testName);
_debugger = new DapDebugger();
_debugger.Initialize(hc);
_debugger.SkipTunnelRelay = true;
return hc;
}
@@ -56,11 +57,11 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}
private static int GetFreePort()
private static ushort GetFreePort()
{
using var listener = new TcpListener(IPAddress.Loopback, 0);
listener.Start();
return ((IPEndPoint)listener.LocalEndpoint).Port;
return (ushort)((IPEndPoint)listener.LocalEndpoint).Port;
}
private static async Task<TcpClient> ConnectClientAsync(int port)
@@ -140,10 +141,19 @@ namespace GitHub.Runner.Common.Tests.Worker
return Encoding.UTF8.GetString(body);
}
private static Mock<IExecutionContext> CreateJobContext(CancellationToken cancellationToken, string jobName = null)
private static Mock<IExecutionContext> CreateJobContextWithTunnel(CancellationToken cancellationToken, ushort port, string jobName = null)
{
var tunnel = new GitHub.DistributedTask.Pipelines.DebuggerTunnelInfo
{
TunnelId = "test-tunnel",
ClusterId = "test-cluster",
HostToken = "test-token",
Port = port
};
var debuggerConfig = new DebuggerConfig(true, tunnel);
var jobContext = new Mock<IExecutionContext>();
jobContext.Setup(x => x.CancellationToken).Returns(cancellationToken);
jobContext.Setup(x => x.Global).Returns(new GlobalContext { Debugger = debuggerConfig });
jobContext
.Setup(x => x.GetGitHubContext(It.IsAny<string>()))
.Returns((string contextName) => string.Equals(contextName, "job", StringComparison.Ordinal) ? jobName : null);
@@ -165,42 +175,36 @@ namespace GitHub.Runner.Common.Tests.Worker
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolvePortUsesCustomPortFromEnvironment()
public async Task StartAsyncFailsWithoutValidTunnelConfig()
{
using (CreateTestContext())
{
WithEnvironmentVariable(PortEnvironmentVariable, "9999", () =>
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = new Mock<IExecutionContext>();
jobContext.Setup(x => x.CancellationToken).Returns(cts.Token);
jobContext.Setup(x => x.Global).Returns(new GlobalContext
{
Assert.Equal(9999, _debugger.ResolvePort());
Debugger = new DebuggerConfig(true, null)
});
await Assert.ThrowsAsync<ArgumentException>(() => _debugger.StartAsync(jobContext.Object));
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolvePortIgnoresInvalidPortFromEnvironment()
public async Task StartAsyncUsesPortFromTunnelConfig()
{
using (CreateTestContext())
{
WithEnvironmentVariable(PortEnvironmentVariable, "not-a-number", () =>
{
Assert.Equal(4711, _debugger.ResolvePort());
});
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolvePortIgnoresOutOfRangePortFromEnvironment()
{
using (CreateTestContext())
{
WithEnvironmentVariable(PortEnvironmentVariable, "99999", () =>
{
Assert.Equal(4711, _debugger.ResolvePort());
});
var port = GetFreePort();
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
await _debugger.StartAsync(jobContext.Object);
using var client = await ConnectClientAsync(port);
Assert.True(client.Connected);
await _debugger.StopAsync();
}
}
@@ -254,15 +258,12 @@ namespace GitHub.Runner.Common.Tests.Worker
using (CreateTestContext())
{
var port = GetFreePort();
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
{
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContext(cts.Token);
await _debugger.StartAsync(jobContext.Object);
using var client = await ConnectClientAsync(port);
Assert.True(client.Connected);
await _debugger.StopAsync();
});
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
await _debugger.StartAsync(jobContext.Object);
using var client = await ConnectClientAsync(port);
Assert.True(client.Connected);
await _debugger.StopAsync();
}
}
@@ -275,13 +276,10 @@ namespace GitHub.Runner.Common.Tests.Worker
{
foreach (var port in new[] { GetFreePort(), GetFreePort() })
{
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
{
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContext(cts.Token);
await _debugger.StartAsync(jobContext.Object);
await _debugger.StopAsync();
});
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
await _debugger.StartAsync(jobContext.Object);
await _debugger.StopAsync();
}
}
}
@@ -294,25 +292,22 @@ namespace GitHub.Runner.Common.Tests.Worker
using (CreateTestContext())
{
var port = GetFreePort();
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
await _debugger.StartAsync(jobContext.Object);
var waitTask = _debugger.WaitUntilReadyAsync();
using var client = await ConnectClientAsync(port);
await SendRequestAsync(client.GetStream(), new Request
{
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContext(cts.Token);
await _debugger.StartAsync(jobContext.Object);
var waitTask = _debugger.WaitUntilReadyAsync();
using var client = await ConnectClientAsync(port);
await SendRequestAsync(client.GetStream(), new Request
{
Seq = 1,
Type = "request",
Command = "configurationDone"
});
await waitTask;
Assert.Equal(DapSessionState.Ready, _debugger.State);
await _debugger.StopAsync();
Seq = 1,
Type = "request",
Command = "configurationDone"
});
await waitTask;
Assert.Equal(DapSessionState.Ready, _debugger.State);
await _debugger.StopAsync();
}
}
@@ -324,25 +319,22 @@ namespace GitHub.Runner.Common.Tests.Worker
using (CreateTestContext())
{
var port = GetFreePort();
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContextWithTunnel(cts.Token, port, "ci-job");
await _debugger.StartAsync(jobContext.Object);
using var client = await ConnectClientAsync(port);
var stream = client.GetStream();
await SendRequestAsync(client.GetStream(), new Request
{
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContext(cts.Token, "ci-job");
await _debugger.StartAsync(jobContext.Object);
using var client = await ConnectClientAsync(port);
var stream = client.GetStream();
await SendRequestAsync(client.GetStream(), new Request
{
Seq = 1,
Type = "request",
Command = "threads"
});
var response = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
Assert.Contains("\"command\":\"threads\"", response);
Assert.Contains("\"name\":\"Job: ci-job\"", response);
await _debugger.StopAsync();
Seq = 1,
Type = "request",
Command = "threads"
});
var response = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
Assert.Contains("\"command\":\"threads\"", response);
Assert.Contains("\"name\":\"Job: ci-job\"", response);
await _debugger.StopAsync();
}
}
@@ -354,30 +346,27 @@ namespace GitHub.Runner.Common.Tests.Worker
using (CreateTestContext())
{
var port = GetFreePort();
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
await _debugger.StartAsync(jobContext.Object);
var waitTask = _debugger.WaitUntilReadyAsync();
using var client = await ConnectClientAsync(port);
await SendRequestAsync(client.GetStream(), new Request
{
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContext(cts.Token);
await _debugger.StartAsync(jobContext.Object);
var waitTask = _debugger.WaitUntilReadyAsync();
using var client = await ConnectClientAsync(port);
await SendRequestAsync(client.GetStream(), new Request
{
Seq = 1,
Type = "request",
Command = "configurationDone"
});
await waitTask;
cts.Cancel();
// In the real runner, JobRunner always calls OnJobCompletedAsync
// from a finally block. The cancellation callback only unblocks
// pending waits; OnJobCompletedAsync handles state + cleanup.
await _debugger.OnJobCompletedAsync();
Assert.Equal(DapSessionState.Terminated, _debugger.State);
Seq = 1,
Type = "request",
Command = "configurationDone"
});
await waitTask;
cts.Cancel();
// In the real runner, JobRunner always calls OnJobCompletedAsync
// from a finally block. The cancellation callback only unblocks
// pending waits; OnJobCompletedAsync handles state + cleanup.
await _debugger.OnJobCompletedAsync();
Assert.Equal(DapSessionState.Terminated, _debugger.State);
}
}
@@ -400,25 +389,22 @@ namespace GitHub.Runner.Common.Tests.Worker
using (CreateTestContext())
{
var port = GetFreePort();
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
await _debugger.StartAsync(jobContext.Object);
var waitTask = _debugger.WaitUntilReadyAsync();
using var client = await ConnectClientAsync(port);
await SendRequestAsync(client.GetStream(), new Request
{
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContext(cts.Token);
await _debugger.StartAsync(jobContext.Object);
var waitTask = _debugger.WaitUntilReadyAsync();
using var client = await ConnectClientAsync(port);
await SendRequestAsync(client.GetStream(), new Request
{
Seq = 1,
Type = "request",
Command = "configurationDone"
});
await waitTask;
await _debugger.OnJobCompletedAsync();
Assert.Equal(DapSessionState.Terminated, _debugger.State);
Seq = 1,
Type = "request",
Command = "configurationDone"
});
await waitTask;
await _debugger.OnJobCompletedAsync();
Assert.Equal(DapSessionState.Terminated, _debugger.State);
}
}
@@ -441,20 +427,17 @@ namespace GitHub.Runner.Common.Tests.Worker
using (CreateTestContext())
{
var port = GetFreePort();
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
{
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContext(cts.Token);
await _debugger.StartAsync(jobContext.Object);
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
await _debugger.StartAsync(jobContext.Object);
var waitTask = _debugger.WaitUntilReadyAsync();
await Task.Delay(50);
cts.Cancel();
var waitTask = _debugger.WaitUntilReadyAsync();
await Task.Delay(50);
cts.Cancel();
var ex = await Assert.ThrowsAnyAsync<OperationCanceledException>(() => waitTask);
Assert.IsNotType<TimeoutException>(ex);
await _debugger.StopAsync();
});
var ex = await Assert.ThrowsAnyAsync<OperationCanceledException>(() => waitTask);
Assert.IsNotType<TimeoutException>(ex);
await _debugger.StopAsync();
}
}
@@ -471,32 +454,29 @@ namespace GitHub.Runner.Common.Tests.Worker
hc.SecretMasker.AddValue("initialized");
var port = GetFreePort();
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
await _debugger.StartAsync(jobContext.Object);
using var client = await ConnectClientAsync(port);
var stream = client.GetStream();
await SendRequestAsync(stream, new Request
{
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContext(cts.Token);
await _debugger.StartAsync(jobContext.Object);
using var client = await ConnectClientAsync(port);
var stream = client.GetStream();
await SendRequestAsync(stream, new Request
{
Seq = 1,
Type = "request",
Command = "initialize"
});
var response = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
Assert.Contains("\"type\":\"response\"", response);
Assert.Contains("\"command\":\"initialize\"", response);
Assert.Contains("\"success\":true", response);
var initializedEvent = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
Assert.Contains("\"type\":\"event\"", initializedEvent);
Assert.Contains("\"event\":\"initialized\"", initializedEvent);
await _debugger.StopAsync();
Seq = 1,
Type = "request",
Command = "initialize"
});
var response = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
Assert.Contains("\"type\":\"response\"", response);
Assert.Contains("\"command\":\"initialize\"", response);
Assert.Contains("\"success\":true", response);
var initializedEvent = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
Assert.Contains("\"type\":\"event\"", initializedEvent);
Assert.Contains("\"event\":\"initialized\"", initializedEvent);
await _debugger.StopAsync();
}
}
@@ -508,41 +488,38 @@ namespace GitHub.Runner.Common.Tests.Worker
using (CreateTestContext())
{
var port = GetFreePort();
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
await _debugger.StartAsync(jobContext.Object);
// Complete handshake so session is ready
var waitTask = _debugger.WaitUntilReadyAsync();
using var client = await ConnectClientAsync(port);
var stream = client.GetStream();
await SendRequestAsync(stream, new Request
{
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContext(cts.Token);
await _debugger.StartAsync(jobContext.Object);
// Complete handshake so session is ready
var waitTask = _debugger.WaitUntilReadyAsync();
using var client = await ConnectClientAsync(port);
var stream = client.GetStream();
await SendRequestAsync(stream, new Request
{
Seq = 1,
Type = "request",
Command = "configurationDone"
});
await waitTask;
// Simulate a step starting (which pauses)
var step = new Mock<IStep>();
step.Setup(s => s.DisplayName).Returns("Test Step");
step.Setup(s => s.ExecutionContext).Returns((IExecutionContext)null);
var stepTask = _debugger.OnStepStartingAsync(step.Object);
// Give the step time to pause
await Task.Delay(50);
// Cancel the job — should release the step pause
cts.Cancel();
await stepTask;
// In the real runner, OnJobCompletedAsync always follows.
await _debugger.OnJobCompletedAsync();
Assert.Equal(DapSessionState.Terminated, _debugger.State);
Seq = 1,
Type = "request",
Command = "configurationDone"
});
await waitTask;
// Simulate a step starting (which pauses)
var step = new Mock<IStep>();
step.Setup(s => s.DisplayName).Returns("Test Step");
step.Setup(s => s.ExecutionContext).Returns((IExecutionContext)null);
var stepTask = _debugger.OnStepStartingAsync(step.Object);
// Give the step time to pause
await Task.Delay(50);
// Cancel the job — should release the step pause
cts.Cancel();
await stepTask;
// In the real runner, OnJobCompletedAsync always follows.
await _debugger.OnJobCompletedAsync();
Assert.Equal(DapSessionState.Terminated, _debugger.State);
}
}
@@ -558,13 +535,10 @@ namespace GitHub.Runner.Common.Tests.Worker
// Start then immediate stop (no connection, no WaitUntilReady)
var port = GetFreePort();
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
{
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContext(cts.Token);
await _debugger.StartAsync(jobContext.Object);
await _debugger.StopAsync();
});
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
await _debugger.StartAsync(jobContext.Object);
await _debugger.StopAsync();
// StopAsync after already stopped
await _debugger.StopAsync();
@@ -579,36 +553,86 @@ namespace GitHub.Runner.Common.Tests.Worker
using (CreateTestContext())
{
var port = GetFreePort();
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
await _debugger.StartAsync(jobContext.Object);
var waitTask = _debugger.WaitUntilReadyAsync();
using var client = await ConnectClientAsync(port);
var stream = client.GetStream();
await SendRequestAsync(stream, new Request
{
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
var jobContext = CreateJobContext(cts.Token);
await _debugger.StartAsync(jobContext.Object);
Seq = 1,
Type = "request",
Command = "configurationDone"
});
var waitTask = _debugger.WaitUntilReadyAsync();
using var client = await ConnectClientAsync(port);
var stream = client.GetStream();
await SendRequestAsync(stream, new Request
{
Seq = 1,
Type = "request",
Command = "configurationDone"
});
// Read the configurationDone response
await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
await waitTask;
// Read the configurationDone response
await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
await waitTask;
// Complete the job — events are sent via OnJobCompletedAsync
await _debugger.OnJobCompletedAsync();
// Complete the job — events are sent via OnJobCompletedAsync
await _debugger.OnJobCompletedAsync();
var msg1 = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
var msg2 = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
var msg1 = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
var msg2 = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
// Both events should arrive (order may vary)
var combined = msg1 + msg2;
Assert.Contains("\"event\":\"terminated\"", combined);
Assert.Contains("\"event\":\"exited\"", combined);
}
}
// Both events should arrive (order may vary)
var combined = msg1 + msg2;
Assert.Contains("\"event\":\"terminated\"", combined);
Assert.Contains("\"event\":\"exited\"", combined);
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolveTunnelConnectTimeoutReturnsDefaultWhenNoVariable()
{
    // With no timeout variable configured, the resolver reports
    // the expected default of 30 seconds.
    using var testContext = CreateTestContext();
    Assert.Equal(30, _debugger.ResolveTunnelConnectTimeout());
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolveTunnelConnectTimeoutUsesCustomValue()
{
    // A valid integer in the timeout variable overrides the default.
    using var testContext = CreateTestContext();
    WithEnvironmentVariable(
        TunnelConnectTimeoutVariable,
        "60",
        () => Assert.Equal(60, _debugger.ResolveTunnelConnectTimeout()));
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolveTunnelConnectTimeoutIgnoresInvalidValue()
{
    // A non-numeric value is rejected and the 30-second default is used.
    using var testContext = CreateTestContext();
    WithEnvironmentVariable(
        TunnelConnectTimeoutVariable,
        "not-a-number",
        () => Assert.Equal(30, _debugger.ResolveTunnelConnectTimeout()));
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolveTunnelConnectTimeoutIgnoresZeroValue()
{
    // Zero is not a usable timeout, so the resolver falls back to 30.
    using var testContext = CreateTestContext();
    WithEnvironmentVariable(
        TunnelConnectTimeoutVariable,
        "0",
        () => Assert.Equal(30, _debugger.ResolveTunnelConnectTimeout()));
}

View File

@@ -1203,19 +1203,19 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}
// TODO: this test can be deleted when `AddCheckRunIdToJobContext` is fully rolled out
// AddCheckRunIdToJobContext is now permanently enabled server-side (hardcoded to "true"
// in acquirejobhandler.go). The runner always copies ContextData["job"] entries, so the
// flag-disabled test is no longer applicable. Replaced with a test that verifies
// check_run_id is always hydrated regardless of the flag value.
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void InitializeJob_HydratesJobContextWithCheckRunId_FeatureFlagDisabled()
public void InitializeJob_HydratesJobContextWithCheckRunId_AlwaysCopied()
{
using (TestHostContext hc = CreateTestContext())
{
// Arrange: Create a job request message and make sure the feature flag is disabled
var variables = new Dictionary<string, VariableValue>()
{
[Constants.Runner.Features.AddCheckRunIdToJobContext] = new VariableValue("false"),
};
// Arrange: No feature flag set at all
var variables = new Dictionary<string, VariableValue>();
var jobRequest = new Pipelines.AgentJobRequestMessage(new TaskOrchestrationPlanReference(), new TimelineReference(), Guid.NewGuid(), "some job name", "some job name", null, null, null, variables, new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);
var pagingLogger = new Moq.Mock<IPagingLogger>();
var jobServerQueue = new Moq.Mock<IJobServerQueue>();
@@ -1233,9 +1233,80 @@ namespace GitHub.Runner.Common.Tests.Worker
// Act
ec.InitializeJob(jobRequest, CancellationToken.None);
// Assert
// Assert: check_run_id is always copied regardless of flag
Assert.NotNull(ec.JobContext);
Assert.Null(ec.JobContext.CheckRunId); // with the feature flag disabled we should not have added a CheckRunId to the JobContext
Assert.Equal(123456, ec.JobContext.CheckRunId);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void InitializeJob_HydratesJobContextWithWorkflowIdentity()
{
    using (TestHostContext hostContext = CreateTestContext())
    {
        // Arrange: a minimal job message with no variables set.
        var jobVariables = new Dictionary<string, VariableValue>();
        var message = new Pipelines.AgentJobRequestMessage(new TaskOrchestrationPlanReference(), new TimelineReference(), Guid.NewGuid(), "some job name", "some job name", null, null, null, jobVariables, new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);

        var mockPagingLogger = new Moq.Mock<IPagingLogger>();
        var mockJobServerQueue = new Moq.Mock<IJobServerQueue>();
        hostContext.EnqueueInstance(mockPagingLogger.Object);
        hostContext.SetSingleton(mockJobServerQueue.Object);

        var executionContext = new Runner.Worker.ExecutionContext();
        executionContext.Initialize(hostContext);

        // Arrange: the server supplies all four workflow identity fields
        // inside the "job" context entry.
        var serverJobContext = new Pipelines.ContextData.DictionaryContextData();
        serverJobContext["workflow_ref"] = new StringContextData("my-org/my-repo/.github/workflows/reusable.yml@refs/heads/main");
        serverJobContext["workflow_sha"] = new StringContextData("abc123def456");
        serverJobContext["workflow_repository"] = new StringContextData("my-org/my-repo");
        serverJobContext["workflow_file_path"] = new StringContextData(".github/workflows/reusable.yml");
        message.ContextData["job"] = serverJobContext;
        message.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();

        // Act
        executionContext.InitializeJob(message, CancellationToken.None);

        // Assert: every workflow identity property is hydrated from the server data.
        Assert.NotNull(executionContext.JobContext);
        Assert.Equal("my-org/my-repo/.github/workflows/reusable.yml@refs/heads/main", executionContext.JobContext.WorkflowRef);
        Assert.Equal("abc123def456", executionContext.JobContext.WorkflowSha);
        Assert.Equal("my-org/my-repo", executionContext.JobContext.WorkflowRepository);
        Assert.Equal(".github/workflows/reusable.yml", executionContext.JobContext.WorkflowFilePath);
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void InitializeJob_WorkflowIdentityNotSet_WhenServerSendsNoData()
{
    using (TestHostContext hostContext = CreateTestContext())
    {
        // Arrange: a job message whose "job" context entry carries no
        // workflow identity fields at all.
        var jobVariables = new Dictionary<string, VariableValue>();
        var message = new Pipelines.AgentJobRequestMessage(new TaskOrchestrationPlanReference(), new TimelineReference(), Guid.NewGuid(), "some job name", "some job name", null, null, null, jobVariables, new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null, null);

        var mockPagingLogger = new Moq.Mock<IPagingLogger>();
        var mockJobServerQueue = new Moq.Mock<IJobServerQueue>();
        hostContext.EnqueueInstance(mockPagingLogger.Object);
        hostContext.SetSingleton(mockJobServerQueue.Object);

        var executionContext = new Runner.Worker.ExecutionContext();
        executionContext.Initialize(hostContext);

        // Arrange: empty "job" and "github" context entries.
        message.ContextData["job"] = new Pipelines.ContextData.DictionaryContextData();
        message.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();

        // Act
        executionContext.InitializeJob(message, CancellationToken.None);

        // Assert: none of the workflow identity properties are populated.
        Assert.NotNull(executionContext.JobContext);
        Assert.Null(executionContext.JobContext.WorkflowRef);
        Assert.Null(executionContext.JobContext.WorkflowSha);
        Assert.Null(executionContext.JobContext.WorkflowRepository);
        Assert.Null(executionContext.JobContext.WorkflowFilePath);
    }
}

View File

@@ -34,5 +34,109 @@ namespace GitHub.Runner.Common.Tests.Worker
ctx.CheckRunId = null;
Assert.Null(ctx.CheckRunId);
}
[Fact]
public void WorkflowRef_SetAndGet_WorksCorrectly()
{
    // The typed property and the raw "workflow_ref" entry stay in sync.
    const string workflowRef = "owner/repo/.github/workflows/ci.yml@refs/heads/main";
    var context = new JobContext { WorkflowRef = workflowRef };

    Assert.Equal(workflowRef, context.WorkflowRef);
    Assert.True(context.TryGetValue("workflow_ref", out var stored));
    Assert.IsType<StringContextData>(stored);
}
[Fact]
public void WorkflowRef_NotSet_ReturnsNull()
{
    // A freshly constructed context reports null rather than throwing.
    Assert.Null(new JobContext().WorkflowRef);
}
[Fact]
public void WorkflowRef_SetNull_ClearsValue()
{
    var context = new JobContext
    {
        WorkflowRef = "owner/repo/.github/workflows/ci.yml@refs/heads/main"
    };

    // Null assignment clears the previously set value.
    context.WorkflowRef = null;

    Assert.Null(context.WorkflowRef);
}
[Fact]
public void WorkflowSha_SetAndGet_WorksCorrectly()
{
    // The typed property and the raw "workflow_sha" entry stay in sync.
    const string workflowSha = "abc123def456";
    var context = new JobContext { WorkflowSha = workflowSha };

    Assert.Equal(workflowSha, context.WorkflowSha);
    Assert.True(context.TryGetValue("workflow_sha", out var stored));
    Assert.IsType<StringContextData>(stored);
}
[Fact]
public void WorkflowSha_NotSet_ReturnsNull()
{
    // A freshly constructed context reports null rather than throwing.
    Assert.Null(new JobContext().WorkflowSha);
}
[Fact]
public void WorkflowSha_SetNull_ClearsValue()
{
    var context = new JobContext { WorkflowSha = "abc123def456" };

    // Null assignment clears the previously set value.
    context.WorkflowSha = null;

    Assert.Null(context.WorkflowSha);
}
[Fact]
public void WorkflowRepository_SetAndGet_WorksCorrectly()
{
    // The typed property and the raw "workflow_repository" entry stay in sync.
    const string workflowRepository = "owner/repo";
    var context = new JobContext { WorkflowRepository = workflowRepository };

    Assert.Equal(workflowRepository, context.WorkflowRepository);
    Assert.True(context.TryGetValue("workflow_repository", out var stored));
    Assert.IsType<StringContextData>(stored);
}
[Fact]
public void WorkflowRepository_NotSet_ReturnsNull()
{
    // A freshly constructed context reports null rather than throwing.
    Assert.Null(new JobContext().WorkflowRepository);
}
[Fact]
public void WorkflowRepository_SetNull_ClearsValue()
{
    var context = new JobContext { WorkflowRepository = "owner/repo" };

    // Null assignment clears the previously set value.
    context.WorkflowRepository = null;

    Assert.Null(context.WorkflowRepository);
}
[Fact]
public void WorkflowFilePath_SetAndGet_WorksCorrectly()
{
    // The typed property and the raw "workflow_file_path" entry stay in sync.
    const string workflowFilePath = ".github/workflows/ci.yml";
    var context = new JobContext { WorkflowFilePath = workflowFilePath };

    Assert.Equal(workflowFilePath, context.WorkflowFilePath);
    Assert.True(context.TryGetValue("workflow_file_path", out var stored));
    Assert.IsType<StringContextData>(stored);
}
[Fact]
public void WorkflowFilePath_NotSet_ReturnsNull()
{
    // A freshly constructed context reports null rather than throwing.
    Assert.Null(new JobContext().WorkflowFilePath);
}
[Fact]
public void WorkflowFilePath_SetNull_ClearsValue()
{
    var context = new JobContext { WorkflowFilePath = ".github/workflows/ci.yml" };

    // Null assignment clears the previously set value.
    context.WorkflowFilePath = null;

    Assert.Null(context.WorkflowFilePath);
}
}
}