Compare commits

..

51 Commits

Author SHA1 Message Date
Francesco Renzi
e215a33c45 steps are actually replayable! 2026-01-16 01:57:06 +00:00
Francesco Renzi
b4d39971ed clear expressions first 2026-01-16 01:31:26 +00:00
Francesco Renzi
ebdf081671 fix indexing 2026-01-16 01:05:30 +00:00
Francesco Renzi
82e35c0caa Include line when stepping back to get to right index 2026-01-16 00:49:08 +00:00
Francesco Renzi
b652350bda update extension and proxy for keepalive 2026-01-16 00:29:40 +00:00
Francesco Renzi
2525a1f9a3 fix ordering for first step 2026-01-16 00:29:40 +00:00
Francesco Renzi
ff85ab7fe0 handle cancellation 2026-01-15 23:29:45 +00:00
Francesco Renzi
2800573f56 wip 2026-01-15 22:46:10 +00:00
Francesco Renzi
f1a0d1a9f8 wip 2026-01-15 21:26:43 +00:00
Francesco Renzi
15b7034088 wip extension 2026-01-15 21:16:55 +00:00
Francesco Renzi
bbe97ff1c8 logging 2026-01-15 18:11:34 +00:00
Francesco Renzi
7a36a68b15 step-backwards working! 2026-01-15 17:05:31 +00:00
Francesco Renzi
f45c5d0785 Fix expression parsing (partially) 2026-01-15 16:05:01 +00:00
Francesco Renzi
7e4f99337f fix double output + masking 2026-01-15 13:52:24 +00:00
Francesco Renzi
186656e153 Phase 5 done 2026-01-14 21:24:59 +00:00
Francesco Renzi
2e02381901 phase 4 complete 2026-01-14 21:14:10 +00:00
Francesco Renzi
a55696a429 Phase 3 complete 2026-01-14 21:05:55 +00:00
Francesco Renzi
379ac038b2 Phase 2 done 2026-01-14 20:34:11 +00:00
Francesco Renzi
14e8e1f667 Phase 1 done 2026-01-14 20:21:52 +00:00
Allan Guigou
3f43560cb9 Prepare runner release 2.331.0 (#4190) 2026-01-09 12:15:39 -05:00
dependabot[bot]
73f7dbb681 Bump Azure.Storage.Blobs from 12.26.0 to 12.27.0 (#4189)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-09 14:54:40 +00:00
dependabot[bot]
f554a6446d Bump typescript from 5.9.2 to 5.9.3 in /src/Misc/expressionFunc/hashFiles (#4184)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Salman Chishti <salmanmkc@GitHub.com>
2026-01-07 18:52:44 +00:00
Tingluo Huang
bdceac4ab3 Allow hosted VM report job telemetry via .setup_info file. (#4186) 2026-01-07 13:27:22 -05:00
Tingluo Huang
3f1dd45172 Set ACTIONS_ORCHESTRATION_ID as env to actions. (#4178)
Co-authored-by: Copilot <198982749+Copilot@users.noreply.github.com>
Co-authored-by: TingluoHuang <1750815+TingluoHuang@users.noreply.github.com>
2026-01-06 14:06:47 -05:00
dependabot[bot]
cf8f50b4d8 Bump actions/upload-artifact from 5 to 6 (#4157)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Salman Chishti <salmanmkc@GitHub.com>
2025-12-21 08:31:15 +00:00
dependabot[bot]
2cf22c4858 Bump actions/download-artifact from 6 to 7 (#4155)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Salman Chishti <salmanmkc@GitHub.com>
2025-12-18 23:52:35 +00:00
eric sciple
04d77df0c7 Cleanup feature flag actions_container_action_runner_temp (#4163) 2025-12-18 14:53:43 -06:00
Allan Guigou
651077689d Add support for case function (#4147)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-17 15:57:05 +00:00
Tingluo Huang
c96dcd4729 Bump docker image to use ubuntu 24.04 (#4018) 2025-12-12 13:38:45 -05:00
github-actions[bot]
4b0058f15c chore: update Node versions (#4149)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-12-12 14:57:21 +00:00
dependabot[bot]
87d1dfb798 Bump actions/checkout from 5 to 6 (#4130)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Salman Chishti <salmanmkc@GitHub.com>
2025-12-12 11:00:47 +00:00
dependabot[bot]
c992a2b406 Bump actions/github-script from 7 to 8 (#4137)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Salman Chishti <salmanmkc@GitHub.com>
2025-12-12 10:54:38 +00:00
Tingluo Huang
b2204f1fab Ensure safe_sleep tries alternative approaches (#4146) 2025-12-11 09:53:50 -05:00
github-actions[bot]
f99c3e6ee8 chore: update Node versions (#4144)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-12-08 16:52:16 +00:00
Tingluo Huang
463496e4fb Fix regex for validating runner version format (#4136) 2025-11-24 10:30:33 -05:00
Tingluo Huang
3f9f6f3994 Update workflow around runner docker image. (#4133) 2025-11-24 08:59:01 -05:00
github-actions[bot]
221f65874f Update Docker to v29.0.2 and Buildx to v0.30.1 (#4135)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-11-24 11:37:28 +00:00
Nikola Jokic
9a21440691 Fix owner of /home/runner directory (#4132) 2025-11-21 16:15:17 -05:00
Tingluo Huang
54bcc001e5 Prepare runner release v2.330.0 (#4123) 2025-11-19 09:24:04 -05:00
Tingluo Huang
7df164d2c7 Bump npm pkg version for hashFiles. (#4122) 2025-11-18 10:12:23 -05:00
eric sciple
a54f380b0e Compare updated workflow parser for ActionManifestManager (#4111) 2025-11-18 01:15:46 +00:00
github-actions[bot]
8b184c3871 Update dotnet sdk to latest version @8.0.416 (#4116)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-11-17 23:22:47 +00:00
github-actions[bot]
b56b161118 chore: update Node versions (#4115)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-11-17 18:18:08 -05:00
github-actions[bot]
69aca04de1 Update Docker to v29.0.1 and Buildx to v0.30.0 (#4114)
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-11-17 02:40:31 +00:00
Tingluo Huang
b3a60e6b06 Retry http error related to DNS resolution failure. (#4110) 2025-11-13 13:24:09 -05:00
dupondje
334df748d1 Only start runner after network is online (#4094)
Signed-off-by: Jean-Louis Dupond <jean-louis@dupond.be>
2025-11-12 01:33:26 +00:00
dependabot[bot]
b08f962182 Bump Azure.Storage.Blobs from 12.25.1 to 12.26.0 (#4077)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-12 01:07:51 +00:00
dependabot[bot]
b8144769c6 Bump actions/upload-artifact from 4 to 5 (#4088)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-11 20:03:26 -05:00
dependabot[bot]
2a00363a90 Bump actions/download-artifact from 5 to 6 (#4089)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-12 00:50:59 +00:00
lets-build-an-ocean
a1c09806c3 Add support for libicu73-76 for newer Debian/Ubuntu versions (#4098) 2025-11-12 00:45:12 +00:00
Caleb Xu
c0776daddb fix(dockerfile): set more lenient permissions on /home/runner (#4083)
Signed-off-by: Caleb Xu <caxu@redhat.com>
2025-11-10 17:53:27 -05:00
91 changed files with 14214 additions and 910 deletions

View File

@@ -4,7 +4,7 @@
"features": {
"ghcr.io/devcontainers/features/docker-in-docker:1": {},
"ghcr.io/devcontainers/features/dotnet": {
"version": "8.0.415"
"version": "8.0.416"
},
"ghcr.io/devcontainers/features/node:1": {
"version": "20"

View File

@@ -14,6 +14,9 @@ on:
paths-ignore:
- '**.md'
permissions:
contents: read
jobs:
build:
strategy:
@@ -50,7 +53,7 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
# Build runner layout
- name: Build & Layout Release
@@ -75,8 +78,53 @@ jobs:
# Upload runner package tar.gz/zip as artifact
- name: Publish Artifact
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v6
with:
name: runner-package-${{ matrix.runtime }}
path: |
_package
docker:
strategy:
matrix:
os: [ ubuntu-latest, ubuntu-24.04-arm ]
include:
- os: ubuntu-latest
docker_platform: linux/amd64
- os: ubuntu-24.04-arm
docker_platform: linux/arm64
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v6
- name: Get latest runner version
id: latest_runner
uses: actions/github-script@v8
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const release = await github.rest.repos.getLatestRelease({
owner: 'actions',
repo: 'runner',
});
const version = release.data.tag_name.replace(/^v/, '');
core.setOutput('version', version);
- name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
- name: Build Docker image
uses: docker/build-push-action@v6
with:
context: ./images
load: true
platforms: ${{ matrix.docker_platform }}
tags: |
${{ github.sha }}:latest
build-args: |
RUNNER_VERSION=${{ steps.latest_runner.outputs.version }}
- name: Test Docker image
run: |
docker run --rm ${{ github.sha }}:latest ./run.sh --version

View File

@@ -23,7 +23,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

View File

@@ -29,7 +29,7 @@ jobs:
npm-vulnerabilities: ${{ steps.check-versions.outputs.npm-vulnerabilities }}
open-dependency-prs: ${{ steps.check-prs.outputs.open-dependency-prs }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v6
with:

View File

@@ -17,7 +17,7 @@ jobs:
BUILDX_CURRENT_VERSION: ${{ steps.check_buildx_version.outputs.CURRENT_VERSION }}
steps:
- name: Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Check Docker version
id: check_docker_version
@@ -89,7 +89,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Update Docker version
shell: bash

75
.github/workflows/docker-publish.yml vendored Normal file
View File

@@ -0,0 +1,75 @@
name: Publish DockerImage from Release Branch
on:
workflow_dispatch:
inputs:
releaseBranch:
description: 'Release Branch (releases/mXXX)'
required: true
jobs:
publish-image:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
attestations: write
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
steps:
- name: Checkout repository
uses: actions/checkout@v6
with:
ref: ${{ github.event.inputs.releaseBranch }}
- name: Compute image version
id: image
uses: actions/github-script@v8
with:
script: |
const fs = require('fs');
const runnerVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '');
console.log(`Using runner version ${runnerVersion}`);
if (!/^\d+\.\d+\.\d+$/.test(runnerVersion)) {
throw new Error(`Invalid runner version: ${runnerVersion}`);
}
core.setOutput('version', runnerVersion);
- name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
- name: Log into registry ${{ env.REGISTRY }}
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v6
with:
context: ./images
platforms: |
linux/amd64
linux/arm64
tags: |
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
build-args: |
RUNNER_VERSION=${{ steps.image.outputs.version }}
push: true
labels: |
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
org.opencontainers.image.licenses=MIT
annotations: |
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
- name: Generate attestation
uses: actions/attest-build-provenance@v3
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
subject-digest: ${{ steps.build-and-push.outputs.digest }}
push-to-registry: true

View File

@@ -15,7 +15,7 @@ jobs:
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
steps:
- name: Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Get current major minor version
id: fetch_current_version
shell: bash
@@ -89,7 +89,7 @@ jobs:
if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
with:
ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
- name: Create Pull Request

View File

@@ -9,7 +9,7 @@ jobs:
update-node:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- name: Get latest Node versions
id: node-versions
run: |

View File

@@ -7,7 +7,7 @@ jobs:
npm-audit-with-ts-fix:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v6
with:

View File

@@ -9,7 +9,7 @@ jobs:
npm-audit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v6

View File

@@ -11,12 +11,12 @@ jobs:
if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
# Make sure ./releaseVersion match ./src/runnerversion
# Query GitHub release ensure version is not used
- name: Check version
uses: actions/github-script@v8.0.0
uses: actions/github-script@v8
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
@@ -86,7 +86,7 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
# Build runner layout
- name: Build & Layout Release
@@ -118,7 +118,7 @@ jobs:
# Upload runner package tar.gz/zip as artifact.
- name: Publish Artifact
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v6
with:
name: runner-packages-${{ matrix.runtime }}
path: |
@@ -129,41 +129,41 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
# Download runner package tar.gz/zip produced by 'build' job
- name: Download Artifact (win-x64)
uses: actions/download-artifact@v5
uses: actions/download-artifact@v7
with:
name: runner-packages-win-x64
path: ./
- name: Download Artifact (win-arm64)
uses: actions/download-artifact@v5
uses: actions/download-artifact@v7
with:
name: runner-packages-win-arm64
path: ./
- name: Download Artifact (osx-x64)
uses: actions/download-artifact@v5
uses: actions/download-artifact@v7
with:
name: runner-packages-osx-x64
path: ./
- name: Download Artifact (osx-arm64)
uses: actions/download-artifact@v5
uses: actions/download-artifact@v7
with:
name: runner-packages-osx-arm64
path: ./
- name: Download Artifact (linux-x64)
uses: actions/download-artifact@v5
uses: actions/download-artifact@v7
with:
name: runner-packages-linux-x64
path: ./
- name: Download Artifact (linux-arm)
uses: actions/download-artifact@v5
uses: actions/download-artifact@v7
with:
name: runner-packages-linux-arm
path: ./
- name: Download Artifact (linux-arm64)
uses: actions/download-artifact@v5
uses: actions/download-artifact@v7
with:
name: runner-packages-linux-arm64
path: ./
@@ -171,7 +171,7 @@ jobs:
# Create ReleaseNote file
- name: Create ReleaseNote
id: releaseNote
uses: actions/github-script@v8.0.0
uses: actions/github-script@v8
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
@@ -296,11 +296,11 @@ jobs:
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
steps:
- name: Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v6
- name: Compute image version
id: image
uses: actions/github-script@v8.0.0
uses: actions/github-script@v8
with:
script: |
const fs = require('fs');
@@ -334,8 +334,9 @@ jobs:
push: true
labels: |
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
org.opencontainers.image.licenses=MIT
annotations: |
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
- name: Generate attestation
uses: actions/attest-build-provenance@v3

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,346 @@
# DAP Cancellation Support
**Status:** Implemented (Phase 5 testing still pending — see checklist below)
**Author:** OpenCode
**Date:** January 2026
## Problem
When a cancellation signal for the current job comes in from the server, the DAP debugging session doesn't properly respond. If the runner is paused at a breakpoint waiting for debugger commands (or if a debugger never connects), the job gets stuck forever and requires manually deleting the runner.
### Root Cause
The `DapDebugSession.WaitForCommandAsync()` method uses a `TaskCompletionSource` that only completes when a DAP command arrives from the debugger. There's no mechanism to interrupt this wait when the job is cancelled externally.
Additionally, REPL shell commands use `CancellationToken.None`, so they also ignore job cancellation.
## Solution
Add proper cancellation token support throughout the DAP debugging flow:
1. Pass the job cancellation token to `OnStepStartingAsync` and `WaitForCommandAsync`
2. Register cancellation callbacks to release blocking waits
3. Add a `CancelSession()` method for external cancellation
4. Send DAP `terminated` and `exited` events to notify the debugger before cancelling
5. Use the cancellation token for REPL shell command execution
## Progress Checklist
- [x] **Phase 1:** Update IDapDebugSession interface
- [x] **Phase 2:** Update DapDebugSession implementation
- [x] **Phase 3:** Update StepsRunner to pass cancellation token
- [x] **Phase 4:** Update JobRunner to register cancellation handler
- [ ] **Phase 5:** Testing
## Files to Modify
| File | Changes |
|------|---------|
| `src/Runner.Worker/Dap/DapDebugSession.cs` | Add cancellation support to `OnStepStartingAsync`, `WaitForCommandAsync`, `ExecuteShellCommandAsync`, add `CancelSession` method |
| `src/Runner.Worker/StepsRunner.cs` | Pass `jobContext.CancellationToken` to `OnStepStartingAsync` |
| `src/Runner.Worker/JobRunner.cs` | Register cancellation callback to call `CancelSession` on the debug session |
## Detailed Implementation
### Phase 1: Update IDapDebugSession Interface
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs` (lines ~144-242)
Add new method to interface:
```csharp
/// <summary>
/// Cancels the debug session externally (e.g., job cancellation).
/// Sends terminated event to debugger and releases any blocking waits.
/// </summary>
void CancelSession();
```
Update existing method signature:
```csharp
// Change from:
Task OnStepStartingAsync(IStep step, IExecutionContext jobContext, bool isFirstStep);
// Change to:
Task OnStepStartingAsync(IStep step, IExecutionContext jobContext, bool isFirstStep, CancellationToken cancellationToken);
```
### Phase 2: Update DapDebugSession Implementation
#### 2.1 Add cancellation token field
**Location:** Around lines 260-300 (field declarations section)
```csharp
// Add field to store the job cancellation token for use by REPL commands
private CancellationToken _jobCancellationToken;
```
#### 2.2 Update OnStepStartingAsync
**Location:** Line 1159
```csharp
public async Task OnStepStartingAsync(IStep step, IExecutionContext jobContext, bool isFirstStep, CancellationToken cancellationToken)
{
if (!IsActive)
{
return;
}
_currentStep = step;
_jobContext = jobContext;
_jobCancellationToken = cancellationToken; // Store for REPL commands
// ... rest of existing implementation ...
// Update the WaitForCommandAsync call at line 1212:
await WaitForCommandAsync(cancellationToken);
}
```
#### 2.3 Update WaitForCommandAsync
**Location:** Line 1288
```csharp
private async Task WaitForCommandAsync(CancellationToken cancellationToken)
{
lock (_stateLock)
{
_state = DapSessionState.Paused;
_commandTcs = new TaskCompletionSource<DapCommand>(TaskCreationOptions.RunContinuationsAsynchronously);
}
Trace.Info("Waiting for debugger command...");
// Register cancellation to release the wait
using (cancellationToken.Register(() =>
{
Trace.Info("Job cancellation detected, releasing debugger wait");
_commandTcs?.TrySetResult(DapCommand.Disconnect);
}))
{
var command = await _commandTcs.Task;
Trace.Info($"Received command: {command}");
lock (_stateLock)
{
if (_state == DapSessionState.Paused)
{
_state = DapSessionState.Running;
}
}
// Send continued event (only for normal commands, not cancellation)
if (!cancellationToken.IsCancellationRequested &&
(command == DapCommand.Continue || command == DapCommand.Next))
{
_server?.SendEvent(new Event
{
EventType = "continued",
Body = new ContinuedEventBody
{
ThreadId = JobThreadId,
AllThreadsContinued = true
}
});
}
}
}
```
#### 2.4 Add CancelSession method
**Location:** After `OnJobCompleted()` method, around line 1286
```csharp
/// <summary>
/// Cancels the debug session externally (e.g., job cancellation).
/// Sends terminated/exited events to debugger and releases any blocking waits.
/// </summary>
public void CancelSession()
{
Trace.Info("CancelSession called - terminating debug session");
lock (_stateLock)
{
if (_state == DapSessionState.Terminated)
{
Trace.Info("Session already terminated, ignoring CancelSession");
return;
}
_state = DapSessionState.Terminated;
}
// Send terminated event to debugger so it updates its UI
_server?.SendEvent(new Event
{
EventType = "terminated",
Body = new TerminatedEventBody()
});
// Send exited event with cancellation exit code (130 = SIGINT convention)
_server?.SendEvent(new Event
{
EventType = "exited",
Body = new ExitedEventBody { ExitCode = 130 }
});
// Release any pending command waits
_commandTcs?.TrySetResult(DapCommand.Disconnect);
Trace.Info("Debug session cancelled");
}
```
#### 2.5 Update ExecuteShellCommandAsync
**Location:** Lines 889-895
Change the `ExecuteAsync` call to use the stored cancellation token:
```csharp
int exitCode;
try
{
exitCode = await processInvoker.ExecuteAsync(
workingDirectory: workingDirectory,
fileName: shell,
arguments: string.Format(shellArgs, command),
environment: env,
requireExitCodeZero: false,
cancellationToken: _jobCancellationToken); // Changed from CancellationToken.None
}
catch (OperationCanceledException)
{
Trace.Info("Shell command cancelled due to job cancellation");
return new EvaluateResponseBody
{
Result = "(cancelled)",
Type = "error",
VariablesReference = 0
};
}
catch (Exception ex)
{
Trace.Error($"Shell execution failed: {ex}");
return new EvaluateResponseBody
{
Result = $"Error: {ex.Message}",
Type = "error",
VariablesReference = 0
};
}
```
### Phase 3: Update StepsRunner
**File:** `src/Runner.Worker/StepsRunner.cs`
**Location:** Line 204
Change:
```csharp
await debugSession.OnStepStartingAsync(step, jobContext, isFirstStep);
```
To:
```csharp
await debugSession.OnStepStartingAsync(step, jobContext, isFirstStep, jobContext.CancellationToken);
```
### Phase 4: Update JobRunner
**File:** `src/Runner.Worker/JobRunner.cs`
#### 4.1 Add cancellation registration
**Location:** After line 191 (after "Debugger connected" output), inside the debug mode block:
```csharp
// Register cancellation handler to properly terminate DAP session on job cancellation
CancellationTokenRegistration? dapCancellationRegistration = null;
try
{
dapCancellationRegistration = jobRequestCancellationToken.Register(() =>
{
Trace.Info("Job cancelled - terminating DAP session");
debugSession.CancelSession();
});
}
catch (Exception ex)
{
Trace.Warning($"Failed to register DAP cancellation handler: {ex.Message}");
}
```
Note: The `dapCancellationRegistration` variable should be declared at a higher scope (around line 116 with other declarations) so it can be disposed in the finally block.
#### 4.2 Dispose the registration
**Location:** In the finally block (after line 316, alongside dapServer cleanup):
```csharp
// Dispose DAP cancellation registration
dapCancellationRegistration?.Dispose();
```
## Behavior Summary
| Scenario | Before | After |
|----------|--------|-------|
| Paused at breakpoint, job cancelled | **Stuck forever** | DAP terminated event sent, wait released, job cancels normally |
| REPL command running, job cancelled | Command runs forever | Command cancelled, job cancels normally |
| Waiting for debugger connection, job cancelled | Already handled | No change (already works) |
| Debugger disconnects voluntarily | Works | No change |
| Normal step execution, job cancelled | Works | No change (existing cancellation logic handles this) |
## Exit Code Semantics
The `exited` event uses these exit codes:
- `0` = job succeeded
- `1` = job failed
- `130` = job cancelled (standard Unix convention for SIGINT/Ctrl+C)
## Testing Scenarios
1. **Basic cancellation while paused:**
- Start a debug job, let it pause at first step
- Cancel the job from GitHub UI
- Verify: DAP client receives terminated event, runner exits cleanly
2. **Cancellation during REPL command:**
- Pause at a step, run `!sleep 60` in REPL
- Cancel the job from GitHub UI
- Verify: Sleep command terminates, DAP client receives terminated event, runner exits cleanly
3. **Cancellation before debugger connects:**
- Start a debug job (it waits for connection)
- Cancel the job before connecting a debugger
- Verify: Runner exits cleanly (this already works, just verify no regression)
4. **Normal operation (no cancellation):**
- Run through a debug session normally with step/continue
- Verify: No change in behavior
5. **Debugger disconnect:**
- Connect debugger, then disconnect it manually
- Verify: Job continues to completion (existing behavior preserved)
## Estimated Effort
| Phase | Effort |
|-------|--------|
| Phase 1: Interface update | 15 min |
| Phase 2: DapDebugSession implementation | 45 min |
| Phase 3: StepsRunner update | 5 min |
| Phase 4: JobRunner update | 15 min |
| Phase 5: Testing | 30 min |
| **Total** | **~2 hours** |
## References
- DAP Specification: https://microsoft.github.io/debug-adapter-protocol/specification
- Related plan: `dap-debugging.md` (original DAP implementation)

View File

@@ -0,0 +1,511 @@
# DAP Debug Logging Feature
**Status:** Implemented (Phase 6 testing still pending — see checklist below)
**Date:** January 2026
**Related:** [dap-debugging.md](./dap-debugging.md), [dap-step-backwards.md](./dap-step-backwards.md)
## Overview
Add comprehensive debug logging to the DAP debugging infrastructure that can be toggled from the DAP client. This helps diagnose issues like step conclusions not updating correctly after step-back operations.
## Features
### 1. Debug Log Levels
| Level | Value | What Gets Logged |
|-------|-------|------------------|
| `Off` | 0 | Nothing |
| `Minimal` | 1 | Errors, critical state changes |
| `Normal` | 2 | Step lifecycle, checkpoint operations |
| `Verbose` | 3 | Everything including outputs, expressions |
### 2. Enabling Debug Logging
#### Via Attach Arguments (nvim-dap config)
```lua
{
type = "runner",
request = "attach",
debugLogging = true, -- Enable debug logging (defaults to "normal" level)
debugLogLevel = "verbose", -- Optional: "off", "minimal", "normal", "verbose"
}
```
#### Via REPL Commands (runtime toggle)
| Command | Description |
|---------|-------------|
| `!debug on` | Enable debug logging (level: normal) |
| `!debug off` | Disable debug logging |
| `!debug minimal` | Set level to minimal |
| `!debug normal` | Set level to normal |
| `!debug verbose` | Set level to verbose |
| `!debug status` | Show current debug settings |
### 3. Log Output Format
All debug logs are sent to the DAP console with the format:
```
[DEBUG] [Category] Message
```
Categories include:
- `[Step]` - Step lifecycle events
- `[Checkpoint]` - Checkpoint creation/restoration
- `[StepsContext]` - Steps context mutations (SetOutcome, SetConclusion, SetOutput, ClearScope)
### 4. Example Output
With `!debug verbose` enabled:
```
[DEBUG] [Step] Starting: 'cat doesnotexist' (index=2)
[DEBUG] [Step] Checkpoints available: 2
[DEBUG] [StepsContext] SetOutcome: step='thecat', outcome=failure
[DEBUG] [StepsContext] SetConclusion: step='thecat', conclusion=failure
[DEBUG] [Step] Completed: 'cat doesnotexist', result=Failed
[DEBUG] [Step] Context state: outcome=failure, conclusion=failure
# After step-back:
[DEBUG] [Checkpoint] Restoring checkpoint [1] for step 'cat doesnotexist'
[DEBUG] [StepsContext] ClearScope: scope='(root)'
[DEBUG] [StepsContext] Restoring: clearing scope '(root)', restoring 2 step(s)
[DEBUG] [StepsContext] Restored: step='thefoo', outcome=success, conclusion=success
# After re-running with file created:
[DEBUG] [Step] Starting: 'cat doesnotexist' (index=2)
[DEBUG] [StepsContext] SetOutcome: step='thecat', outcome=success
[DEBUG] [StepsContext] SetConclusion: step='thecat', conclusion=success
[DEBUG] [Step] Completed: 'cat doesnotexist', result=Succeeded
[DEBUG] [Step] Context state: outcome=success, conclusion=success
```
## Implementation
### Progress Checklist
- [x] **Phase 1:** Add debug logging infrastructure to DapDebugSession
- [x] **Phase 2:** Add REPL `!debug` command handling
- [x] **Phase 3:** Add OnDebugLog callback to StepsContext
- [x] **Phase 4:** Add debug logging calls throughout DapDebugSession
- [x] **Phase 5:** Hook up StepsContext logging to DapDebugSession
- [ ] **Phase 6:** Testing
---
### Phase 1: Debug Logging Infrastructure
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs`
Add enum and helper method:
```csharp
// Add enum for debug log levels (near top of file with other enums)
public enum DebugLogLevel
{
Off = 0,
Minimal = 1, // Errors, critical state changes
Normal = 2, // Step lifecycle, checkpoints
Verbose = 3 // Everything including outputs, expressions
}
// Add field (with other private fields)
private DebugLogLevel _debugLogLevel = DebugLogLevel.Off;
// Add helper method (in a #region Debug Logging)
private void DebugLog(string message, DebugLogLevel minLevel = DebugLogLevel.Normal)
{
if (_debugLogLevel >= minLevel)
{
_server?.SendEvent(new Event
{
EventType = "output",
Body = new OutputEventBody
{
Category = "console",
Output = $"[DEBUG] {message}\n"
}
});
}
}
```
Update `HandleAttach` to parse debug logging arguments:
```csharp
private Response HandleAttach(Request request)
{
Trace.Info("Attach request handled");
// Parse debug logging from attach args
if (request.Arguments is JsonElement args)
{
if (args.TryGetProperty("debugLogging", out var debugLogging))
{
if (debugLogging.ValueKind == JsonValueKind.True)
{
_debugLogLevel = DebugLogLevel.Normal;
Trace.Info("Debug logging enabled via attach args (level: normal)");
}
}
if (args.TryGetProperty("debugLogLevel", out var level) && level.ValueKind == JsonValueKind.String)
{
_debugLogLevel = level.GetString()?.ToLower() switch
{
"minimal" => DebugLogLevel.Minimal,
"normal" => DebugLogLevel.Normal,
"verbose" => DebugLogLevel.Verbose,
"off" => DebugLogLevel.Off,
_ => _debugLogLevel
};
Trace.Info($"Debug log level set via attach args: {_debugLogLevel}");
}
}
return CreateSuccessResponse(null);
}
```
---
### Phase 2: REPL `!debug` Command
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs`
In `HandleEvaluateAsync`, add handling for `!debug` command before other shell command handling:
```csharp
// Near the start of HandleEvaluateAsync, after getting the expression:
// Check for debug command
if (expression.StartsWith("!debug", StringComparison.OrdinalIgnoreCase))
{
return HandleDebugCommand(expression);
}
// ... rest of existing HandleEvaluateAsync code
```
Add the handler method:
```csharp
private Response HandleDebugCommand(string command)
{
var parts = command.Split(' ', StringSplitOptions.RemoveEmptyEntries);
var arg = parts.Length > 1 ? parts[1].ToLower() : "status";
string result;
switch (arg)
{
case "on":
_debugLogLevel = DebugLogLevel.Normal;
result = "Debug logging enabled (level: normal)";
break;
case "off":
_debugLogLevel = DebugLogLevel.Off;
result = "Debug logging disabled";
break;
case "minimal":
_debugLogLevel = DebugLogLevel.Minimal;
result = "Debug logging set to minimal";
break;
case "normal":
_debugLogLevel = DebugLogLevel.Normal;
result = "Debug logging set to normal";
break;
case "verbose":
_debugLogLevel = DebugLogLevel.Verbose;
result = "Debug logging set to verbose";
break;
case "status":
default:
result = $"Debug logging: {_debugLogLevel}";
break;
}
return CreateSuccessResponse(new EvaluateResponseBody
{
Result = result,
VariablesReference = 0
});
}
```
---
### Phase 3: StepsContext OnDebugLog Callback
**File:** `src/Runner.Worker/StepsContext.cs`
Add callback property and helper:
```csharp
public sealed class StepsContext
{
private static readonly Regex _propertyRegex = new("^[a-zA-Z_][a-zA-Z0-9_]*$", RegexOptions.Compiled);
private readonly DictionaryContextData _contextData = new();
/// <summary>
/// Optional callback for debug logging. When set, will be called with debug messages
/// for all StepsContext mutations.
/// </summary>
public Action<string> OnDebugLog { get; set; }
private void DebugLog(string message)
{
OnDebugLog?.Invoke(message);
}
// ... rest of class
}
```
Update `ClearScope`:
```csharp
public void ClearScope(string scopeName)
{
DebugLog($"[StepsContext] ClearScope: scope='{scopeName ?? "(root)"}'");
if (_contextData.TryGetValue(scopeName, out _))
{
_contextData[scopeName] = new DictionaryContextData();
}
}
```
Update `SetOutput`:
```csharp
public void SetOutput(
string scopeName,
string stepName,
string outputName,
string value,
out string reference)
{
var step = GetStep(scopeName, stepName);
var outputs = step["outputs"].AssertDictionary("outputs");
outputs[outputName] = new StringContextData(value);
if (_propertyRegex.IsMatch(outputName))
{
reference = $"steps.{stepName}.outputs.{outputName}";
}
else
{
reference = $"steps['{stepName}']['outputs']['{outputName}']";
}
DebugLog($"[StepsContext] SetOutput: step='{stepName}', output='{outputName}', value='{TruncateValue(value)}'");
}
private static string TruncateValue(string value, int maxLength = 50)
{
if (string.IsNullOrEmpty(value)) return "(empty)";
if (value.Length <= maxLength) return value;
return value.Substring(0, maxLength) + "...";
}
```
Update `SetConclusion`:
```csharp
public void SetConclusion(
string scopeName,
string stepName,
ActionResult conclusion)
{
var step = GetStep(scopeName, stepName);
var conclusionStr = conclusion.ToString().ToLowerInvariant();
step["conclusion"] = new StringContextData(conclusionStr);
DebugLog($"[StepsContext] SetConclusion: step='{stepName}', conclusion={conclusionStr}");
}
```
Update `SetOutcome`:
```csharp
public void SetOutcome(
string scopeName,
string stepName,
ActionResult outcome)
{
var step = GetStep(scopeName, stepName);
var outcomeStr = outcome.ToString().ToLowerInvariant();
step["outcome"] = new StringContextData(outcomeStr);
DebugLog($"[StepsContext] SetOutcome: step='{stepName}', outcome={outcomeStr}");
}
```
---
### Phase 4: DapDebugSession Logging Calls
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs`
#### In `OnStepStartingAsync` (after setting `_currentStep` and `_jobContext`):
```csharp
DebugLog($"[Step] Starting: '{step.DisplayName}' (index={stepIndex})");
DebugLog($"[Step] Checkpoints available: {_checkpoints.Count}");
```
#### In `OnStepCompleted` (after logging to Trace):
```csharp
DebugLog($"[Step] Completed: '{step.DisplayName}', result={result}");
// Log current steps context state for this step
if (_debugLogLevel >= DebugLogLevel.Normal)
{
var stepsScope = step.ExecutionContext?.Global?.StepsContext?.GetScope(step.ExecutionContext.ScopeName);
if (stepsScope != null && !string.IsNullOrEmpty(step.ExecutionContext?.ContextName))
{
if (stepsScope.TryGetValue(step.ExecutionContext.ContextName, out var stepData) && stepData is DictionaryContextData sd)
{
var outcome = sd.TryGetValue("outcome", out var o) && o is StringContextData os ? os.Value : "null";
var conclusion = sd.TryGetValue("conclusion", out var c) && c is StringContextData cs ? cs.Value : "null";
DebugLog($"[Step] Context state: outcome={outcome}, conclusion={conclusion}");
}
}
}
```
#### In `CreateCheckpointForPendingStep` (after creating checkpoint):
```csharp
DebugLog($"[Checkpoint] Created [{_checkpoints.Count - 1}] for step '{_pendingStep.DisplayName}'");
if (_debugLogLevel >= DebugLogLevel.Verbose)
{
DebugLog($"[Checkpoint] Snapshot contains {checkpoint.StepsSnapshot.Count} step(s)", DebugLogLevel.Verbose);
foreach (var entry in checkpoint.StepsSnapshot)
{
DebugLog($"[Checkpoint] {entry.Key}: outcome={entry.Value.Outcome}, conclusion={entry.Value.Conclusion}", DebugLogLevel.Verbose);
}
}
```
#### In `RestoreCheckpoint` (at start of method):
```csharp
DebugLog($"[Checkpoint] Restoring [{checkpointIndex}] for step '{checkpoint.StepDisplayName}'");
if (_debugLogLevel >= DebugLogLevel.Verbose)
{
DebugLog($"[Checkpoint] Snapshot has {checkpoint.StepsSnapshot.Count} step(s)", DebugLogLevel.Verbose);
}
```
#### In `RestoreStepsContext` (update existing method):
```csharp
private void RestoreStepsContext(StepsContext stepsContext, Dictionary<string, StepStateSnapshot> snapshot, string scopeName)
{
scopeName = scopeName ?? string.Empty;
DebugLog($"[StepsContext] Restoring: clearing scope '{(string.IsNullOrEmpty(scopeName) ? "(root)" : scopeName)}', will restore {snapshot.Count} step(s)");
stepsContext.ClearScope(scopeName);
foreach (var entry in snapshot)
{
var key = entry.Key;
var slashIndex = key.IndexOf('/');
if (slashIndex >= 0)
{
var snapshotScopeName = slashIndex > 0 ? key.Substring(0, slashIndex) : string.Empty;
var stepName = key.Substring(slashIndex + 1);
if (snapshotScopeName == scopeName)
{
var state = entry.Value;
if (state.Outcome.HasValue)
{
stepsContext.SetOutcome(scopeName, stepName, state.Outcome.Value);
}
if (state.Conclusion.HasValue)
{
stepsContext.SetConclusion(scopeName, stepName, state.Conclusion.Value);
}
if (state.Outputs != null)
{
foreach (var output in state.Outputs)
{
stepsContext.SetOutput(scopeName, stepName, output.Key, output.Value, out _);
}
}
DebugLog($"[StepsContext] Restored: step='{stepName}', outcome={state.Outcome}, conclusion={state.Conclusion}", DebugLogLevel.Verbose);
}
}
}
Trace.Info($"Steps context restored: cleared scope '{scopeName}' and restored {snapshot.Count} step(s) from snapshot");
}
```
---
### Phase 5: Hook Up StepsContext Logging
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs`
In `OnStepStartingAsync`, after setting `_jobContext`, hook up the callback (only once):
```csharp
// Hook up StepsContext debug logging (do this once when we first get jobContext)
if (jobContext.Global.StepsContext.OnDebugLog == null)
{
jobContext.Global.StepsContext.OnDebugLog = (msg) => DebugLog(msg, DebugLogLevel.Verbose);
}
```
**Note:** StepsContext mutation logging is routed at `Verbose` level since `SetOutput` can be noisy. As a consequence, the `SetConclusion` and `SetOutcome` callbacks also only appear at `Verbose` level — but the important state changes remain visible at `Normal` level because `OnStepCompleted` logs them directly.
---
### Phase 6: Testing
#### Manual Testing Checklist
- [ ] `!debug status` shows "Off" by default
- [ ] `!debug on` enables logging, shows step lifecycle
- [ ] `!debug verbose` shows StepsContext mutations
- [ ] `!debug off` disables logging
- [ ] Attach with `debugLogging: true` enables logging on connect
- [ ] Attach with `debugLogLevel: "verbose"` sets correct level
- [ ] Step-back scenario shows restoration logs
- [ ] Logs help identify why conclusion might not update
#### Test Workflow
Use the test workflow with `thecat` step:
1. Run workflow, let `thecat` fail
2. Enable `!debug verbose`
3. Step back
4. Create the missing file
5. Step forward
6. Observe logs to see if `SetConclusion` is called with `success`
---
## Files Summary
### Modified Files
| File | Changes |
|------|---------|
| `src/Runner.Worker/Dap/DapDebugSession.cs` | Add `DebugLogLevel` enum, `_debugLogLevel` field, `DebugLog()` helper, `HandleDebugCommand()`, update `HandleAttach`, add logging calls throughout, hook up StepsContext callback |
| `src/Runner.Worker/StepsContext.cs` | Add `OnDebugLog` callback, `DebugLog()` helper, `TruncateValue()` helper, add logging to `ClearScope`, `SetOutput`, `SetConclusion`, `SetOutcome` |
---
## Future Enhancements (Out of Scope)
- Additional debug commands (`!debug checkpoints`, `!debug steps`, `!debug env`)
- Log to file option
- Structured logging with timestamps
- Category-based filtering (e.g., only show `[StepsContext]` logs)
- Integration with nvim-dap's virtual text for inline debug info

View File

@@ -0,0 +1,299 @@
# DAP Debugging - Bug Fixes and Enhancements
**Status:** Planned
**Date:** January 2026
**Related:** [dap-debugging.md](./dap-debugging.md)
## Overview
This document tracks bug fixes and enhancements for the DAP debugging implementation after the initial phases were completed.
## Issues
### Bug 1: Double Output in REPL Shell Commands
**Symptom:** Running commands in the REPL shell produces double output - the first one unmasked, the second one with secrets masked.
**Root Cause:** In `DapDebugSession.ExecuteShellCommandAsync()` (lines 670-773), output is sent to the debugger twice:
1. **Real-time streaming (unmasked):** Lines 678-712 stream output via DAP `output` events as data arrives from the process - but this output is NOT masked
2. **Final result (masked):** Lines 765-769 return the combined output as `EvaluateResponseBody.Result` with secrets masked
The DAP client displays both the streamed events AND the evaluate response result, causing duplication.
**Fix:**
1. Mask secrets in the real-time streaming output (add `HostContext.SecretMasker.MaskSecrets()` to lines ~690 and ~708)
2. Change the final `Result` to only show exit code summary instead of full output
---
### Bug 2: Expressions Interpreted as Shell Commands
**Symptom:** Evaluating expressions like `${{github.event_name}} == 'push'` in the Watch/Expressions pane results in them being executed as shell commands instead of being evaluated as GitHub Actions expressions.
**Root Cause:** In `DapDebugSession.HandleEvaluateAsync()` (line 514), the condition to detect shell commands is too broad:
```csharp
if (evalContext == "repl" || expression.StartsWith("!") || expression.StartsWith("$"))
```
Since `${{github.event_name}}` starts with `$`, it gets routed to shell execution instead of expression evaluation.
**Fix:**
1. Check for `${{` prefix first - these are always GitHub Actions expressions
2. Remove the `expression.StartsWith("$")` condition entirely (ambiguous and unnecessary since REPL context handles shell commands)
3. Keep `expression.StartsWith("!")` for explicit shell override in non-REPL contexts
---
### Enhancement: Expression Interpolation in REPL Commands
**Request:** When running REPL commands like `echo ${{github.event_name}}`, the `${{ }}` expressions should be expanded before shell execution, similar to how `run:` steps work.
**Approach:** Add a helper method that uses the existing `PipelineTemplateEvaluator` infrastructure to expand expressions in the command string before passing it to the shell.
---
## Implementation Details
### File: `src/Runner.Worker/Dap/DapDebugSession.cs`
#### Change 1: Mask Real-Time Streaming Output
**Location:** Lines ~678-712 (OutputDataReceived and ErrorDataReceived handlers)
**Before:**
```csharp
processInvoker.OutputDataReceived += (sender, args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
output.AppendLine(args.Data);
_server?.SendEvent(new Event
{
EventType = "output",
Body = new OutputEventBody
{
Category = "stdout",
Output = args.Data + "\n" // NOT MASKED
}
});
}
};
```
**After:**
```csharp
processInvoker.OutputDataReceived += (sender, args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
output.AppendLine(args.Data);
var maskedData = HostContext.SecretMasker.MaskSecrets(args.Data);
_server?.SendEvent(new Event
{
EventType = "output",
Body = new OutputEventBody
{
Category = "stdout",
Output = maskedData + "\n"
}
});
}
};
```
Apply the same change to `ErrorDataReceived` handler (~lines 696-712).
---
#### Change 2: Return Only Exit Code in Result
**Location:** Lines ~767-772 (return statement in ExecuteShellCommandAsync)
**Before:**
```csharp
return new EvaluateResponseBody
{
Result = result.TrimEnd('\r', '\n'),
Type = exitCode == 0 ? "string" : "error",
VariablesReference = 0
};
```
**After:**
```csharp
return new EvaluateResponseBody
{
Result = $"(exit code: {exitCode})",
Type = exitCode == 0 ? "string" : "error",
VariablesReference = 0
};
```
Also remove the result combination logic (lines ~747-762) since we no longer need to build the full result string for the response; the `output` StringBuilder accumulation in the data-received handlers can likely be removed as well, if nothing else reads it.
---
#### Change 3: Fix Expression vs Shell Routing
**Location:** Lines ~511-536 (HandleEvaluateAsync method)
**Before:**
```csharp
try
{
// Check if this is a REPL/shell command (context: "repl") or starts with shell prefix
if (evalContext == "repl" || expression.StartsWith("!") || expression.StartsWith("$"))
{
// Shell execution mode
var command = expression.TrimStart('!', '$').Trim();
// ...
}
else
{
// Expression evaluation mode
var result = EvaluateExpression(expression, executionContext);
return CreateSuccessResponse(result);
}
}
```
**After:**
```csharp
try
{
// GitHub Actions expressions start with "${{" - always evaluate as expressions
if (expression.StartsWith("${{"))
{
var result = EvaluateExpression(expression, executionContext);
return CreateSuccessResponse(result);
}
// Check if this is a REPL/shell command:
// - context is "repl" (from Debug Console pane)
// - expression starts with "!" (explicit shell prefix for Watch pane)
if (evalContext == "repl" || expression.StartsWith("!"))
{
// Shell execution mode
var command = expression.TrimStart('!').Trim();
if (string.IsNullOrEmpty(command))
{
return CreateSuccessResponse(new EvaluateResponseBody
{
Result = "(empty command)",
Type = "string",
VariablesReference = 0
});
}
var result = await ExecuteShellCommandAsync(command, executionContext);
return CreateSuccessResponse(result);
}
else
{
// Expression evaluation mode (Watch pane, hover, etc.)
var result = EvaluateExpression(expression, executionContext);
return CreateSuccessResponse(result);
}
}
```
---
#### Change 4: Add Expression Expansion Helper Method
**Location:** Add new method before `ExecuteShellCommandAsync` (~line 667)
```csharp
/// <summary>
/// Expands ${{ }} expressions within a command string.
/// For example: "echo ${{github.event_name}}" -> "echo push"
/// </summary>
private string ExpandExpressionsInCommand(string command, IExecutionContext context)
{
if (string.IsNullOrEmpty(command) || !command.Contains("${{"))
{
return command;
}
try
{
// Create a StringToken with the command
var token = new StringToken(null, null, null, command);
// Use the template evaluator to expand expressions
var templateEvaluator = context.ToPipelineTemplateEvaluator();
var result = templateEvaluator.EvaluateStepDisplayName(
token,
context.ExpressionValues,
context.ExpressionFunctions);
// Mask secrets only in the log output. The caller (Change 5) executes the returned
// string, so it must contain the real expanded value — masking it here would make
// the shell run "***" in place of any secret expanded via ${{ }}.
result ??= command;
Trace.Info($"Expanded command: {HostContext.SecretMasker.MaskSecrets(result)}");
return result;
}
catch (Exception ex)
{
Trace.Info($"Expression expansion failed, using original command: {ex.Message}");
return command;
}
}
```
**Required import:** Add `using GitHub.DistributedTask.ObjectTemplating.Tokens;` at the top of the file if not already present.
---
#### Change 5: Use Expression Expansion in Shell Execution
**Location:** Beginning of `ExecuteShellCommandAsync` method (~line 670)
**Before:**
```csharp
private async Task<EvaluateResponseBody> ExecuteShellCommandAsync(string command, IExecutionContext context)
{
Trace.Info($"Executing shell command: {command}");
// ...
}
```
**After:**
```csharp
private async Task<EvaluateResponseBody> ExecuteShellCommandAsync(string command, IExecutionContext context)
{
// Expand ${{ }} expressions in the command first
command = ExpandExpressionsInCommand(command, context);
Trace.Info($"Executing shell command: {command}");
// ...
}
```
---
## DAP Context Reference
For future reference, these are the DAP evaluate context values:
| DAP Context | Source UI | Behavior |
|-------------|-----------|----------|
| `"repl"` | Debug Console / REPL pane | Shell execution (with expression expansion) |
| `"watch"` | Watch / Expressions pane | Expression evaluation |
| `"hover"` | Editor hover (default) | Expression evaluation |
| `"variables"` | Variables pane | Expression evaluation |
| `"clipboard"` | Copy to clipboard | Expression evaluation |
---
## Testing Checklist
- [ ] REPL command output is masked and appears only once
- [ ] REPL command shows exit code in result field
- [ ] Expression `${{github.event_name}}` evaluates correctly in Watch pane
- [ ] Expression `${{github.event_name}} == 'push'` evaluates correctly
- [ ] REPL command `echo ${{github.event_name}}` expands and executes correctly
- [ ] REPL command `!ls -la` from Watch pane works (explicit shell prefix)
- [ ] Secrets are masked in all outputs (streaming and expanded commands)

View File

@@ -0,0 +1,536 @@
# DAP-Based Debugging for GitHub Actions Runner
**Status:** Draft
**Author:** GitHub Actions Team
**Date:** January 2026
## Progress Checklist
- [x] **Phase 1:** DAP Protocol Infrastructure (DapMessages.cs, DapServer.cs, basic DapDebugSession.cs)
- [x] **Phase 2:** Debug Session Logic (DapVariableProvider.cs, variable inspection, step history tracking)
- [x] **Phase 3:** StepsRunner Integration (pause hooks before/after step execution)
- [x] **Phase 4:** Expression Evaluation & Shell (REPL)
- [x] **Phase 5:** Startup Integration (JobRunner.cs modifications)
## Overview
This document describes the implementation of Debug Adapter Protocol (DAP) support in the GitHub Actions runner, enabling rich debugging of workflow jobs from any DAP-compatible editor (nvim-dap, VS Code, etc.).
## Goals
- **Primary:** Create a working demo to demonstrate the feasibility of DAP-based workflow debugging
- **Non-goal:** Production-ready, polished implementation (this is proof-of-concept)
## User Experience
1. User re-runs a failed job with "Enable debug logging" checked in GitHub UI
2. Runner (running locally) detects debug mode and starts DAP server on port 4711
3. Runner prints "Waiting for debugger on port 4711..." and pauses
4. User opens editor (nvim with nvim-dap), connects to debugger
5. Job execution begins, pausing before the first step
6. User can:
- **Inspect variables:** View `github`, `env`, `inputs`, `steps`, `secrets` (redacted), `runner`, `job` contexts
- **Evaluate expressions:** `${{ github.event.pull_request.title }}`
- **Execute shell commands:** Run arbitrary commands in the job's environment (REPL)
- **Step through job:** `next` moves to next step, `continue` runs to end
- **Pause after steps:** Inspect step outputs before continuing
## Activation
DAP debugging activates automatically when the job is in debug mode:
- User enables "Enable debug logging" when re-running a job in GitHub UI
- Server sends `ACTIONS_STEP_DEBUG=true` in job variables
- Runner sets `Global.WriteDebug = true` and `runner.debug = "1"`
- DAP server starts on port 4711
**No additional configuration required.**
### Optional Configuration
| Environment Variable | Default | Description |
|---------------------|---------|-------------|
| `ACTIONS_DAP_PORT` | `4711` | TCP port for DAP server (optional override) |
## Architecture
```
┌─────────────────────┐ ┌─────────────────────────────────────────┐
│ nvim-dap │ │ Runner.Worker │
│ (DAP Client) │◄───TCP:4711───────►│ ┌─────────────────────────────────┐ │
│ │ │ │ DapServer │ │
└─────────────────────┘ │ │ - TCP listener │ │
│ │ - DAP JSON protocol │ │
│ └──────────────┬──────────────────┘ │
│ │ │
│ ┌──────────────▼──────────────────┐ │
│ │ DapDebugSession │ │
│ │ - Debug state management │ │
│ │ - Step coordination │ │
│ │ - Variable exposure │ │
│ │ - Expression evaluation │ │
│ │ - Shell execution (REPL) │ │
│ └──────────────┬──────────────────┘ │
│ │ │
│ ┌──────────────▼──────────────────┐ │
│ │ StepsRunner (modified) │ │
│ │ - Pause before/after steps │ │
│ │ - Notify debug session │ │
│ └─────────────────────────────────┘ │
└─────────────────────────────────────────┘
```
## DAP Concept Mapping
| DAP Concept | Actions Runner Equivalent |
|-------------|---------------------------|
| Thread | Single job execution |
| Stack Frame | Current step + completed steps (step history) |
| Scope | Context category: `github`, `env`, `inputs`, `steps`, `secrets`, `runner`, `job` |
| Variable | Individual context values |
| Breakpoint | Pause before specific step (future enhancement) |
| Step Over (Next) | Execute current step, pause before next |
| Continue | Run until job end |
| Evaluate | Evaluate `${{ }}` expressions OR execute shell commands (REPL) |
## File Structure
```
src/Runner.Worker/
├── Dap/
│ ├── DapServer.cs # TCP listener, JSON protocol handling
│ ├── DapDebugSession.cs # Debug state, step coordination
│ ├── DapMessages.cs # DAP protocol message types
│ └── DapVariableProvider.cs # Converts ExecutionContext to DAP variables
```
## Implementation Phases
### Phase 1: DAP Protocol Infrastructure
#### 1.1 Protocol Messages (`Dap/DapMessages.cs`)
Base message types following DAP spec:
```csharp
public abstract class ProtocolMessage
{
public int seq { get; set; }
public string type { get; set; } // "request", "response", "event"
}
public class Request : ProtocolMessage
{
public string command { get; set; }
public object arguments { get; set; }
}
public class Response : ProtocolMessage
{
public int request_seq { get; set; }
public bool success { get; set; }
public string command { get; set; }
public string message { get; set; }
public object body { get; set; }
}
public class Event : ProtocolMessage
{
public string @event { get; set; }
public object body { get; set; }
}
```
Message framing: `Content-Length: N\r\n\r\n{json}`
#### 1.2 DAP Server (`Dap/DapServer.cs`)
```csharp
[ServiceLocator(Default = typeof(DapServer))]
public interface IDapServer : IRunnerService
{
Task StartAsync(int port);
Task WaitForConnectionAsync();
Task StopAsync();
void SendEvent(Event evt);
}
public sealed class DapServer : RunnerService, IDapServer
{
private TcpListener _listener;
private TcpClient _client;
private IDapDebugSession _session;
// TCP listener on configurable port
// Single-client connection
// Async read/write loop
// Dispatch requests to DapDebugSession
}
```
### Phase 2: Debug Session Logic
#### 2.1 Debug Session (`Dap/DapDebugSession.cs`)
```csharp
public enum DapCommand { Continue, Next, Pause, Disconnect }
public enum PauseReason { Entry, Step, Breakpoint, Pause }
[ServiceLocator(Default = typeof(DapDebugSession))]
public interface IDapDebugSession : IRunnerService
{
bool IsActive { get; }
// Called by DapServer
void Initialize(InitializeRequestArguments args);
void Attach(AttachRequestArguments args);
void ConfigurationDone();
Task<DapCommand> WaitForCommandAsync();
// Called by StepsRunner
Task OnStepStartingAsync(IStep step, IExecutionContext jobContext);
void OnStepCompleted(IStep step);
// DAP requests
ThreadsResponse GetThreads();
StackTraceResponse GetStackTrace(int threadId);
ScopesResponse GetScopes(int frameId);
VariablesResponse GetVariables(int variablesReference);
EvaluateResponse Evaluate(string expression, string context);
}
public sealed class DapDebugSession : RunnerService, IDapDebugSession
{
private IExecutionContext _jobContext;
private IStep _currentStep;
private readonly List<IStep> _completedSteps = new();
private TaskCompletionSource<DapCommand> _commandTcs;
private bool _pauseAfterStep = false;
// Object reference management for nested variables
private int _nextVariableReference = 1;
private readonly Dictionary<int, object> _variableReferences = new();
}
```
Core state machine:
1. **Waiting for client:** Server started, no client connected
2. **Initializing:** Client connected, exchanging capabilities
3. **Ready:** `configurationDone` received, waiting to start
4. **Paused (before step):** Stopped before step execution, waiting for command
5. **Running:** Executing a step
6. **Paused (after step):** Stopped after step execution, waiting for command
#### 2.2 Variable Provider (`Dap/DapVariableProvider.cs`)
Maps `ExecutionContext.ExpressionValues` to DAP scopes and variables:
| Scope | Source | Notes |
|-------|--------|-------|
| `github` | `ExpressionValues["github"]` | Full github context |
| `env` | `ExpressionValues["env"]` | Environment variables |
| `inputs` | `ExpressionValues["inputs"]` | Step inputs (when available) |
| `steps` | `Global.StepsContext.GetScope()` | Completed step outputs |
| `secrets` | `ExpressionValues["secrets"]` | Keys shown, values = `[REDACTED]` |
| `runner` | `ExpressionValues["runner"]` | Runner context |
| `job` | `ExpressionValues["job"]` | Job status |
Nested objects (e.g., `github.event.pull_request`) become expandable variables with child references.
### Phase 3: StepsRunner Integration
#### 3.1 Modify `StepsRunner.cs`
Add debug hooks at step boundaries:
```csharp
public async Task RunAsync(IExecutionContext jobContext)
{
// Get debug session if available
var debugSession = HostContext.TryGetService<IDapDebugSession>();
bool isFirstStep = true;
while (jobContext.JobSteps.Count > 0 || !checkPostJobActions)
{
// ... existing dequeue logic ...
var step = jobContext.JobSteps.Dequeue();
// Pause BEFORE step execution
if (debugSession?.IsActive == true)
{
var reason = isFirstStep ? PauseReason.Entry : PauseReason.Step;
await debugSession.OnStepStartingAsync(step, jobContext, reason);
isFirstStep = false;
}
// ... existing step execution (condition eval, RunStepAsync, etc.) ...
// Pause AFTER step execution (if requested)
if (debugSession?.IsActive == true)
{
debugSession.OnStepCompleted(step);
// Session may pause here to let user inspect outputs
}
}
}
```
### Phase 4: Expression Evaluation & Shell (REPL)
#### 4.1 Expression Evaluation
Reuse existing `PipelineTemplateEvaluator`:
```csharp
private EvaluateResponseBody EvaluateExpression(string expression, IExecutionContext context)
{
// Strip ${{ }} wrapper if present
var expr = expression.Trim();
if (expr.StartsWith("${{") && expr.EndsWith("}}"))
{
expr = expr.Substring(3, expr.Length - 5).Trim();
}
var expressionToken = new BasicExpressionToken(fileId: null, line: null, column: null, expression: expr);
var templateEvaluator = context.ToPipelineTemplateEvaluator();
var result = templateEvaluator.EvaluateStepDisplayName(
expressionToken,
context.ExpressionValues,
context.ExpressionFunctions
);
// Mask secrets and determine type
result = HostContext.SecretMasker.MaskSecrets(result ?? "null");
return new EvaluateResponseBody
{
Result = result,
Type = DetermineResultType(result),
VariablesReference = 0
};
}
```
**Supported expression formats:**
- Plain expression: `github.ref`, `steps.build.outputs.result`
- Wrapped expression: `${{ github.event.pull_request.title }}`
#### 4.2 Shell Execution (REPL)
Shell execution is triggered when:
1. The evaluate request has `context: "repl"`, OR
2. The expression starts with `!` (e.g., `!ls -la`), OR
3. The expression starts with `$` followed by a shell command (e.g., `$env`) — note: this trigger was later removed (see the bug-fixes document, Bug 2) because GitHub Actions expressions `${{ ... }}` also start with `$` and were being misrouted to shell execution
**Usage examples in debug console:**
```
!ls -la # List files in workspace
!env | grep GITHUB # Show GitHub environment variables
!cat $GITHUB_EVENT_PATH # View the event payload
!echo ${{ github.ref }} # Mix shell and expression (evaluated first)
```
**Implementation:**
```csharp
private async Task<EvaluateResponseBody> ExecuteShellCommandAsync(string command, IExecutionContext context)
{
var processInvoker = HostContext.CreateService<IProcessInvoker>();
var output = new StringBuilder();
processInvoker.OutputDataReceived += (sender, args) =>
{
output.AppendLine(args.Data);
// Stream to client in real-time via DAP output event
_server?.SendEvent(new Event
{
EventType = "output",
Body = new OutputEventBody { Category = "stdout", Output = args.Data + "\n" }
});
};
processInvoker.ErrorDataReceived += (sender, args) =>
{
_server?.SendEvent(new Event
{
EventType = "output",
Body = new OutputEventBody { Category = "stderr", Output = args.Data + "\n" }
});
};
// Build environment from job context (includes GITHUB_*, env context, prepend path)
var env = BuildShellEnvironment(context);
var workDir = GetWorkingDirectory(context); // Uses github.workspace
var (shell, shellArgs) = GetDefaultShell(); // Platform-specific detection
int exitCode = await processInvoker.ExecuteAsync(
workingDirectory: workDir,
fileName: shell,
arguments: string.Format(shellArgs, command),
environment: env,
requireExitCodeZero: false,
cancellationToken: CancellationToken.None
);
return new EvaluateResponseBody
{
Result = HostContext.SecretMasker.MaskSecrets(output.ToString()),
Type = exitCode == 0 ? "string" : "error",
VariablesReference = 0
};
}
```
**Shell detection by platform:**
| Platform | Priority | Shell | Arguments |
|----------|----------|-------|-----------|
| Windows | 1 | `pwsh` | `-NoProfile -NonInteractive -Command "{0}"` |
| Windows | 2 | `powershell` | `-NoProfile -NonInteractive -Command "{0}"` |
| Windows | 3 | `cmd.exe` | `/C "{0}"` |
| Unix | 1 | `bash` | `-c "{0}"` |
| Unix | 2 | `sh` | `-c "{0}"` |
**Environment built for shell commands:**
- Current system environment variables
- GitHub Actions context variables (from `IEnvironmentContextData.GetRuntimeEnvironmentVariables()`)
- Prepend path from job context added to `PATH`
### Phase 5: Startup Integration
#### 5.1 Modify `JobRunner.cs`
Add DAP server startup after debug mode is detected (around line 159):
```csharp
if (jobContext.Global.WriteDebug)
{
jobContext.SetRunnerContext("debug", "1");
// Start DAP server for interactive debugging
var dapServer = HostContext.GetService<IDapServer>();
var port = int.Parse(
Environment.GetEnvironmentVariable("ACTIONS_DAP_PORT") ?? "4711");
await dapServer.StartAsync(port);
Trace.Info($"DAP server listening on port {port}");
jobContext.Output($"DAP debugger waiting for connection on port {port}...");
// Block until debugger connects
await dapServer.WaitForConnectionAsync();
Trace.Info("DAP client connected, continuing job execution");
}
```
## DAP Capabilities
Capabilities to advertise in `InitializeResponse`:
```json
{
"supportsConfigurationDoneRequest": true,
"supportsEvaluateForHovers": true,
"supportsTerminateDebuggee": true,
"supportsStepBack": false,
"supportsSetVariable": false,
"supportsRestartFrame": false,
"supportsGotoTargetsRequest": false,
"supportsStepInTargetsRequest": false,
"supportsCompletionsRequest": false,
"supportsModulesRequest": false,
"supportsExceptionOptions": false,
"supportsValueFormattingOptions": false,
"supportsExceptionInfoRequest": false,
"supportsDelayedStackTraceLoading": false,
"supportsLoadedSourcesRequest": false,
"supportsProgressReporting": false,
"supportsRunInTerminalRequest": false
}
```
## Client Configuration (nvim-dap)
Example configuration for nvim-dap:
```lua
local dap = require('dap')
dap.adapters.actions = {
type = 'server',
host = '127.0.0.1',
port = 4711,
}
dap.configurations.yaml = {
{
type = 'actions',
request = 'attach',
name = 'Attach to Actions Runner',
}
}
```
## Demo Flow
1. Trigger job re-run with "Enable debug logging" checked in GitHub UI
2. Runner starts, detects debug mode (`Global.WriteDebug == true`)
3. DAP server starts, console shows: `DAP debugger waiting for connection on port 4711...`
4. In nvim: `:lua require('dap').continue()`
5. Connection established, capabilities exchanged
6. Job begins, pauses before first step
7. nvim shows "stopped" state, variables panel shows contexts
8. User explores variables, evaluates expressions, runs shell commands
9. User presses `n` (next) to advance to next step
10. After step completes, user can inspect outputs before continuing
11. Repeat until job completes
## Testing Strategy
1. **Unit tests:** DAP protocol serialization, variable provider mapping
2. **Integration tests:** Mock DAP client verifying request/response sequences
3. **Manual testing:** Real job with nvim-dap attached
## Future Enhancements (Out of Scope for Demo)
- Composite action step-in (expand into sub-steps)
- Breakpoints on specific step names
- Watch expressions
- Conditional breakpoints
- Remote debugging (runner not on localhost)
- VS Code extension
## Estimated Effort
| Phase | Effort |
|-------|--------|
| Phase 1: Protocol Infrastructure | 4-6 hours |
| Phase 2: Debug Session Logic | 4-6 hours |
| Phase 3: StepsRunner Integration | 2-3 hours |
| Phase 4: Expression & Shell | 3-4 hours |
| Phase 5: Startup & Polish | 2-3 hours |
| **Total** | **~2-3 days** |
## Key Files to Modify
| File | Changes |
|------|---------|
| `src/Runner.Worker/JobRunner.cs` | Start DAP server when debug mode enabled |
| `src/Runner.Worker/StepsRunner.cs` | Add pause hooks before/after step execution |
| `src/Runner.Worker/Runner.Worker.csproj` | Add new Dap/ folder files |
## Key Files to Create
| File | Purpose |
|------|---------|
| `src/Runner.Worker/Dap/DapServer.cs` | TCP server, protocol framing |
| `src/Runner.Worker/Dap/DapDebugSession.cs` | Debug state machine, command handling |
| `src/Runner.Worker/Dap/DapMessages.cs` | Protocol message types |
| `src/Runner.Worker/Dap/DapVariableProvider.cs` | Context → DAP variable conversion |
## Reference Links
- [DAP Overview](https://microsoft.github.io/debug-adapter-protocol/overview)
- [DAP Specification](https://microsoft.github.io/debug-adapter-protocol/specification)
- [Enable Debug Logging (GitHub Docs)](https://docs.github.com/en/actions/how-tos/monitor-workflows/enable-debug-logging)

View File

@@ -0,0 +1,155 @@
# DAP Step Backward: Duplicate Expression Function Fix
**Status:** Ready for Implementation
**Date:** January 2026
**Related:** [dap-step-backwards.md](./dap-step-backwards.md)
## Problem
When stepping backward and then forward again during DAP debugging, the runner crashes with:
```
System.ArgumentException: An item with the same key has already been added. Key: always
at System.Collections.Generic.Dictionary`2.TryInsert(...)
at GitHub.DistributedTask.Expressions2.ExpressionParser.ParseContext..ctor(...)
```
### Reproduction Steps
1. Run a workflow with DAP debugging enabled
2. Let a step execute (e.g., `cat doesnotexist`)
3. Before the next step runs, step backward
4. Optionally run REPL commands
5. Step forward to re-run the step
6. Step forward again → **CRASH**
## Root Cause Analysis
### The Bug
In `StepsRunner.cs:89-93`, expression functions are added to `step.ExecutionContext.ExpressionFunctions` every time a step is processed:
```csharp
// Expression functions
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<AlwaysFunction>(PipelineTemplateConstants.Always, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<CancelledFunction>(PipelineTemplateConstants.Cancelled, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<FailureFunction>(PipelineTemplateConstants.Failure, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>(PipelineTemplateConstants.HashFiles, 1, byte.MaxValue));
```
### Why It Fails on Step-Back
1. **First execution:** Step is dequeued, functions added to `ExpressionFunctions`, step runs
2. **Checkpoint created:** Stores a **reference** to the `IStep` object (not a deep copy) - see `StepCheckpoint.cs:65`
3. **Step backward:** Checkpoint is restored, the **same** `IStep` object is re-queued to `jobContext.JobSteps`
4. **Second execution:** Step is dequeued again, functions added **again** to the same `ExpressionFunctions` list
5. **Duplicate entries:** The list now has two `AlwaysFunction` entries, two `CancelledFunction` entries, etc.
6. **Crash:** When `ExpressionParser.ParseContext` constructor iterates over functions and adds them to a `Dictionary` (`ExpressionParser.cs:460-465`), it throws on the duplicate key "always"
### Key Insight
The `ExpressionFunctions` property on `ExecutionContext` is a `List<IFunctionInfo>` (`ExecutionContext.cs:199`). `List<T>.Add()` doesn't check for duplicates, so the functions get added twice. The error only manifests later when the expression parser builds its internal dictionary.
## Solution
### Chosen Approach: Clear ExpressionFunctions Before Adding
Clear the `ExpressionFunctions` list before adding the functions. This ensures a known state regardless of how the step arrived in the queue (fresh or restored from checkpoint).
### Why This Approach
| Approach | Pros | Cons |
|----------|------|------|
| **Clear before adding (chosen)** | Simple, explicit, ensures known state, works for any re-processing scenario | Slightly more work than strictly necessary on first run |
| Check before adding | Defensive | More complex, multiple conditions to check |
| Reset on checkpoint restore | Localized to DAP | Requires changes in multiple places, easy to miss edge cases |
The "clear before adding" approach is:
- **Simple:** One line of code
- **Robust:** Works regardless of why the step is being re-processed
- **Safe:** The functions are always the same set, so clearing and re-adding has no side effects
- **Future-proof:** If other code paths ever re-queue steps, this handles it automatically
## Implementation
### File to Modify
`src/Runner.Worker/StepsRunner.cs`
### Change
```csharp
// Before line 88, add:
step.ExecutionContext.ExpressionFunctions.Clear();
// Expression functions
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<AlwaysFunction>(PipelineTemplateConstants.Always, 0, 0));
// ... rest of the adds
```
### Full Context (lines ~85-94)
**Before:**
```csharp
// Start
step.ExecutionContext.Start();
// Expression functions
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<AlwaysFunction>(PipelineTemplateConstants.Always, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<CancelledFunction>(PipelineTemplateConstants.Cancelled, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<FailureFunction>(PipelineTemplateConstants.Failure, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>(PipelineTemplateConstants.HashFiles, 1, byte.MaxValue));
```
**After:**
```csharp
// Start
step.ExecutionContext.Start();
// Expression functions
// Clear first to handle step-back scenarios where the same step may be re-processed
step.ExecutionContext.ExpressionFunctions.Clear();
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<AlwaysFunction>(PipelineTemplateConstants.Always, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<CancelledFunction>(PipelineTemplateConstants.Cancelled, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<FailureFunction>(PipelineTemplateConstants.Failure, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>(PipelineTemplateConstants.HashFiles, 1, byte.MaxValue));
```
## Testing
### Manual Test Scenario
1. Create a workflow with multiple steps
2. Enable DAP debugging
3. Let step 1 execute
4. Pause before step 2
5. Step backward (restore to before step 1)
6. Step forward (re-run step 1)
7. Step forward again (run step 2)
8. **Verify:** No crash, step 2's condition evaluates correctly
### Edge Cases to Verify
- [ ] Step backward multiple times in a row
- [ ] Step backward then run REPL commands, then step forward
- [ ] `reverseContinue` to beginning, then step through all steps again
- [ ] Steps with `if: always()` condition (the specific function that was failing)
- [ ] Steps with `if: failure()` or `if: cancelled()` conditions
## Risk Assessment
**Risk: Low**
- The fix is minimal (one line)
- `ExpressionFunctions` is always populated with the same 5 functions at this point
- No other code depends on functions being accumulated across step re-runs
- Normal (non-DAP) execution is unaffected since steps are never re-queued
## Files Summary
| File | Change |
|------|--------|
| `src/Runner.Worker/StepsRunner.cs` | Add `Clear()` call before adding expression functions |

File diff suppressed because it is too large Load Diff

176
browser-ext/README.md Normal file
View File

@@ -0,0 +1,176 @@
# Actions DAP Debugger - Browser Extension
A Chrome extension that enables interactive debugging of GitHub Actions workflows directly in the browser. Connects to the runner's DAP server via a WebSocket proxy.
## Features
- **Variable Inspection**: Browse workflow context variables (`github`, `env`, `steps`, etc.)
- **REPL Console**: Evaluate expressions and run shell commands
- **Step Control**: Step forward, step back, continue, and reverse continue
- **GitHub Integration**: Debugger pane injects directly into the job page
## Quick Start
### 1. Start the WebSocket Proxy
The proxy bridges WebSocket connections from the browser to the DAP TCP server.
```bash
cd browser-ext/proxy
npm install
node proxy.js
```
The proxy listens on `ws://localhost:4712` and connects to the DAP server at `tcp://localhost:4711`.
### 2. Load the Extension in Chrome
1. Open Chrome and navigate to `chrome://extensions/`
2. Enable "Developer mode" (toggle in top right)
3. Click "Load unpacked"
4. Select the `browser-ext` directory
### 3. Start a Debug Session
1. Go to your GitHub repository
2. Navigate to Actions and select a workflow run
3. Click "Re-run jobs" → check "Enable debug logging"
4. Wait for the runner to display "DAP debugger waiting for connection..."
### 4. Connect the Extension
1. Navigate to the job page (`github.com/.../actions/runs/.../job/...`)
2. Click the extension icon in Chrome toolbar
3. Click "Connect"
4. The debugger pane will appear above the first workflow step
## Usage
### Variable Browser (Left Panel)
Click on scope names to expand and view variables:
- **Globals**: `github`, `env`, `runner` contexts
- **Job Outputs**: Outputs from previous jobs
- **Step Outputs**: Outputs from previous steps
### Console (Right Panel)
Enter expressions or commands:
```bash
# Evaluate expressions
${{ github.ref }}
${{ github.event_name }}
${{ env.MY_VAR }}
# Run shell commands (prefix with !)
!ls -la
!cat package.json
!env | grep GITHUB
# Modify variables
!export MY_VAR=new_value
```
### Control Buttons
| Button | Action | Description |
|--------|--------|-------------|
| ⏮ | Reverse Continue | Go back to first checkpoint |
| ◀ | Step Back | Go to previous checkpoint |
| ▶ | Continue | Run until next breakpoint/end |
| ⏭ | Step (Next) | Step to next workflow step |
## Architecture
```
Browser Extension ──WebSocket──► Proxy ──TCP──► Runner DAP Server
(port 4712) (port 4711)
```
The WebSocket proxy handles DAP message framing (Content-Length headers) and provides a browser-compatible connection.
## Configuration
### Proxy Settings
| Environment Variable | Default | Description |
|---------------------|---------|-------------|
| `WS_PORT` | 4712 | WebSocket server port |
| `DAP_HOST` | 127.0.0.1 | DAP server host |
| `DAP_PORT` | 4711 | DAP server port |
Or use CLI arguments:
```bash
node proxy.js --ws-port 4712 --dap-host 127.0.0.1 --dap-port 4711
```
### Extension Settings
Click the extension popup to configure:
- **Proxy Host**: Default `localhost`
- **Proxy Port**: Default `4712`
## File Structure
```
browser-ext/
├── manifest.json # Extension configuration
├── background/
│ └── background.js # Service worker - DAP client
├── content/
│ ├── content.js # UI injection and interaction
│ └── content.css # Debugger pane styling
├── popup/
│ ├── popup.html # Extension popup UI
│ ├── popup.js # Popup logic
│ └── popup.css # Popup styling
├── lib/
│ └── dap-protocol.js # DAP message helpers
├── proxy/
│ ├── proxy.js # WebSocket-to-TCP bridge
│ └── package.json # Proxy dependencies
└── icons/
├── icon16.png
├── icon48.png
└── icon128.png
```
## Troubleshooting
### "Failed to connect to DAP server"
1. Ensure the proxy is running: `node proxy.js`
2. Ensure the runner is waiting for a debugger connection
3. Check that debug logging is enabled for the job
### Debugger pane doesn't appear
1. Verify you're on a job page (`/actions/runs/*/job/*`)
2. Open DevTools and check for console errors
3. Reload the page after loading the extension
### Variables don't load
1. Wait for the "stopped" event (status shows PAUSED)
2. Click on a scope to expand it
3. Check the console for error messages
## Development
### Modifying the Extension
After making changes:
1. Go to `chrome://extensions/`
2. Click the refresh icon on the extension card
3. Reload the GitHub job page
### Debugging
- **Background script**: Inspect via `chrome://extensions/` → "Inspect views: service worker"
- **Content script**: Use DevTools on the GitHub page
- **Proxy**: Watch terminal output for message logs
## Security Note
The proxy and extension are designed for local development. The proxy only accepts connections from localhost. Do not expose the proxy to the network without additional security measures.

View File

@@ -0,0 +1,528 @@
/**
* Background Script - DAP Client
*
* Service worker that manages WebSocket connection to the proxy
* and handles DAP protocol communication.
*
* NOTE: Chrome MV3 service workers can be terminated after ~30s of inactivity.
* We handle this with:
* 1. Keepalive pings to keep the WebSocket active
* 2. Automatic reconnection when the service worker restarts
* 3. Storing connection state in chrome.storage.session
*/
// ---- Connection state ----
// Active WebSocket to the proxy, or null when not connected.
let ws = null;
// High-level session state; broadcast to the popup and content scripts.
let connectionStatus = 'disconnected'; // disconnected, connecting, connected, paused, running, error
// Monotonic DAP "seq" counter for outgoing requests.
let sequenceNumber = 1;
const pendingRequests = new Map(); // seq -> { resolve, reject, command, timeout }
// ---- Reconnection state ----
let reconnectAttempts = 0;
const MAX_RECONNECT_ATTEMPTS = 10;
const RECONNECT_BASE_DELAY = 1000; // Start with 1s, exponential backoff
let reconnectTimer = null; // non-null while a reconnect attempt is scheduled
let lastConnectedUrl = null; // last URL attempted; reused for reconnects
let wasConnectedBeforeIdle = false; // true once a connection has succeeded
// Keepalive interval - send ping every 15s to keep service worker AND WebSocket alive
// Chrome MV3 service workers get suspended after ~30s of inactivity
// We need to send actual WebSocket messages to keep both alive
const KEEPALIVE_INTERVAL = 15000;
let keepaliveTimer = null;
// Default configuration
const DEFAULT_URL = 'ws://localhost:4712';
/**
 * Initialize on service worker startup - check if we should reconnect.
 *
 * Reads the persisted session state; if a connection was active before the
 * worker was suspended, schedules a reconnect to the saved URL.
 */
async function initializeOnStartup() {
  console.log('[Background] Service worker starting up...');
  try {
    const { connectionUrl, shouldBeConnected } = await chrome.storage.session.get(
      ['connectionUrl', 'shouldBeConnected', 'lastStatus']
    );
    if (!shouldBeConnected || !connectionUrl) {
      return;
    }
    console.log('[Background] Restoring connection after service worker restart');
    lastConnectedUrl = connectionUrl;
    wasConnectedBeforeIdle = true;
    // Small delay to let things settle before dialing out again.
    setTimeout(() => connect(connectionUrl), 500);
  } catch (e) {
    console.log('[Background] No session state to restore');
  }
}
/**
 * Save connection state to session storage (survives service worker restart).
 */
async function saveConnectionState() {
  const shouldBeConnected =
    connectionStatus !== 'disconnected' && connectionStatus !== 'error';
  try {
    await chrome.storage.session.set({
      connectionUrl: lastConnectedUrl,
      shouldBeConnected,
      lastStatus: connectionStatus,
    });
  } catch (e) {
    console.warn('[Background] Failed to save connection state:', e);
  }
}
/**
 * Clear connection state from session storage.
 */
async function clearConnectionState() {
  const keys = ['connectionUrl', 'shouldBeConnected', 'lastStatus'];
  try {
    await chrome.storage.session.remove(keys);
  } catch (e) {
    console.warn('[Background] Failed to clear connection state:', e);
  }
}
/**
 * Start keepalive ping to prevent service worker termination.
 *
 * CRITICAL: actual WebSocket traffic is required - a bare timer is not
 * enough to stop Chrome suspending the service worker (which closes the
 * socket with code 1001 after ~30s of inactivity).
 */
function startKeepalive() {
  stopKeepalive();
  const tick = () => {
    const socketOpen = ws && ws.readyState === WebSocket.OPEN;
    if (socketOpen) {
      try {
        // A lightweight message keeps both the proxy connection and the
        // Chrome service worker alive.
        const keepaliveMsg = JSON.stringify({ type: 'keepalive', timestamp: Date.now() });
        ws.send(keepaliveMsg);
        console.log('[Background] Keepalive sent');
      } catch (e) {
        console.error('[Background] Keepalive error:', e);
        handleUnexpectedClose();
      }
      return;
    }
    if (wasConnectedBeforeIdle || lastConnectedUrl) {
      // Socket is gone but we expected to be connected - try to recover.
      console.log('[Background] Connection lost during keepalive check');
      handleUnexpectedClose();
    }
  };
  keepaliveTimer = setInterval(tick, KEEPALIVE_INTERVAL);
  console.log('[Background] Keepalive timer started (interval: ' + KEEPALIVE_INTERVAL + 'ms)');
}
/**
 * Stop keepalive ping.
 */
function stopKeepalive() {
  if (!keepaliveTimer) {
    return;
  }
  clearInterval(keepaliveTimer);
  keepaliveTimer = null;
  console.log('[Background] Keepalive timer stopped');
}
/**
 * Handle unexpected connection close - attempt reconnection.
 *
 * Schedules a reconnect with exponential backoff (capped at 30s) until
 * MAX_RECONNECT_ATTEMPTS is exhausted, then gives up with an error status.
 */
function handleUnexpectedClose() {
  if (reconnectTimer) {
    return; // A reconnect is already scheduled
  }
  if (!lastConnectedUrl) {
    console.log('[Background] No URL to reconnect to');
    return;
  }
  if (reconnectAttempts >= MAX_RECONNECT_ATTEMPTS) {
    console.error('[Background] Max reconnection attempts reached');
    connectionStatus = 'error';
    broadcastStatus();
    clearConnectionState();
    return;
  }
  // Exponential backoff: 1s, 2s, 4s, ... capped at 30s.
  const backoff = RECONNECT_BASE_DELAY * 2 ** reconnectAttempts;
  const delay = Math.min(backoff, 30000);
  reconnectAttempts++;
  console.log(`[Background] Scheduling reconnect attempt ${reconnectAttempts}/${MAX_RECONNECT_ATTEMPTS} in ${delay}ms`);
  connectionStatus = 'connecting';
  broadcastStatus();
  reconnectTimer = setTimeout(() => {
    reconnectTimer = null;
    // Skip if a connection was established while we were waiting.
    const alreadyLive = ['connected', 'paused', 'running'].includes(connectionStatus);
    if (!alreadyLive) {
      connect(lastConnectedUrl);
    }
  }, delay);
}
/**
 * Connect to the WebSocket proxy.
 *
 * Tears down any existing socket and pending reconnect timer, then opens a
 * new WebSocket to `url` (falling back to DEFAULT_URL) and wires the
 * lifecycle handlers: DAP handshake on open, message dispatch, and
 * reconnect-on-unexpected-close.
 *
 * @param {string} url WebSocket proxy URL, e.g. 'ws://localhost:4712'
 */
function connect(url) {
  // Clean up existing connection
  if (ws) {
    try {
      ws.onclose = null; // Prevent triggering reconnect
      ws.close(1000, 'Reconnecting');
    } catch (e) {
      // Ignore
    }
    ws = null;
  }
  // Clear any pending reconnect
  if (reconnectTimer) {
    clearTimeout(reconnectTimer);
    reconnectTimer = null;
  }
  connectionStatus = 'connecting';
  broadcastStatus();
  // Use provided URL or default
  const wsUrl = url || DEFAULT_URL;
  // Remember the target so keepalive/reconnect logic can redial it.
  lastConnectedUrl = wsUrl;
  console.log(`[Background] Connecting to ${wsUrl}`);
  try {
    ws = new WebSocket(wsUrl);
  } catch (e) {
    console.error('[Background] Failed to create WebSocket:', e);
    connectionStatus = 'error';
    broadcastStatus();
    handleUnexpectedClose();
    return;
  }
  ws.onopen = async () => {
    console.log('[Background] WebSocket connected');
    connectionStatus = 'connected';
    reconnectAttempts = 0; // Reset on successful connection
    wasConnectedBeforeIdle = true;
    broadcastStatus();
    saveConnectionState();
    startKeepalive();
    // Initialize DAP session
    try {
      await initializeDapSession();
    } catch (error) {
      console.error('[Background] Failed to initialize DAP session:', error);
      // Don't set error status - the connection might still be usable
      // The DAP server might just need the job to progress
    }
  };
  ws.onmessage = (event) => {
    try {
      const message = JSON.parse(event.data);
      handleDapMessage(message);
    } catch (error) {
      console.error('[Background] Failed to parse message:', error);
    }
  };
  ws.onclose = (event) => {
    console.log(`[Background] WebSocket closed: ${event.code} ${event.reason || '(no reason)'}`);
    ws = null;
    stopKeepalive();
    // Reject any pending requests
    for (const [seq, pending] of pendingRequests) {
      if (pending.timeout) clearTimeout(pending.timeout);
      pending.reject(new Error('Connection closed'));
    }
    pendingRequests.clear();
    // Determine if we should reconnect
    // Code 1000 = normal closure (user initiated)
    // Code 1001 = going away (service worker idle, browser closing, etc.)
    // Code 1006 = abnormal closure (connection lost)
    // Code 1011 = server error
    const shouldReconnect = event.code !== 1000;
    if (shouldReconnect && wasConnectedBeforeIdle) {
      console.log('[Background] Unexpected close, will attempt reconnect');
      connectionStatus = 'connecting';
      broadcastStatus();
      handleUnexpectedClose();
    } else {
      connectionStatus = 'disconnected';
      wasConnectedBeforeIdle = false;
      broadcastStatus();
      clearConnectionState();
    }
  };
  ws.onerror = (event) => {
    console.error('[Background] WebSocket error:', event);
    // onclose will be called after onerror, so we handle reconnection there
  };
}
/**
 * Disconnect from the WebSocket proxy (user initiated).
 *
 * Cancels reconnection, stops keepalive, sends a best-effort DAP
 * 'disconnect' request, closes the socket with code 1000, and fails any
 * in-flight requests so their timers are released.
 */
function disconnect() {
  // Stop any reconnection attempts - this close is intentional.
  if (reconnectTimer) {
    clearTimeout(reconnectTimer);
    reconnectTimer = null;
  }
  reconnectAttempts = 0;
  wasConnectedBeforeIdle = false;
  stopKeepalive();
  if (ws) {
    // Send disconnect request to DAP server first (best effort).
    sendDapRequest('disconnect', {}).catch(() => {});
    // Detach onclose so closing does not trigger the auto-reconnect path.
    const socket = ws;
    ws = null;
    socket.onclose = null;
    try {
      socket.close(1000, 'User disconnected');
    } catch (e) {
      // Ignore
    }
  }
  // BUGFIX: with onclose detached, nothing rejects in-flight requests
  // (including the 'disconnect' request above), so their 30s timeout timers
  // would linger and keep the MV3 service worker alive. Reject them now and
  // release the timers, mirroring the onclose handler in connect().
  for (const [, pending] of pendingRequests) {
    if (pending.timeout) clearTimeout(pending.timeout);
    pending.reject(new Error('Connection closed'));
  }
  pendingRequests.clear();
  connectionStatus = 'disconnected';
  broadcastStatus();
  clearConnectionState();
}
/**
 * Initialize DAP session (initialize + attach + configurationDone).
 *
 * Performs the standard three-phase DAP handshake in order, logging each
 * response. Any failure rejects and is handled by the caller.
 */
async function initializeDapSession() {
  // 1. Advertise client identity and capabilities.
  const initResponse = await sendDapRequest('initialize', {
    clientID: 'browser-extension',
    clientName: 'Actions DAP Debugger',
    adapterID: 'github-actions-runner',
    pathFormat: 'path',
    linesStartAt1: true,
    columnsStartAt1: true,
    supportsVariableType: true,
    supportsVariablePaging: true,
    supportsRunInTerminalRequest: false,
    supportsProgressReporting: false,
    supportsInvalidatedEvent: true,
  });
  console.log('[Background] Initialize response:', initResponse);
  // 2. Attach to the already-running job session.
  const attachResponse = await sendDapRequest('attach', {});
  console.log('[Background] Attach response:', attachResponse);
  // 3. Signal that configuration is complete.
  const configResponse = await sendDapRequest('configurationDone', {});
  console.log('[Background] ConfigurationDone response:', configResponse);
}
/**
 * Send a DAP request and return a promise for the response.
 *
 * Allocates the next sequence number, registers the pending promise with a
 * 30s timeout, then writes the request to the socket. Rejects immediately
 * when not connected, on timeout, or on a send failure.
 */
function sendDapRequest(command, args = {}) {
  return new Promise((resolve, reject) => {
    const open = ws && ws.readyState === WebSocket.OPEN;
    if (!open) {
      reject(new Error('Not connected'));
      return;
    }
    const seq = sequenceNumber++;
    const request = {
      seq,
      type: 'request',
      command,
      arguments: args,
    };
    console.log(`[Background] Sending DAP request: ${command} (seq: ${seq})`);
    // Fail the request if no response arrives within 30 seconds.
    const timeout = setTimeout(() => {
      if (!pendingRequests.has(seq)) {
        return;
      }
      pendingRequests.delete(seq);
      reject(new Error(`Request timed out: ${command}`));
    }, 30000);
    pendingRequests.set(seq, { resolve, reject, command, timeout });
    try {
      ws.send(JSON.stringify(request));
    } catch (e) {
      // Roll back the registration so the timeout cannot fire later.
      pendingRequests.delete(seq);
      clearTimeout(timeout);
      reject(new Error(`Failed to send request: ${e.message}`));
    }
  });
}
/**
 * Handle incoming DAP message (response or event).
 *
 * Also recognizes two proxy-level message types that are not part of DAP:
 * 'proxy-error' and 'keepalive-ack'. Unknown types are silently ignored.
 */
function handleDapMessage(message) {
  switch (message.type) {
    case 'response':
      handleDapResponse(message);
      break;
    case 'event':
      handleDapEvent(message);
      break;
    case 'proxy-error':
      console.error('[Background] Proxy error:', message.message);
      // Don't immediately set error status - might be transient
      break;
    case 'keepalive-ack':
      // Keepalive acknowledged by proxy - connection is healthy
      console.log('[Background] Keepalive acknowledged');
      break;
  }
}
/**
 * Handle DAP response.
 *
 * Matches the response to its pending request by request_seq, cancels the
 * timeout, and settles the stored promise.
 */
function handleDapResponse(response) {
  const seq = response.request_seq;
  const pending = pendingRequests.get(seq);
  if (!pending) {
    // Late or duplicate response (e.g. after a timeout already fired).
    console.warn(`[Background] No pending request for seq ${response.request_seq}`);
    return;
  }
  pendingRequests.delete(seq);
  if (pending.timeout) clearTimeout(pending.timeout);
  if (!response.success) {
    console.error(`[Background] DAP response error: ${response.command} - ${response.message}`);
    pending.reject(new Error(response.message || 'Unknown error'));
    return;
  }
  console.log(`[Background] DAP response success: ${response.command}`);
  pending.resolve(response.body || {});
}
/**
 * Handle DAP event.
 *
 * Updates the local connection status for lifecycle events, then forwards
 * every event (including 'initialized' and 'output') to content scripts.
 */
function handleDapEvent(event) {
  console.log(`[Background] DAP event: ${event.event}`, event.body);
  const name = event.event;
  if (name === 'stopped') {
    // Execution paused at a step boundary.
    connectionStatus = 'paused';
    broadcastStatus();
    saveConnectionState();
  } else if (name === 'continued') {
    connectionStatus = 'running';
    broadcastStatus();
    saveConnectionState();
  } else if (name === 'terminated') {
    // Job finished - do not attempt to reconnect.
    connectionStatus = 'disconnected';
    wasConnectedBeforeIdle = false;
    broadcastStatus();
    clearConnectionState();
  }
  // 'initialized' and 'output' require no status change here.
  // Broadcast event to all content scripts
  broadcastEvent(event);
}
/**
 * Broadcast connection status to popup and content scripts.
 */
function broadcastStatus() {
  const payload = { type: 'status-changed', status: connectionStatus };
  // Popup (and any other extension pages); ignore "no receiver" rejections.
  chrome.runtime.sendMessage(payload).catch(() => {});
  // Content scripts on GitHub job pages.
  chrome.tabs.query({ url: 'https://github.com/*/*/actions/runs/*/job/*' }, (tabs) => {
    if (chrome.runtime.lastError) return;
    for (const tab of tabs) {
      chrome.tabs.sendMessage(tab.id, payload).catch(() => {});
    }
  });
}
/**
 * Broadcast DAP event to content scripts.
 */
function broadcastEvent(event) {
  const jobPages = { url: 'https://github.com/*/*/actions/runs/*/job/*' };
  chrome.tabs.query(jobPages, (tabs) => {
    if (chrome.runtime.lastError) return;
    for (const tab of tabs) {
      chrome.tabs.sendMessage(tab.id, { type: 'dap-event', event }).catch(() => {});
    }
  });
}
/**
 * Message handler for requests from popup and content scripts.
 *
 * Returns `true` only for the 'dap-request' case so Chrome keeps the
 * sendResponse channel open until the asynchronous DAP response arrives;
 * all other cases respond synchronously and return `false`.
 */
chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {
  console.log('[Background] Received message:', message.type);
  switch (message.type) {
    case 'get-status':
      sendResponse({ status: connectionStatus, reconnecting: reconnectTimer !== null });
      return false;
    case 'connect':
      reconnectAttempts = 0; // Reset attempts on manual connect
      connect(message.url || DEFAULT_URL);
      sendResponse({ status: connectionStatus });
      return false;
    case 'disconnect':
      disconnect();
      sendResponse({ status: connectionStatus });
      return false;
    case 'dap-request':
      // Handle DAP request from content script
      sendDapRequest(message.command, message.args || {})
        .then((body) => {
          sendResponse({ success: true, body });
        })
        .catch((error) => {
          sendResponse({ success: false, error: error.message });
        });
      return true; // Will respond asynchronously
    default:
      console.warn('[Background] Unknown message type:', message.type);
      return false;
  }
});
// Initialize on startup (runs every time Chrome restarts the service worker,
// restoring any previously-active connection from session storage)
initializeOnStartup();
// Log startup
console.log('[Background] Actions DAP Debugger background script loaded');

View File

@@ -0,0 +1,337 @@
/**
 * Content Script Styles
 *
 * Matches GitHub's Primer design system for seamless integration.
 * Uses CSS custom properties for light/dark mode support.
 * Dark-mode hex values are the fallbacks; light mode is handled both via
 * prefers-color-scheme and GitHub's data-color-mode attribute (see bottom).
 */
/* Debugger Pane Container */
.dap-debugger-pane {
  background-color: var(--bgColor-default, #0d1117);
  border-color: var(--borderColor-default, #30363d) !important;
  font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif;
  font-size: 14px;
}
/* Header */
.dap-header {
  background-color: var(--bgColor-muted, #161b22);
}
.dap-header .octicon {
  color: var(--fgColor-muted, #8b949e);
}
/* Keep long step names on one line with an ellipsis */
.dap-step-info {
  flex: 1;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}
/* Status Labels */
.dap-status-label {
  flex-shrink: 0;
}
/* Primer label variants - forced colors so they render even when GitHub's
   own Label styles are not applied to injected elements */
.Label--attention {
  background-color: #9e6a03 !important;
  color: #ffffff !important;
  border: none !important;
}
.Label--success {
  background-color: #238636 !important;
  color: #ffffff !important;
  border: none !important;
}
.Label--danger {
  background-color: #da3633 !important;
  color: #ffffff !important;
  border: none !important;
}
.Label--secondary {
  background-color: #30363d !important;
  color: #8b949e !important;
  border: none !important;
}
/* Content Area */
.dap-content {
  min-height: 200px;
  max-height: 400px;
}
/* Scopes Panel (left: variable browser) */
.dap-scopes {
  border-color: var(--borderColor-default, #30363d) !important;
  min-width: 150px;
}
.dap-scope-header {
  background-color: var(--bgColor-muted, #161b22);
  font-size: 12px;
}
.dap-scope-tree {
  font-size: 12px;
  line-height: 1.6;
}
/* Tree Nodes */
.dap-tree-node {
  padding: 1px 0;
}
.dap-tree-content {
  display: flex;
  align-items: flex-start;
  padding: 2px 4px;
  border-radius: 3px;
}
.dap-tree-content:hover {
  background-color: var(--bgColor-muted, #161b22);
}
/* Indent child nodes and draw a guide line */
.dap-tree-children {
  margin-left: 16px;
  border-left: 1px solid var(--borderColor-muted, #21262d);
  padding-left: 8px;
}
.dap-expand-icon {
  display: inline-block;
  width: 16px;
  text-align: center;
  color: var(--fgColor-muted, #8b949e);
  font-size: 10px;
  flex-shrink: 0;
  user-select: none;
}
.dap-tree-node .text-bold {
  color: var(--fgColor-default, #e6edf3);
  font-weight: 600;
  word-break: break-word;
}
.dap-tree-node .color-fg-muted {
  color: var(--fgColor-muted, #8b949e);
  word-break: break-word;
}
/* REPL Console (right panel) */
.dap-repl {
  display: flex;
  flex-direction: column;
}
.dap-repl-header {
  background-color: var(--bgColor-muted, #161b22);
  font-size: 12px;
  flex-shrink: 0;
}
.dap-repl-output {
  background-color: var(--bgColor-inset, #010409);
  font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, "Liberation Mono", monospace;
  font-size: 12px;
  line-height: 1.5;
  padding: 8px;
  flex: 1;
  overflow-y: auto;
  min-height: 100px;
}
/* Output line kinds: echoed input, results, stdout, errors */
.dap-output-input {
  color: var(--fgColor-muted, #8b949e);
}
.dap-output-result {
  color: var(--fgColor-default, #e6edf3);
}
.dap-output-stdout {
  color: var(--fgColor-default, #e6edf3);
}
.dap-output-error {
  color: var(--fgColor-danger, #f85149);
}
/* REPL Input */
.dap-repl-input {
  flex-shrink: 0;
}
.dap-repl-input input {
  font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, "Liberation Mono", monospace;
  font-size: 12px;
  background-color: var(--bgColor-inset, #010409) !important;
  border-color: var(--borderColor-default, #30363d) !important;
  color: var(--fgColor-default, #e6edf3) !important;
  width: 100%;
}
.dap-repl-input input:focus {
  border-color: var(--focus-outlineColor, #1f6feb) !important;
  outline: none;
  box-shadow: 0 0 0 3px rgba(31, 111, 235, 0.3);
}
.dap-repl-input input:disabled {
  opacity: 0.5;
  cursor: not-allowed;
}
.dap-repl-input input::placeholder {
  color: var(--fgColor-muted, #8b949e);
}
/* Control Buttons (step back/forward, continue, reverse continue) */
.dap-controls {
  background-color: var(--bgColor-muted, #161b22);
}
.dap-controls button {
  min-width: 32px;
  height: 28px;
  display: inline-flex;
  align-items: center;
  justify-content: center;
  padding: 0 8px;
}
.dap-controls button svg {
  width: 14px;
  height: 14px;
}
.dap-controls button:disabled {
  opacity: 0.4;
  cursor: not-allowed;
}
.dap-controls button:not(:disabled):hover {
  background-color: var(--bgColor-accent-muted, #388bfd26);
}
.dap-step-counter {
  flex-shrink: 0;
}
/* Utility Classes (in case GitHub's aren't loaded) */
.d-flex { display: flex; }
.flex-column { flex-direction: column; }
.flex-items-center { align-items: center; }
.flex-auto { flex: 1 1 auto; }
.p-2 { padding: 8px; }
.px-2 { padding-left: 8px; padding-right: 8px; }
.mx-2 { margin-left: 8px; margin-right: 8px; }
.mb-2 { margin-bottom: 8px; }
.ml-2 { margin-left: 8px; }
.ml-3 { margin-left: 16px; }
.mr-2 { margin-right: 8px; }
.ml-auto { margin-left: auto; }
.border { border: 1px solid var(--borderColor-default, #30363d); }
.border-bottom { border-bottom: 1px solid var(--borderColor-default, #30363d); }
.border-top { border-top: 1px solid var(--borderColor-default, #30363d); }
.border-right { border-right: 1px solid var(--borderColor-default, #30363d); }
.rounded-2 { border-radius: 6px; }
.overflow-auto { overflow: auto; }
.text-bold { font-weight: 600; }
.text-mono { font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, monospace; }
.text-small { font-size: 12px; }
.color-fg-muted { color: var(--fgColor-muted, #8b949e); }
.color-fg-danger { color: var(--fgColor-danger, #f85149); }
.color-fg-default { color: var(--fgColor-default, #e6edf3); }
/* Light mode overrides (OS-level preference) */
@media (prefers-color-scheme: light) {
  .dap-debugger-pane {
    background-color: var(--bgColor-default, #ffffff);
    border-color: var(--borderColor-default, #d0d7de) !important;
  }
  .dap-header,
  .dap-scope-header,
  .dap-repl-header,
  .dap-controls {
    background-color: var(--bgColor-muted, #f6f8fa);
  }
  .dap-repl-output,
  .dap-repl-input input {
    background-color: var(--bgColor-inset, #f6f8fa) !important;
  }
  .dap-tree-node .text-bold {
    color: var(--fgColor-default, #1f2328);
  }
  .color-fg-muted {
    color: var(--fgColor-muted, #656d76);
  }
}
/* Respect GitHub's color mode data attribute (site-level setting, which may
   differ from the OS preference handled above) */
[data-color-mode="light"] .dap-debugger-pane,
html[data-color-mode="light"] .dap-debugger-pane {
  background-color: #ffffff;
  border-color: #d0d7de !important;
}
[data-color-mode="light"] .dap-header,
[data-color-mode="light"] .dap-scope-header,
[data-color-mode="light"] .dap-repl-header,
[data-color-mode="light"] .dap-controls,
html[data-color-mode="light"] .dap-header,
html[data-color-mode="light"] .dap-scope-header,
html[data-color-mode="light"] .dap-repl-header,
html[data-color-mode="light"] .dap-controls {
  background-color: #f6f8fa;
}
[data-color-mode="light"] .dap-repl-output,
[data-color-mode="light"] .dap-repl-input input,
html[data-color-mode="light"] .dap-repl-output,
html[data-color-mode="light"] .dap-repl-input input {
  background-color: #f6f8fa !important;
}
/* Debug Button in Header */
.dap-debug-btn-container {
  display: flex;
  align-items: center;
}
.dap-debug-btn {
  display: inline-flex;
  align-items: center;
  gap: 4px;
  font-size: 14px;
  font-weight: 500;
}
/* Highlight while a debug session is active */
.dap-debug-btn.selected {
  background-color: var(--bgColor-accent-muted, #388bfd26);
  border-color: var(--borderColor-accent-emphasis, #388bfd);
}
.dap-debug-btn:hover:not(:disabled) {
  background-color: var(--bgColor-neutral-muted, #6e768166);
}
/* Light mode for debug button */
[data-color-mode="light"] .dap-debug-btn.selected,
html[data-color-mode="light"] .dap-debug-btn.selected {
  background-color: #ddf4ff;
  border-color: #54aeff;
}
View File

@@ -0,0 +1,759 @@
/**
* Content Script - Debugger UI
*
* Injects the debugger pane into GitHub Actions job pages and handles
* all UI interactions.
*/
// State (module-level; lives for the lifetime of the content script)
let debuggerPane = null; // root element of the injected debugger pane, null until injected
let currentFrameId = 0; // DAP stack frame id used for evaluate/scopes requests
let isConnected = false; // whether a DAP session is currently attached
let replHistory = []; // previously submitted REPL commands, oldest first
let replHistoryIndex = -1; // cursor into replHistory for ArrowUp/ArrowDown navigation
/**
 * Escape a string for safe insertion into HTML.
 *
 * Replaces the DOM-roundtrip trick (div.textContent -> div.innerHTML),
 * which does not escape quote characters and requires a live document.
 * This pure-string version also escapes " and ', so the result is safe
 * in attribute contexts too, and it coerces non-string input like the
 * original did via textContent assignment.
 *
 * @param {*} text - value to escape (coerced to string)
 * @returns {string} HTML-safe text
 */
function escapeHtml(text) {
  return String(text)
    .replace(/&/g, '&amp;')   // must run first so later entities aren't double-escaped
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
}
/**
 * Strip a trailing result-indicator suffix from a step name.
 * e.g. "Run tests [running]" -> "Run tests". Only the known indicator
 * words are stripped (case-insensitively); other bracketed suffixes
 * are left untouched.
 *
 * @param {string} name - step name as reported by the DAP stack frame
 * @returns {string} name without the indicator suffix
 */
function stripResultIndicator(name) {
  const indicatorSuffix = /\s*\[(running|success|failure|skipped|cancelled)\]$/i;
  return name.replace(indicatorSuffix, '');
}
/**
 * Send a DAP request to the background script and await its reply.
 *
 * @param {string} command - DAP command name (e.g. "stackTrace", "evaluate")
 * @param {object} [args] - DAP arguments object for the command
 * @returns {Promise<object>} resolves with the response body on success,
 *   rejects with an Error carrying the failure message otherwise
 */
function sendDapRequest(command, args = {}) {
  return new Promise((resolve, reject) => {
    const message = { type: 'dap-request', command, args };
    chrome.runtime.sendMessage(message, (response) => {
      if (chrome.runtime.lastError) {
        // Messaging-layer failure (e.g. background worker gone)
        reject(new Error(chrome.runtime.lastError.message));
        return;
      }
      if (response && response.success) {
        resolve(response.body);
      } else {
        reject(new Error(response?.error || 'Unknown error'));
      }
    });
  });
}
/**
 * Build a map of workflow steps from the DOM.
 *
 * @returns {Map<number, object>} keyed by zero-based DOM order; each value
 *   holds the <check-step> element plus its data attributes:
 *   number (parsed int), name, conclusion, externalId.
 *   NOTE(review): dataset.number is assumed to always be a decimal string;
 *   a missing attribute would yield NaN — confirm against the page markup.
 */
function buildStepMap() {
  const map = new Map();
  document.querySelectorAll('check-step').forEach((el, idx) => {
    map.set(idx, {
      element: el,
      // Explicit radix avoids legacy octal/hex interpretation quirks.
      number: parseInt(el.dataset.number, 10),
      name: el.dataset.name,
      conclusion: el.dataset.conclusion,
      externalId: el.dataset.externalId,
    });
  });
  return map;
}
/**
 * Find a <check-step> element by its data-name attribute.
 * The name is CSS-escaped so arbitrary step names cannot break the selector.
 *
 * @param {string} stepName - step name to look up
 * @returns {Element|null} matching element, or null if none
 */
function findStepByName(stepName) {
  const selector = `check-step[data-name="${CSS.escape(stepName)}"]`;
  return document.querySelector(selector);
}
/**
 * Find a <check-step> element by its data-number attribute.
 *
 * @param {number} stepNumber - 1-based step number as shown in the GitHub UI
 * @returns {Element|null} matching element, or null if none
 */
function findStepByNumber(stepNumber) {
  const selector = `check-step[data-number="${stepNumber}"]`;
  return document.querySelector(selector);
}
/**
 * Get all step elements.
 *
 * @returns {NodeList} static (possibly empty) list of every <check-step>
 *   element currently in the document.
 */
function getAllSteps() {
return document.querySelectorAll('check-step');
}
/**
 * Create the debugger pane HTML.
 *
 * Returns the inner markup for the pane: a header (bug icon, step info,
 * status label), a two-column body (variables tree + REPL console), and a
 * control bar (reverse-continue / step-back / continue / next buttons plus
 * a step counter). All interactive elements start disabled until a DAP
 * session attaches; setupPaneEventHandlers() wires up the behavior.
 *
 * @returns {string} HTML string to assign to the pane's innerHTML
 */
function createDebuggerPaneHTML() {
return `
<div class="dap-header d-flex flex-items-center p-2 border-bottom">
<svg class="octicon mr-2" viewBox="0 0 16 16" width="16" height="16">
<path fill="currentColor" d="M4.72.22a.75.75 0 0 1 1.06 0l1 1a.75.75 0 0 1-1.06 1.06l-.22-.22-.22.22a.75.75 0 0 1-1.06-1.06l1-1Z"/>
<path fill="currentColor" d="M11.28.22a.75.75 0 0 0-1.06 0l-1 1a.75.75 0 0 0 1.06 1.06l.22-.22.22.22a.75.75 0 0 0 1.06-1.06l-1-1Z"/>
<path fill="currentColor" d="M8 4a4 4 0 0 0-4 4v1h1v2.5a2.5 2.5 0 0 0 2.5 2.5h1a2.5 2.5 0 0 0 2.5-2.5V9h1V8a4 4 0 0 0-4-4Z"/>
<path fill="currentColor" d="M5 9H3.5a.5.5 0 0 0-.5.5v2a.5.5 0 0 0 .5.5H5V9ZM11 9h1.5a.5.5 0 0 1 .5.5v2a.5.5 0 0 1-.5.5H11V9Z"/>
</svg>
<span class="text-bold">Debugger</span>
<span class="dap-step-info color-fg-muted ml-2">Connecting...</span>
<span class="Label dap-status-label ml-auto">CONNECTING</span>
</div>
<div class="dap-content d-flex" style="height: 300px;">
<!-- Scopes Panel -->
<div class="dap-scopes border-right overflow-auto" style="width: 33%;">
<div class="dap-scope-header p-2 text-bold border-bottom">Variables</div>
<div class="dap-scope-tree p-2">
<div class="color-fg-muted">Connect to view variables</div>
</div>
</div>
<!-- REPL Console -->
<div class="dap-repl d-flex flex-column" style="width: 67%;">
<div class="dap-repl-header p-2 text-bold border-bottom">Console</div>
<div class="dap-repl-output overflow-auto flex-auto p-2 text-mono text-small">
<div class="color-fg-muted">Welcome to Actions DAP Debugger</div>
<div class="color-fg-muted">Enter expressions like: \${{ github.ref }}</div>
<div class="color-fg-muted">Or shell commands: !ls -la</div>
</div>
<div class="dap-repl-input border-top p-2">
<input type="text" class="form-control input-sm text-mono"
placeholder="Enter expression or !command" disabled>
</div>
</div>
</div>
<!-- Control buttons -->
<div class="dap-controls d-flex flex-items-center p-2 border-top">
<button class="btn btn-sm mr-2" data-action="reverseContinue" title="Reverse Continue (go to first checkpoint)" disabled>
<svg viewBox="0 0 16 16" width="16" height="16"><path fill="currentColor" d="M2 2v12h2V8.5l5 4V8.5l5 4V2.5l-5 4V2.5l-5 4V2z"/></svg>
</button>
<button class="btn btn-sm mr-2" data-action="stepBack" title="Step Back" disabled>
<svg viewBox="0 0 16 16" width="16" height="16"><path fill="currentColor" d="M2 2v12h2V2H2zm3 6 7 5V3L5 8z"/></svg>
</button>
<button class="btn btn-sm btn-primary mr-2" data-action="continue" title="Continue" disabled>
<svg viewBox="0 0 16 16" width="16" height="16"><path fill="currentColor" d="M4 2l10 6-10 6z"/></svg>
</button>
<button class="btn btn-sm mr-2" data-action="next" title="Step to Next" disabled>
<svg viewBox="0 0 16 16" width="16" height="16"><path fill="currentColor" d="M2 3l7 5-7 5V3zm7 5l5 0V2h2v12h-2V8.5l-5 0z"/></svg>
</button>
<span class="dap-step-counter color-fg-muted ml-auto text-small">
Not connected
</span>
</div>
`;
}
/**
 * Inject the debugger pane into the page.
 *
 * Removes any previously injected pane, then inserts a fresh one inside
 * the <check-steps> container and wires up its event handlers.
 *
 * @returns {Element|null} the injected pane, or null when no
 *   <check-steps> container exists on the page
 */
function injectDebuggerPane() {
  // Never allow two panes: drop any earlier injection first.
  document.querySelector('.dap-debugger-pane')?.remove();

  const stepsContainer = document.querySelector('check-steps');
  if (!stepsContainer) {
    console.warn('[Content] No check-steps container found');
    return null;
  }

  const pane = document.createElement('div');
  pane.className = 'dap-debugger-pane mx-2 mb-2 border rounded-2';
  pane.innerHTML = createDebuggerPaneHTML();

  // Insert before the first real workflow step (skip "Set up job" at index 0).
  const steps = stepsContainer.querySelectorAll('check-step');
  const anchor = steps.length > 1 ? steps[1] : stepsContainer.firstChild;
  stepsContainer.insertBefore(pane, anchor);

  setupPaneEventHandlers(pane);
  debuggerPane = pane;
  return pane;
}
/**
 * Move the debugger pane so it sits immediately before a given step.
 *
 * @param {Element} stepElement - the <check-step> to position the pane before
 * @param {string} stepName - human-readable step name shown in the header
 */
function moveDebuggerPane(stepElement, stepName) {
  if (!debuggerPane || !stepElement) return;

  // Re-parenting an existing node moves it; no clone needed.
  stepElement.parentNode.insertBefore(debuggerPane, stepElement);

  const info = debuggerPane.querySelector('.dap-step-info');
  if (info) {
    info.textContent = `Paused before: ${stepName}`;
  }
}
/**
 * Wire up event handlers for the debugger pane.
 *
 * Every [data-action] button forwards its action name as a DAP command
 * (reverseContinue / stepBack / continue / next); the REPL input gets the
 * shared keydown handler.
 *
 * @param {Element} pane - the injected debugger pane root element
 */
function setupPaneEventHandlers(pane) {
  for (const button of pane.querySelectorAll('[data-action]')) {
    button.addEventListener('click', async () => {
      const action = button.dataset.action;
      // Optimistically lock the UI; a later stopped/terminated event re-enables it.
      enableControls(false);
      updateStatus('RUNNING');
      try {
        await sendDapRequest(action, { threadId: 1 });
      } catch (error) {
        console.error(`[Content] DAP ${action} failed:`, error);
        appendOutput(`Error: ${error.message}`, 'error');
        enableControls(true);
        updateStatus('ERROR');
      }
    });
  }

  const replInput = pane.querySelector('.dap-repl-input input');
  if (replInput) {
    replInput.addEventListener('keydown', handleReplKeydown);
  }
}
/**
 * Handle REPL input keydown.
 *
 * Enter submits the expression via DAP "evaluate" (shell commands prefixed
 * with "!" use the 'repl' context, everything else 'watch'); ArrowUp /
 * ArrowDown navigate the command history. The history cursor
 * (replHistoryIndex) sits one past the newest entry whenever the input is
 * "fresh", so the first ArrowUp recalls the most recent command.
 *
 * @param {KeyboardEvent} e - keydown event from the REPL input
 */
async function handleReplKeydown(e) {
const input = e.target;
if (e.key === 'Enter') {
const command = input.value.trim();
if (!command) return;
// Record the command and reset the history cursor past the end.
replHistory.push(command);
replHistoryIndex = replHistory.length;
input.value = '';
// Show command
appendOutput(`> ${command}`, 'input');
// Send to DAP
try {
const response = await sendDapRequest('evaluate', {
expression: command,
frameId: currentFrameId,
context: command.startsWith('!') ? 'repl' : 'watch',
});
// Only show result if it's NOT an exit code summary
// (shell command output is already streamed via output events)
if (response.result && !/^\(exit code: -?\d+\)$/.test(response.result)) {
appendOutput(response.result, 'result');
}
} catch (error) {
appendOutput(error.message, 'error');
}
} else if (e.key === 'ArrowUp') {
// Walk back through history; stops at the oldest entry.
if (replHistoryIndex > 0) {
replHistoryIndex--;
input.value = replHistory[replHistoryIndex];
}
e.preventDefault();
} else if (e.key === 'ArrowDown') {
// Walk forward; past the newest entry the input is cleared.
if (replHistoryIndex < replHistory.length - 1) {
replHistoryIndex++;
input.value = replHistory[replHistoryIndex];
} else {
replHistoryIndex = replHistory.length;
input.value = '';
}
e.preventDefault();
}
}
/**
 * Append output to the REPL console.
 *
 * Each line of the text becomes its own <div>, classed by output type
 * ('error' lines are red, 'input' lines muted); the console is then
 * scrolled to the bottom.
 *
 * @param {string} text - output text, possibly multi-line
 * @param {string} type - output category: 'input' | 'result' | 'error' | 'stdout'
 */
function appendOutput(text, type) {
  const output = document.querySelector('.dap-repl-output');
  if (!output) return;

  for (const line of text.split('\n')) {
    const row = document.createElement('div');
    row.className = `dap-output-${type}`;
    if (type === 'error') row.classList.add('color-fg-danger');
    if (type === 'input') row.classList.add('color-fg-muted');
    row.textContent = line;
    output.appendChild(row);
  }

  // Keep the newest output visible.
  output.scrollTop = output.scrollHeight;
}
/**
 * Enable or disable the pane's control buttons and REPL input.
 *
 * @param {boolean} enabled - true to enable interaction, false to lock it
 */
function enableControls(enabled) {
  if (!debuggerPane) return;
  const disabled = !enabled;
  for (const button of debuggerPane.querySelectorAll('.dap-controls button')) {
    button.disabled = disabled;
  }
  const replInput = debuggerPane.querySelector('.dap-repl-input input');
  if (replInput) {
    replInput.disabled = disabled;
  }
}
/**
 * Update the status label (and optionally the step counter) in the pane.
 *
 * @param {string} status - status text shown in the label (e.g. 'PAUSED')
 * @param {string} [extra] - optional text for the step counter
 */
function updateStatus(status, extra) {
  if (!debuggerPane) return;

  const label = debuggerPane.querySelector('.dap-status-label');
  if (label) {
    label.textContent = status;
    // Reset to the base classes, then add the color modifier for this status.
    label.className = 'Label dap-status-label ml-auto ';
    const modifierFor = {
      PAUSED: 'Label--attention',
      RUNNING: 'Label--success',
      TERMINATED: 'Label--secondary',
      DISCONNECTED: 'Label--secondary',
      ERROR: 'Label--danger',
    };
    label.classList.add(modifierFor[status] || 'Label--secondary');
  }

  // Update step counter if extra info provided.
  if (extra) {
    const counter = debuggerPane.querySelector('.dap-step-counter');
    if (counter) {
      counter.textContent = extra;
    }
  }
}
/**
 * Load scopes for the current frame into the variables panel.
 *
 * Fetches the DAP "scopes" for the given frame and renders one collapsed
 * tree node per scope; nodes with variablesReference > 0 are expandable
 * (children fetched lazily by toggleTreeNode). On failure, an escaped
 * error message is shown in the panel instead.
 *
 * @param {number} frameId - DAP stack frame id whose scopes to display
 */
async function loadScopes(frameId) {
const scopesContainer = document.querySelector('.dap-scope-tree');
if (!scopesContainer) return;
scopesContainer.innerHTML = '<div class="color-fg-muted">Loading...</div>';
try {
console.log('[Content] Loading scopes for frame:', frameId);
const response = await sendDapRequest('scopes', { frameId });
console.log('[Content] Scopes response:', response);
// Clear the "Loading..." placeholder before rendering results.
scopesContainer.innerHTML = '';
if (!response.scopes || response.scopes.length === 0) {
scopesContainer.innerHTML = '<div class="color-fg-muted">No scopes available</div>';
return;
}
for (const scope of response.scopes) {
console.log('[Content] Creating tree node for scope:', scope.name, 'variablesRef:', scope.variablesReference);
// Only mark as expandable if variablesReference > 0
const isExpandable = scope.variablesReference > 0;
const node = createTreeNode(scope.name, scope.variablesReference, isExpandable);
scopesContainer.appendChild(node);
}
} catch (error) {
console.error('[Content] Failed to load scopes:', error);
// Error text is user-controlled data; escape before innerHTML insertion.
scopesContainer.innerHTML = `<div class="color-fg-danger">Error: ${escapeHtml(error.message)}</div>`;
}
}
/**
 * Create a tree node for scope/variable display.
 *
 * @param {string} name - scope or variable name (shown bold)
 * @param {number} variablesReference - DAP reference for lazy child lookup
 * @param {boolean} isExpandable - whether to render an expand arrow
 * @param {string} [value] - optional value text appended after the name
 * @returns {Element} the tree node element
 */
function createTreeNode(name, variablesReference, isExpandable, value) {
  const node = document.createElement('div');
  node.className = 'dap-tree-node';
  // Stashed on the element so toggleTreeNode can fetch children later.
  node.dataset.variablesRef = variablesReference;

  const row = document.createElement('div');
  row.className = 'dap-tree-content';

  const expandIcon = document.createElement('span');
  expandIcon.className = 'dap-expand-icon';
  expandIcon.textContent = isExpandable ? '\u25B6' : ' '; // ▶ or space
  row.appendChild(expandIcon);

  const nameLabel = document.createElement('span');
  nameLabel.className = 'text-bold';
  nameLabel.textContent = name;
  row.appendChild(nameLabel);

  if (value !== undefined) {
    const valueLabel = document.createElement('span');
    valueLabel.className = 'color-fg-muted';
    valueLabel.textContent = `: ${value}`;
    row.appendChild(valueLabel);
  }

  node.appendChild(row);

  if (isExpandable && variablesReference > 0) {
    row.style.cursor = 'pointer';
    row.addEventListener('click', () => toggleTreeNode(node));
  }
  return node;
}
/**
 * Toggle tree node expansion.
 *
 * If children were already fetched, just flips their visibility. Otherwise
 * lazily fetches them via the DAP "variables" request, renders them as
 * nested tree nodes, and expands. On fetch failure the node collapses back
 * to the ▶ state so the user can retry.
 *
 * @param {Element} node - a tree node created by createTreeNode
 */
async function toggleTreeNode(node) {
  const children = node.querySelector('.dap-tree-children');
  const expandIcon = node.querySelector('.dap-expand-icon');
  if (children) {
    // Already loaded: toggle visibility only.
    children.hidden = !children.hidden;
    expandIcon.textContent = children.hidden ? '\u25B6' : '\u25BC'; // ▶ or ▼
    return;
  }
  // Fetch children. Explicit radix avoids parseInt's legacy prefix handling.
  const variablesRef = parseInt(node.dataset.variablesRef, 10);
  if (!variablesRef) return;
  expandIcon.textContent = '...';
  try {
    const response = await sendDapRequest('variables', { variablesReference: variablesRef });
    const childContainer = document.createElement('div');
    childContainer.className = 'dap-tree-children ml-3';
    for (const variable of response.variables) {
      const hasChildren = variable.variablesReference > 0;
      const childNode = createTreeNode(
        variable.name,
        variable.variablesReference,
        hasChildren,
        variable.value
      );
      childContainer.appendChild(childNode);
    }
    node.appendChild(childContainer);
    expandIcon.textContent = '\u25BC'; // ▼
  } catch (error) {
    console.error('[Content] Failed to load variables:', error);
    expandIcon.textContent = '\u25B6'; // ▶
  }
}
/**
 * Handle a "stopped" event from DAP.
 *
 * Marks the session paused, fetches the stack trace, moves the pane next
 * to the step the debugger stopped at (matching by name first, then by
 * step number), updates the step counter, and loads the frame's scopes.
 *
 * @param {object} body - DAP stopped-event body (reason, threadId, ...)
 */
async function handleStoppedEvent(body) {
console.log('[Content] Stopped event:', body);
isConnected = true;
updateStatus('PAUSED', body.reason || 'paused');
enableControls(true);
// Get current location
try {
const stackTrace = await sendDapRequest('stackTrace', { threadId: 1 });
if (stackTrace.stackFrames && stackTrace.stackFrames.length > 0) {
// Topmost frame is the step the debugger is paused before.
const currentFrame = stackTrace.stackFrames[0];
currentFrameId = currentFrame.id;
// Strip result indicator from step name for DOM lookup
// e.g., "Run tests [running]" -> "Run tests"
const rawStepName = stripResultIndicator(currentFrame.name);
let stepElement = findStepByName(rawStepName);
if (!stepElement && currentFrame.line > 0) {
// Fallback: use step number from Line property
// Add 1 to account for "Set up job" which is always step 1 in GitHub UI but not in DAP
stepElement = findStepByNumber(currentFrame.line + 1);
}
if (stepElement) {
moveDebuggerPane(stepElement, rawStepName);
}
// Update step counter
const counter = debuggerPane?.querySelector('.dap-step-counter');
if (counter) {
counter.textContent = `Step ${currentFrame.line || currentFrame.id} of ${stackTrace.stackFrames.length}`;
}
// Load scopes
await loadScopes(currentFrame.id);
}
} catch (error) {
console.error('[Content] Failed to get stack trace:', error);
appendOutput(`Error: ${error.message}`, 'error');
}
}
/**
 * Handle an "output" event from DAP.
 *
 * Streams program output into the REPL console; stderr is rendered as
 * error output, everything else as stdout. Trailing whitespace is trimmed
 * since appendOutput adds one element per line.
 *
 * @param {object} body - DAP output-event body (output, category, ...)
 */
function handleOutputEvent(body) {
  if (!body.output) return;
  const category = body.category === 'stderr' ? 'error' : 'stdout';
  appendOutput(body.output.trimEnd(), category);
}
/**
 * Handle a "terminated" event from DAP.
 *
 * Marks the session ended, locks the controls, and updates the header text.
 */
function handleTerminatedEvent() {
  isConnected = false;
  updateStatus('TERMINATED');
  enableControls(false);
  const info = debuggerPane?.querySelector('.dap-step-info');
  if (info) {
    info.textContent = 'Session ended';
  }
}
/**
 * Load the current debug state (used when the page loads while the session
 * is already paused, so no "stopped" event will arrive).
 *
 * NOTE(review): this intentionally mirrors the body of handleStoppedEvent
 * minus the status/controls updates — keep the two in sync if either changes.
 */
async function loadCurrentDebugState() {
if (!debuggerPane) return;
try {
const stackTrace = await sendDapRequest('stackTrace', { threadId: 1 });
if (stackTrace.stackFrames && stackTrace.stackFrames.length > 0) {
const currentFrame = stackTrace.stackFrames[0];
currentFrameId = currentFrame.id;
// Move pane to current step
// Strip result indicator from step name for DOM lookup
const rawStepName = stripResultIndicator(currentFrame.name);
let stepElement = findStepByName(rawStepName);
if (!stepElement && currentFrame.line > 0) {
// Fallback: use step number from Line property
// Add 1 to account for "Set up job" which is always step 1 in GitHub UI but not in DAP
stepElement = findStepByNumber(currentFrame.line + 1);
}
if (stepElement) {
moveDebuggerPane(stepElement, rawStepName);
}
// Update step counter
const counter = debuggerPane.querySelector('.dap-step-counter');
if (counter) {
counter.textContent = `Step ${currentFrame.line || currentFrame.id} of ${stackTrace.stackFrames.length}`;
}
// Load scopes
await loadScopes(currentFrame.id);
}
} catch (error) {
console.error('[Content] Failed to load current debug state:', error);
}
}
/**
 * Handle a connection status change pushed by the background script.
 *
 * Maps the background's lowercase status strings onto the pane's UI state:
 * label text, control enablement, and (when paused) a refresh of the
 * current debug state.
 *
 * @param {string} status - 'connected' | 'paused' | 'running' |
 *   'disconnected' | 'error'
 */
function handleStatusChange(status) {
  console.log('[Content] Status changed:', status);
  switch (status) {
    case 'connected': {
      // Braces scope this `const` to the case clause (avoids the
      // no-case-declarations pitfall of lexical bindings leaking across cases).
      isConnected = true;
      updateStatus('CONNECTED');
      const stepInfo = debuggerPane?.querySelector('.dap-step-info');
      if (stepInfo) {
        stepInfo.textContent = 'Waiting for debug event...';
      }
      break;
    }
    case 'paused':
      isConnected = true;
      updateStatus('PAUSED');
      enableControls(true);
      loadCurrentDebugState();
      break;
    case 'running':
      isConnected = true;
      updateStatus('RUNNING');
      enableControls(false);
      break;
    case 'disconnected':
      isConnected = false;
      updateStatus('DISCONNECTED');
      enableControls(false);
      break;
    case 'error':
      isConnected = false;
      updateStatus('ERROR');
      enableControls(false);
      break;
  }
}
/**
 * Listen for messages from the background script.
 *
 * Routes DAP protocol events (stopped / output / terminated) to their
 * handlers and applies connection status changes. The sendResponse
 * callback is intentionally unused — these messages are fire-and-forget.
 */
chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {
  console.log('[Content] Received message:', message.type);
  switch (message.type) {
    case 'dap-event': {
      // Braces scope this `const` to the case clause (no-case-declarations).
      const event = message.event;
      switch (event.event) {
        case 'stopped':
          handleStoppedEvent(event.body);
          break;
        case 'output':
          handleOutputEvent(event.body);
          break;
        case 'terminated':
          handleTerminatedEvent();
          break;
      }
      break;
    }
    case 'status-changed':
      handleStatusChange(message.status);
      break;
  }
});
/**
 * Inject the "Debug" toggle button into the GitHub Actions job header.
 *
 * Idempotent: does nothing if the button already exists or the header
 * search container is missing. Clicking the button toggles the debugger
 * pane's visibility, creating it (and syncing connection status) on the
 * first click.
 */
function injectDebugButton() {
const container = document.querySelector('.js-check-run-search');
if (!container || container.querySelector('.dap-debug-btn-container')) {
return; // Already injected or container not found
}
const buttonContainer = document.createElement('div');
buttonContainer.className = 'ml-2 dap-debug-btn-container';
buttonContainer.innerHTML = `
<button type="button" class="btn btn-sm dap-debug-btn" title="Toggle DAP Debugger">
<svg viewBox="0 0 16 16" width="16" height="16" class="octicon mr-1" style="vertical-align: text-bottom;">
<path fill="currentColor" d="M4.72.22a.75.75 0 0 1 1.06 0l1 1a.75.75 0 0 1-1.06 1.06l-.22-.22-.22.22a.75.75 0 0 1-1.06-1.06l1-1Z"/>
<path fill="currentColor" d="M11.28.22a.75.75 0 0 0-1.06 0l-1 1a.75.75 0 0 0 1.06 1.06l.22-.22.22.22a.75.75 0 0 0 1.06-1.06l-1-1Z"/>
<path fill="currentColor" d="M8 4a4 4 0 0 0-4 4v1h1v2.5a2.5 2.5 0 0 0 2.5 2.5h1a2.5 2.5 0 0 0 2.5-2.5V9h1V8a4 4 0 0 0-4-4Z"/>
<path fill="currentColor" d="M5 9H3.5a.5.5 0 0 0-.5.5v2a.5.5 0 0 0 .5.5H5V9ZM11 9h1.5a.5.5 0 0 1 .5.5v2a.5.5 0 0 1-.5.5H11V9Z"/>
</svg>
Debug
</button>
`;
const button = buttonContainer.querySelector('button');
button.addEventListener('click', () => {
let pane = document.querySelector('.dap-debugger-pane');
if (pane) {
// Toggle visibility
pane.hidden = !pane.hidden;
// "selected" styling tracks whether the pane is currently shown.
button.classList.toggle('selected', !pane.hidden);
} else {
// Create and show pane
pane = injectDebuggerPane();
if (pane) {
button.classList.add('selected');
// Check connection status after creating pane
chrome.runtime.sendMessage({ type: 'get-status' }, (response) => {
if (response && response.status) {
handleStatusChange(response.status);
}
});
}
}
});
// Insert at the beginning of the container
container.insertBefore(buttonContainer, container.firstChild);
console.log('[Content] Debug button injected');
}
/**
 * Initialize the content script.
 *
 * Waits (via MutationObserver) for <check-step> elements if the job page
 * hasn't rendered yet, injects the Debug button, then queries the
 * background for the current connection status. If a session is already
 * connected/paused, the pane is auto-shown and (when paused) the current
 * debug state is loaded immediately.
 */
function init() {
console.log('[Content] Actions DAP Debugger content script loaded');
// Check if we're on a job page
const steps = getAllSteps();
if (steps.length === 0) {
console.log('[Content] No steps found, waiting for DOM...');
// Wait for steps to appear
const observer = new MutationObserver((mutations) => {
const steps = getAllSteps();
if (steps.length > 0) {
// Disconnect first so later DOM churn doesn't re-trigger injection.
observer.disconnect();
console.log('[Content] Steps found, injecting debug button');
injectDebugButton();
}
});
observer.observe(document.body, { childList: true, subtree: true });
return;
}
// Inject debug button in header (user can click to show debugger pane)
injectDebugButton();
// Check current connection status
chrome.runtime.sendMessage({ type: 'get-status' }, async (response) => {
if (response && response.status) {
handleStatusChange(response.status);
// If already connected/paused, auto-show the debugger pane
if (response.status === 'paused' || response.status === 'connected') {
const pane = document.querySelector('.dap-debugger-pane');
if (!pane) {
injectDebuggerPane();
const btn = document.querySelector('.dap-debug-btn');
if (btn) btn.classList.add('selected');
}
// If already paused, load the current debug state
if (response.status === 'paused') {
await loadCurrentDebugState();
}
}
}
});
}
// Initialize when DOM is ready. The content script runs at document_idle,
// so the document is usually already parsed and init() runs immediately;
// the listener covers the rare 'loading' case.
if (document.readyState === 'loading') {
document.addEventListener('DOMContentLoaded', init);
} else {
init();
}

View File

@@ -0,0 +1,135 @@
#!/usr/bin/env node
/**
* Create simple green circle PNG icons
* No dependencies required - uses pure JavaScript to create valid PNG files
*/
const fs = require('fs');
const path = require('path');
const zlib = require('zlib');
/**
 * Create a square RGBA PNG: a green disc with a white "bug" silhouette.
 *
 * Builds raw scanline data (each row = 1 filter byte + size RGBA pixels),
 * deflates it, and assembles the PNG byte stream (signature + IHDR +
 * IDAT + IEND) via createChunk.
 *
 * Cleanup vs. original: the second loop computed an unused `dist` and the
 * aliases bodyDx/headDx (both identical to dx); the dead code is removed
 * with no change to the generated pixels.
 *
 * @param {number} size - icon width/height in pixels
 * @returns {Buffer} complete PNG file contents
 */
function createPNG(size) {
  // PNG uses RGBA format, one pixel = 4 bytes
  const pixelData = [];
  const centerX = size / 2;
  const centerY = size / 2;
  const radius = size / 2 - 1;
  const innerRadius = radius * 0.4;

  // Pass 1: green disc on a transparent background.
  for (let y = 0; y < size; y++) {
    pixelData.push(0); // filter byte ("None") for each row
    for (let x = 0; x < size; x++) {
      const dx = x - centerX;
      const dy = y - centerY;
      const dist = Math.sqrt(dx * dx + dy * dy);
      if (dist <= radius) {
        pixelData.push(35, 134, 54, 255); // green circle (#238636)
      } else {
        pixelData.push(0, 0, 0, 0); // transparent
      }
    }
  }

  // Pass 2: overwrite pixels with white where the bug silhouette lands.
  for (let y = 0; y < size; y++) {
    for (let x = 0; x < size; x++) {
      const dx = x - centerX;
      const dy = y - centerY;
      // Bug body: vertically stretched oval slightly below center.
      const bodyDy = (dy - size * 0.05) / 1.3;
      const bodyDist = Math.sqrt(dx * dx + bodyDy * bodyDy);
      // Bug head: smaller circle above the body.
      const headDy = dy + size * 0.15;
      const headDist = Math.sqrt(dx * dx + headDy * headDy);
      if (bodyDist < innerRadius || headDist < innerRadius * 0.6) {
        // Skip y full rows (1 filter byte + size*4 pixel bytes), plus this
        // row's filter byte, then 4 bytes per pixel.
        const idx = 1 + y * (1 + size * 4) + x * 4;
        pixelData[idx] = 255;
        pixelData[idx + 1] = 255;
        pixelData[idx + 2] = 255;
        pixelData[idx + 3] = 255;
      }
    }
  }

  const rawData = Buffer.from(pixelData);
  const compressed = zlib.deflateSync(rawData);

  // Build PNG file
  const chunks = [];
  // PNG signature
  chunks.push(Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]));
  // IHDR chunk
  const ihdr = Buffer.alloc(13);
  ihdr.writeUInt32BE(size, 0); // width
  ihdr.writeUInt32BE(size, 4); // height
  ihdr.writeUInt8(8, 8); // bit depth
  ihdr.writeUInt8(6, 9); // color type (RGBA)
  ihdr.writeUInt8(0, 10); // compression
  ihdr.writeUInt8(0, 11); // filter
  ihdr.writeUInt8(0, 12); // interlace
  chunks.push(createChunk('IHDR', ihdr));
  // IDAT chunk
  chunks.push(createChunk('IDAT', compressed));
  // IEND chunk
  chunks.push(createChunk('IEND', Buffer.alloc(0)));
  return Buffer.concat(chunks);
}
/**
 * Build one PNG chunk: 4-byte big-endian length, 4-byte type, payload,
 * and a CRC32 over type + payload (per the PNG spec, length is excluded
 * from the CRC).
 *
 * @param {string} type - 4-character chunk type, e.g. 'IHDR'
 * @param {Buffer} data - chunk payload (may be empty)
 * @returns {Buffer} the encoded chunk
 */
function createChunk(type, data) {
  const typeBuffer = Buffer.from(type);
  const lengthBuffer = Buffer.alloc(4);
  lengthBuffer.writeUInt32BE(data.length, 0);
  const crcBuffer = Buffer.alloc(4);
  crcBuffer.writeUInt32BE(crc32(Buffer.concat([typeBuffer, data])), 0);
  return Buffer.concat([lengthBuffer, typeBuffer, data, crcBuffer]);
}
/**
 * CRC32 implementation (table-driven, reflected; standard pre/post
 * inversion). Uses the module-level crc32Table lookup table.
 *
 * @param {Buffer} buf - bytes to checksum
 * @returns {number} unsigned 32-bit CRC
 */
function crc32(buf) {
  let crc = 0xffffffff;
  for (const byte of buf) {
    crc = crc32Table[(crc ^ byte) & 0xff] ^ (crc >>> 8);
  }
  return (crc ^ 0xffffffff) >>> 0;
}
// CRC32 lookup table, built once at module load.
// 0xedb88320 is the reflected form of the standard CRC-32 polynomial.
const crc32Table = new Uint32Array(256);
for (let i = 0; i < 256; i++) {
let c = i;
for (let j = 0; j < 8; j++) {
c = c & 1 ? 0xedb88320 ^ (c >>> 1) : c >>> 1;
}
crc32Table[i] = c;
}
// Generate icons at each size the manifest references, next to this script.
const iconsDir = path.join(__dirname);
const sizes = [16, 48, 128];
sizes.forEach((size) => {
  const png = createPNG(size);
  const filename = `icon${size}.png`;
  fs.writeFileSync(path.join(iconsDir, filename), png);
  // Fixed: the log previously emitted a literal "$(unknown)" — a shell-style
  // substitution that never worked in a JS template literal.
  console.log(`Created ${filename} (${size}x${size})`);
});
console.log('Done!');

Binary file not shown.

After

Width:  |  Height:  |  Size: 872 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 126 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 258 B

32
browser-ext/manifest.json Normal file
View File

@@ -0,0 +1,32 @@
{
"manifest_version": 3,
"name": "Actions DAP Debugger",
"version": "0.1.0",
"description": "Debug GitHub Actions workflows with DAP - interactive debugging directly in the browser",
"permissions": ["activeTab", "storage"],
"host_permissions": ["https://github.com/*"],
"background": {
"service_worker": "background/background.js"
},
"content_scripts": [
{
"matches": ["https://github.com/*/*/actions/runs/*/job/*"],
"js": ["lib/dap-protocol.js", "content/content.js"],
"css": ["content/content.css"],
"run_at": "document_idle"
}
],
"action": {
"default_popup": "popup/popup.html",
"default_icon": {
"16": "icons/icon16.png",
"48": "icons/icon48.png",
"128": "icons/icon128.png"
}
},
"icons": {
"16": "icons/icon16.png",
"48": "icons/icon48.png",
"128": "icons/icon128.png"
}
}

228
browser-ext/popup/popup.css Normal file
View File

@@ -0,0 +1,228 @@
/**
* Popup Styles
*
* GitHub-inspired dark theme for the extension popup.
*/
/* Global reset: border-box sizing, no default margins/padding */
* {
box-sizing: border-box;
margin: 0;
padding: 0;
}
body {
width: 320px;
padding: 16px;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif;
font-size: 14px;
background-color: #0d1117;
color: #e6edf3;
}
h3 {
display: flex;
align-items: center;
gap: 8px;
margin: 0 0 16px 0;
font-size: 16px;
font-weight: 600;
}
h3 .icon {
flex-shrink: 0;
}
/* Status Section */
.status-section {
display: flex;
align-items: center;
margin-bottom: 16px;
padding: 12px;
background-color: #161b22;
border-radius: 6px;
border: 1px solid #30363d;
}
/* Colored dot reflecting connection state; class set by popup.js */
.status-indicator {
width: 10px;
height: 10px;
border-radius: 50%;
margin-right: 10px;
flex-shrink: 0;
}
.status-disconnected {
background-color: #6e7681;
}
.status-connecting {
background-color: #9e6a03;
animation: pulse 1.5s ease-in-out infinite;
}
.status-connected {
background-color: #238636;
}
.status-paused {
background-color: #9e6a03;
}
.status-running {
background-color: #238636;
animation: pulse 1.5s ease-in-out infinite;
}
.status-error {
background-color: #da3633;
}
/* Soft blink used by the "connecting" and "running" indicators */
@keyframes pulse {
0%, 100% {
opacity: 1;
}
50% {
opacity: 0.5;
}
}
#status-text {
font-weight: 500;
}
/* Config Section */
.config-section {
margin-bottom: 16px;
}
.config-section label {
display: block;
margin-bottom: 12px;
font-size: 12px;
font-weight: 500;
color: #8b949e;
}
.config-section input {
display: block;
width: 100%;
padding: 8px 12px;
margin-top: 6px;
background-color: #0d1117;
border: 1px solid #30363d;
border-radius: 6px;
color: #e6edf3;
font-size: 14px;
}
.config-section input:focus {
border-color: #1f6feb;
outline: none;
box-shadow: 0 0 0 3px rgba(31, 111, 235, 0.3);
}
.config-section input:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.config-hint {
font-size: 11px;
color: #6e7681;
margin-top: 4px;
}
/* Actions Section */
.actions-section {
display: flex;
gap: 8px;
margin-bottom: 16px;
}
button {
flex: 1;
padding: 10px 16px;
border: none;
border-radius: 6px;
font-size: 14px;
font-weight: 500;
cursor: pointer;
transition: background-color 0.15s ease;
}
button:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.btn-primary {
background-color: #238636;
color: white;
}
.btn-primary:hover:not(:disabled) {
background-color: #2ea043;
}
.btn-secondary {
background-color: #21262d;
color: #e6edf3;
border: 1px solid #30363d;
}
.btn-secondary:hover:not(:disabled) {
background-color: #30363d;
}
/* Help Section */
.help-section {
font-size: 12px;
color: #8b949e;
background-color: #161b22;
border: 1px solid #30363d;
border-radius: 6px;
padding: 12px;
margin-bottom: 12px;
}
.help-section p {
margin: 6px 0;
line-height: 1.5;
}
.help-section p:first-child {
margin-top: 0;
}
.help-section strong {
color: #e6edf3;
}
.help-section code {
display: block;
background-color: #0d1117;
padding: 8px;
border-radius: 4px;
font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, monospace;
font-size: 11px;
overflow-x: auto;
margin: 8px 0;
white-space: nowrap;
}
/* Footer */
.footer {
text-align: center;
padding-top: 8px;
border-top: 1px solid #21262d;
}
.footer a {
color: #58a6ff;
text-decoration: none;
font-size: 12px;
}
.footer a:hover {
text-decoration: underline;
}

View File

@@ -0,0 +1,52 @@
<!DOCTYPE html>
<!-- Extension popup: connection status, proxy URL config, connect/disconnect
     actions, and quick-start help. Behavior is wired up by popup.js. -->
<html>
<head>
<meta charset="UTF-8">
<link rel="stylesheet" href="popup.css">
</head>
<body>
<div class="popup-container">
<h3>
<svg class="icon" viewBox="0 0 16 16" width="16" height="16">
<path fill="currentColor" d="M4.72.22a.75.75 0 0 1 1.06 0l1 1a.75.75 0 0 1-1.06 1.06l-.22-.22-.22.22a.75.75 0 0 1-1.06-1.06l1-1Z"/>
<path fill="currentColor" d="M11.28.22a.75.75 0 0 0-1.06 0l-1 1a.75.75 0 0 0 1.06 1.06l.22-.22.22.22a.75.75 0 0 0 1.06-1.06l-1-1Z"/>
<path fill="currentColor" d="M8 4a4 4 0 0 0-4 4v1h1v2.5a2.5 2.5 0 0 0 2.5 2.5h1a2.5 2.5 0 0 0 2.5-2.5V9h1V8a4 4 0 0 0-4-4Z"/>
<path fill="currentColor" d="M5 9H3.5a.5.5 0 0 0-.5.5v2a.5.5 0 0 0 .5.5H5V9ZM11 9h1.5a.5.5 0 0 1 .5.5v2a.5.5 0 0 1-.5.5H11V9Z"/>
</svg>
Actions DAP Debugger
</h3>
<!-- Indicator dot + text; classes/content updated by popup.js -->
<div class="status-section">
<div class="status-indicator" id="status-indicator"></div>
<span id="status-text">Disconnected</span>
</div>
<div class="config-section">
<label>
Proxy URL
<input type="text" id="proxy-url" value="ws://localhost:4712"
placeholder="ws://localhost:4712 or wss://...">
</label>
<p class="config-hint">For codespaces, use the forwarded URL (wss://...)</p>
</div>
<div class="actions-section">
<button id="connect-btn" class="btn-primary">Connect</button>
<button id="disconnect-btn" class="btn-secondary" disabled>Disconnect</button>
</div>
<div class="help-section">
<p><strong>Quick Start:</strong></p>
<p>1. Start the proxy:</p>
<code>cd browser-ext/proxy && npm install && node proxy.js</code>
<p>2. Re-run your GitHub Actions job with "Enable debug logging"</p>
<p>3. Click Connect when the job is waiting for debugger</p>
</div>
<div class="footer">
<a href="https://github.com/actions/runner" target="_blank">Documentation</a>
</div>
</div>
<script src="popup.js"></script>
</body>
</html>

View File

@@ -0,0 +1,95 @@
/**
 * Popup Script
 *
 * Drives the extension popup: restores the saved proxy URL, mirrors the
 * background worker's connection state, and relays connect/disconnect
 * requests to it.
 */
document.addEventListener('DOMContentLoaded', function () {
  // Cache the popup's interactive elements once.
  const indicatorEl = document.getElementById('status-indicator');
  const statusTextEl = document.getElementById('status-text');
  const btnConnect = document.getElementById('connect-btn');
  const btnDisconnect = document.getElementById('disconnect-btn');
  const urlField = document.getElementById('proxy-url');

  /**
   * Reflect the given connection status in the popup widgets.
   * @param {string} status - disconnected | connecting | connected | paused | running | error
   * @param {boolean} [reconnecting=false] - true while an automatic reconnect is in progress
   */
  function renderStatus(status, reconnecting = false) {
    // Human-readable label for each known status; unknown values shown verbatim.
    let label;
    switch (status) {
      case 'disconnected':
        label = 'Disconnected';
        break;
      case 'connecting':
        label = reconnecting ? 'Reconnecting...' : 'Connecting...';
        break;
      case 'connected':
        label = 'Connected';
        break;
      case 'paused':
        label = 'Paused';
        break;
      case 'running':
        label = 'Running';
        break;
      case 'error':
        label = 'Connection Error';
        break;
      default:
        label = status;
    }
    statusTextEl.textContent = label;

    // The indicator's color is driven entirely by its status-* CSS class.
    indicatorEl.className = 'status-indicator status-' + status;

    const connected =
      status === 'connected' || status === 'paused' || status === 'running';
    const connecting = status === 'connecting';

    btnConnect.disabled = connected || connecting;
    btnDisconnect.disabled = status === 'disconnected';

    btnConnect.textContent = connecting
      ? (reconnecting ? 'Reconnecting...' : 'Connecting...')
      : 'Connect';

    // Lock the URL field while a session is active or being established.
    urlField.disabled = connected || connecting;
  }

  // Restore the previously saved proxy URL, if any.
  chrome.storage.local.get(['proxyUrl'], function (data) {
    if (data.proxyUrl) urlField.value = data.proxyUrl;
  });

  // Ask the background worker for the current status when the popup opens.
  chrome.runtime.sendMessage({ type: 'get-status' }, function (response) {
    if (response) {
      renderStatus(response.status, response.reconnecting);
    }
  });

  // Track status changes pushed by the background worker while open.
  chrome.runtime.onMessage.addListener(function (message) {
    if (message.type === 'status-changed') {
      renderStatus(message.status, message.reconnecting);
    }
  });

  // Connect using the entered (or default) proxy URL.
  btnConnect.addEventListener('click', function () {
    const url = urlField.value.trim() || 'ws://localhost:4712';
    // Persist the chosen endpoint for next time.
    chrome.storage.local.set({ proxyUrl: url });
    // Show feedback immediately, before the worker responds.
    renderStatus('connecting');
    chrome.runtime.sendMessage({ type: 'connect', url }, function (response) {
      if (response && response.status) {
        renderStatus(response.status);
      }
    });
  });

  // Tear down the current connection.
  btnDisconnect.addEventListener('click', function () {
    chrome.runtime.sendMessage({ type: 'disconnect' }, function (response) {
      if (response && response.status) {
        renderStatus(response.status);
      }
    });
  });
});

36
browser-ext/proxy/package-lock.json generated Normal file
View File

@@ -0,0 +1,36 @@
{
"name": "dap-websocket-proxy",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "dap-websocket-proxy",
"version": "1.0.0",
"dependencies": {
"ws": "^8.16.0"
}
},
"node_modules/ws": {
"version": "8.19.0",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz",
"integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==",
"license": "MIT",
"engines": {
"node": ">=10.0.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": ">=5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
}
}
}

View File

@@ -0,0 +1,12 @@
{
"name": "dap-websocket-proxy",
"version": "1.0.0",
"description": "WebSocket-to-TCP bridge for DAP debugging",
"main": "proxy.js",
"scripts": {
"start": "node proxy.js"
},
"dependencies": {
"ws": "^8.16.0"
}
}

207
browser-ext/proxy/proxy.js Normal file
View File

@@ -0,0 +1,207 @@
/**
 * DAP WebSocket-to-TCP Proxy
 *
 * Bridges WebSocket connections from browser extensions to the DAP TCP server.
 * Handles DAP message framing (Content-Length headers).
 *
 * Usage: node proxy.js [--ws-port 4712] [--dap-host 127.0.0.1] [--dap-port 4711]
 */
const WebSocket = require('ws');
const net = require('net');
// Configuration (can be overridden via CLI args)
// Precedence: CLI args (parsed below) > environment variables > defaults.
// NOTE(review): parseInt yields NaN for malformed values, which falls
// through to the default via `||`.
const config = {
wsPort: parseInt(process.env.WS_PORT) || 4712,
dapHost: process.env.DAP_HOST || '127.0.0.1',
dapPort: parseInt(process.env.DAP_PORT) || 4711,
};
// Parse CLI arguments
// argv[0] is node, argv[1] is the script path, so options start at index 2.
// Each option consumes the following argv entry as its value (the `++i`).
for (let i = 2; i < process.argv.length; i++) {
switch (process.argv[i]) {
case '--ws-port':
config.wsPort = parseInt(process.argv[++i]);
break;
case '--dap-host':
config.dapHost = process.argv[++i];
break;
case '--dap-port':
config.dapPort = parseInt(process.argv[++i]);
break;
}
}
// Announce the effective configuration at startup.
console.log(`[Proxy] Starting WebSocket-to-TCP proxy`);
console.log(`[Proxy] WebSocket: ws://localhost:${config.wsPort}`);
console.log(`[Proxy] DAP Server: tcp://${config.dapHost}:${config.dapPort}`);
const wss = new WebSocket.Server({
port: config.wsPort,
// Enable ping/pong for connection health checks
clientTracking: true,
});
console.log(`[Proxy] WebSocket server listening on port ${config.wsPort}`);
// Ping all clients every 25 seconds to detect dead connections
// This is shorter than Chrome's service worker timeout (~30s)
const PING_INTERVAL = 25000;
const pingInterval = setInterval(() => {
wss.clients.forEach((ws) => {
// `isAlive` is set true on every pong (see the connection handler);
// a client that never answered the previous ping is considered dead.
if (ws.isAlive === false) {
console.log(`[Proxy] Client failed to respond to ping, terminating`);
return ws.terminate();
}
ws.isAlive = false;
ws.ping();
});
}, PING_INTERVAL);
// Per-client bridge: each WebSocket client gets its own TCP connection to the
// DAP server, with Content-Length framing translated in both directions.
wss.on('connection', (ws, req) => {
  const clientId = `${req.socket.remoteAddress}:${req.socket.remotePort}`;
  console.log(`[Proxy] WebSocket client connected: ${clientId}`);
  // Mark as alive for ping/pong tracking (see the ping interval above).
  ws.isAlive = true;
  ws.on('pong', () => {
    ws.isAlive = true;
  });
  // Connect to DAP TCP server
  const tcp = net.createConnection({
    host: config.dapHost,
    port: config.dapPort,
  });
  // Accumulate raw BYTES from the TCP side. Content-Length counts bytes, so
  // framing must operate on a Buffer: decoding to a JS string first and
  // indexing by string length (UTF-16 code units) would desynchronize the
  // stream as soon as a message contains a multi-byte UTF-8 character.
  let tcpBuffer = Buffer.alloc(0);
  let tcpConnected = false;
  tcp.on('connect', () => {
    tcpConnected = true;
    console.log(`[Proxy] Connected to DAP server at ${config.dapHost}:${config.dapPort}`);
  });
  tcp.on('error', (err) => {
    // Stop forwarding once the TCP side is known to be unusable.
    tcpConnected = false;
    console.error(`[Proxy] TCP error: ${err.message}`);
    if (ws.readyState === WebSocket.OPEN) {
      ws.send(
        JSON.stringify({
          type: 'proxy-error',
          message: `Failed to connect to DAP server: ${err.message}`,
        })
      );
      ws.close(1011, 'DAP server connection failed');
    }
  });
  tcp.on('close', () => {
    tcpConnected = false;
    console.log(`[Proxy] TCP connection closed`);
    if (ws.readyState === WebSocket.OPEN) {
      ws.close(1000, 'DAP server disconnected');
    }
  });
  // WebSocket → TCP: Add Content-Length framing
  ws.on('message', (data) => {
    const json = data.toString();
    try {
      // Validate it's valid JSON
      const parsed = JSON.parse(json);
      // Handle keepalive messages from the browser extension - don't forward to DAP server
      if (parsed.type === 'keepalive') {
        console.log(`[Proxy] Keepalive received from client`);
        // Respond with a keepalive-ack to confirm the connection is alive
        if (ws.readyState === WebSocket.OPEN) {
          ws.send(JSON.stringify({ type: 'keepalive-ack', timestamp: Date.now() }));
        }
        return;
      }
      if (!tcpConnected) {
        console.warn(`[Proxy] TCP not connected, dropping message`);
        return;
      }
      console.log(`[Proxy] WS→TCP: ${parsed.command || parsed.event || 'message'}`);
      // Add DAP framing; Buffer.byteLength gives the UTF-8 byte count.
      const framed = `Content-Length: ${Buffer.byteLength(json)}\r\n\r\n${json}`;
      tcp.write(framed);
    } catch (err) {
      console.error(`[Proxy] Invalid JSON from WebSocket: ${err.message}`);
    }
  });
  // TCP → WebSocket: Parse Content-Length framing
  tcp.on('data', (chunk) => {
    tcpBuffer = Buffer.concat([tcpBuffer, chunk]);
    // Process complete DAP messages from buffer
    while (true) {
      // Look for the end of the header section
      const headerEnd = tcpBuffer.indexOf('\r\n\r\n');
      if (headerEnd === -1) break;
      const header = tcpBuffer.subarray(0, headerEnd).toString('utf8');
      const match = header.match(/Content-Length:\s*(\d+)/i);
      if (!match) {
        console.error(`[Proxy] Invalid DAP header: ${header}`);
        tcpBuffer = tcpBuffer.subarray(headerEnd + 4);
        continue;
      }
      const contentLength = parseInt(match[1], 10);
      const messageStart = headerEnd + 4;
      const messageEnd = messageStart + contentLength;
      // Wait for more data if the body hasn't fully arrived (byte count).
      if (tcpBuffer.length < messageEnd) break;
      // Extract the JSON payload and drop the consumed bytes.
      const json = tcpBuffer.subarray(messageStart, messageEnd).toString('utf8');
      tcpBuffer = tcpBuffer.subarray(messageEnd);
      // Send to WebSocket
      try {
        const parsed = JSON.parse(json);
        console.log(
          `[Proxy] TCP→WS: ${parsed.type} ${parsed.command || parsed.event || ''} ${parsed.request_seq ? `(req_seq: ${parsed.request_seq})` : ''}`
        );
        if (ws.readyState === WebSocket.OPEN) {
          ws.send(json);
        }
      } catch (err) {
        console.error(`[Proxy] Invalid JSON from TCP: ${err.message}`);
      }
    }
  });
  // Handle WebSocket close
  ws.on('close', (code, reason) => {
    console.log(`[Proxy] WebSocket closed: ${code} ${reason}`);
    tcp.end();
  });
  ws.on('error', (err) => {
    console.error(`[Proxy] WebSocket error: ${err.message}`);
    tcp.end();
  });
});
// Surface server-level WebSocket failures in the log.
wss.on('error', function (err) {
  console.error(`[Proxy] WebSocket server error: ${err.message}`);
});
// Graceful shutdown: stop the health-check pings, close every client with a
// "going away" code, then exit once the server has fully shut down.
process.on('SIGINT', function () {
  console.log(`\n[Proxy] Shutting down...`);
  clearInterval(pingInterval);
  for (const client of wss.clients) {
    client.close(1001, 'Server shutting down');
  }
  wss.close(function () {
    console.log(`[Proxy] Goodbye!`);
    process.exit(0);
  });
});

View File

@@ -1,12 +1,12 @@
# Source: https://github.com/dotnet/dotnet-docker
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-jammy AS build
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-noble AS build
ARG TARGETOS
ARG TARGETARCH
ARG RUNNER_VERSION
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.7.0
ARG DOCKER_VERSION=28.5.1
ARG BUILDX_VERSION=0.29.1
ARG DOCKER_VERSION=29.0.2
ARG BUILDX_VERSION=0.30.1
RUN apt update -y && apt install curl unzip -y
@@ -33,15 +33,15 @@ RUN export RUNNER_ARCH=${TARGETARCH} \
&& rm -rf docker.tgz \
&& mkdir -p /usr/local/lib/docker/cli-plugins \
&& curl -fLo /usr/local/lib/docker/cli-plugins/docker-buildx \
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
&& chmod +x /usr/local/lib/docker/cli-plugins/docker-buildx
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-jammy
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-noble
ENV DEBIAN_FRONTEND=noninteractive
ENV RUNNER_MANUALLY_TRAP_SIG=1
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
ENV ImageOS=ubuntu22
ENV ImageOS=ubuntu24
# 'gpg-agent' and 'software-properties-common' are needed for the 'add-apt-repository' command that follows
RUN apt update -y \
@@ -59,7 +59,8 @@ RUN adduser --disabled-password --gecos "" --uid 1001 runner \
&& usermod -aG sudo runner \
&& usermod -aG docker runner \
&& echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers \
&& chmod 777 /home/runner
WORKDIR /home/runner

View File

@@ -1,43 +1,27 @@
## What's Changed
* Update safe_sleep.sh for bug when scheduler is paused for more than 1 second by @horner in https://github.com/actions/runner/pull/3157
* Acknowledge runner request by @ericsciple in https://github.com/actions/runner/pull/3996
* Update Docker to v28.3.3 and Buildx to v0.27.0 by @github-actions[bot] in https://github.com/actions/runner/pull/3999
* Update dotnet sdk to latest version @8.0.413 by @github-actions[bot] in https://github.com/actions/runner/pull/4000
* Bump actions/attest-build-provenance from 2 to 3 by @dependabot[bot] in https://github.com/actions/runner/pull/4002
* Bump @typescript-eslint/eslint-plugin from 6.7.2 to 8.35.0 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/3920
* Bump husky from 8.0.3 to 9.1.7 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/3842
* Bump @vercel/ncc from 0.38.0 to 0.38.3 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/3841
* Bump eslint-plugin-github from 4.10.0 to 4.10.2 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/3180
* Bump typescript from 5.2.2 to 5.9.2 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/4007
* chore: migrate Husky config from v8 to v9 format by @salmanmkc in https://github.com/actions/runner/pull/4003
* Map RUNNER_TEMP for container action by @ericsciple in https://github.com/actions/runner/pull/4011
* Break UseV2Flow into UseV2Flow and UseRunnerAdminFlow. by @TingluoHuang in https://github.com/actions/runner/pull/4013
* Update Docker to v28.4.0 and Buildx to v0.28.0 by @github-actions[bot] in https://github.com/actions/runner/pull/4020
* Bump node.js to latest version in runner. by @TingluoHuang in https://github.com/actions/runner/pull/4022
* feat: add automated .NET dependency management workflow by @salmanmkc in https://github.com/actions/runner/pull/4028
* feat: add automated Docker BuildX dependency management workflow by @salmanmkc in https://github.com/actions/runner/pull/4029
* feat: add automated Node.js version management workflow by @salmanmkc in https://github.com/actions/runner/pull/4026
* feat: add comprehensive NPM security management workflow by @salmanmkc in https://github.com/actions/runner/pull/4027
* feat: add comprehensive dependency monitoring system by @salmanmkc in https://github.com/actions/runner/pull/4025
* Use BrokerURL when using RunnerAdmin by @luketomlinson in https://github.com/actions/runner/pull/4044
* Bump actions/github-script from 7.0.1 to 8.0.0 by @dependabot[bot] in https://github.com/actions/runner/pull/4016
* Bump actions/stale from 9 to 10 by @dependabot[bot] in https://github.com/actions/runner/pull/4015
* fix: prevent Node.js upgrade workflow from creating PRs with empty versions by @salmanmkc in https://github.com/actions/runner/pull/4055
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4057
* Bump actions/setup-node from 4 to 5 by @dependabot[bot] in https://github.com/actions/runner/pull/4037
* Bump Azure.Storage.Blobs from 12.25.0 to 12.25.1 by @dependabot[bot] in https://github.com/actions/runner/pull/4058
* Update Docker to v28.5.0 and Buildx to v0.29.1 by @github-actions[bot] in https://github.com/actions/runner/pull/4069
* Bump github/codeql-action from 3 to 4 by @dependabot[bot] in https://github.com/actions/runner/pull/4072
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4075
* Include k8s novolume (version v0.8.0) by @nikola-jokic in https://github.com/actions/runner/pull/4063
* Make sure runner-admin has both auth_url and auth_url_v2. by @TingluoHuang in https://github.com/actions/runner/pull/4066
* Report job has infra failure to run-service by @TingluoHuang in https://github.com/actions/runner/pull/4073
* Bump actions/setup-node from 5 to 6 by @dependabot[bot] in https://github.com/actions/runner/pull/4078
* Fix owner of /home/runner directory by @nikola-jokic in https://github.com/actions/runner/pull/4132
* Update Docker to v29.0.2 and Buildx to v0.30.1 by @github-actions[bot] in https://github.com/actions/runner/pull/4135
* Update workflow around runner docker image. by @TingluoHuang in https://github.com/actions/runner/pull/4133
* Fix regex for validating runner version format by @TingluoHuang in https://github.com/actions/runner/pull/4136
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4144
* Ensure safe_sleep tries alternative approaches by @TingluoHuang in https://github.com/actions/runner/pull/4146
* Bump actions/github-script from 7 to 8 by @dependabot[bot] in https://github.com/actions/runner/pull/4137
* Bump actions/checkout from 5 to 6 by @dependabot[bot] in https://github.com/actions/runner/pull/4130
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4149
* Bump docker image to use ubuntu 24.04 by @TingluoHuang in https://github.com/actions/runner/pull/4018
* Add support for case function by @AllanGuigou in https://github.com/actions/runner/pull/4147
* Cleanup feature flag actions_container_action_runner_temp by @ericsciple in https://github.com/actions/runner/pull/4163
* Bump actions/download-artifact from 6 to 7 by @dependabot[bot] in https://github.com/actions/runner/pull/4155
* Bump actions/upload-artifact from 5 to 6 by @dependabot[bot] in https://github.com/actions/runner/pull/4157
* Set ACTIONS_ORCHESTRATION_ID as env to actions. by @TingluoHuang in https://github.com/actions/runner/pull/4178
* Allow hosted VM report job telemetry via .setup_info file. by @TingluoHuang in https://github.com/actions/runner/pull/4186
* Bump typescript from 5.9.2 to 5.9.3 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/4184
* Bump Azure.Storage.Blobs from 12.26.0 to 12.27.0 by @dependabot[bot] in https://github.com/actions/runner/pull/4189
## New Contributors
* @horner made their first contribution in https://github.com/actions/runner/pull/3157
* @AllanGuigou made their first contribution in https://github.com/actions/runner/pull/4147
**Full Changelog**: https://github.com/actions/runner/compare/v2.328.0...v2.329.0
**Full Changelog**: https://github.com/actions/runner/compare/v2.330.0...v2.331.0
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository._

View File

@@ -1,5 +1,5 @@
{
"plugins": ["@typescript-eslint"],
"plugins": ["@typescript-eslint", "@stylistic"],
"extends": ["plugin:github/recommended"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
@@ -26,7 +26,7 @@
],
"camelcase": "off",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@stylistic/func-call-spacing": ["error", "never"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
@@ -47,8 +47,8 @@
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@stylistic/semi": ["error", "never"],
"@stylistic/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error",
"filenames/match-regex" : "off",
"github/no-then" : 1, // warning

File diff suppressed because it is too large Load Diff

View File

@@ -35,9 +35,10 @@
"@actions/glob": "^0.4.0"
},
"devDependencies": {
"@types/node": "^20.6.2",
"@typescript-eslint/eslint-plugin": "^6.21.0",
"@typescript-eslint/parser": "^6.7.2",
"@stylistic/eslint-plugin": "^3.1.0",
"@types/node": "^22.0.0",
"@typescript-eslint/eslint-plugin": "^8.0.0",
"@typescript-eslint/parser": "^8.0.0",
"@vercel/ncc": "^0.38.3",
"eslint": "^8.47.0",
"eslint-plugin-github": "^4.10.2",
@@ -45,6 +46,6 @@
"husky": "^9.1.7",
"lint-staged": "^15.5.0",
"prettier": "^3.0.3",
"typescript": "^5.9.2"
"typescript": "^5.9.3"
}
}

View File

@@ -6,8 +6,8 @@ NODE_URL=https://nodejs.org/dist
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
# When you update Node versions you must also create a new release of alpine_nodejs at that updated version.
# Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started
NODE20_VERSION="20.19.5"
NODE24_VERSION="24.11.0"
NODE20_VERSION="20.19.6"
NODE24_VERSION="24.12.0"
get_abs_path() {
# exploits the fact that pwd will print abs path when no args

View File

@@ -1,6 +1,6 @@
[Unit]
Description={{Description}}
After=network.target
After=network-online.target
[Service]
ExecStart={{RunnerRoot}}/runsvc.sh

View File

@@ -1,7 +1,7 @@
/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({
/***/ 2627:
/***/ 4711:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -22,13 +22,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
@@ -46,15 +56,15 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const crypto = __importStar(__nccwpck_require__(6113));
const fs = __importStar(__nccwpck_require__(7147));
const glob = __importStar(__nccwpck_require__(8090));
const path = __importStar(__nccwpck_require__(1017));
const stream = __importStar(__nccwpck_require__(2781));
const util = __importStar(__nccwpck_require__(3837));
const crypto = __importStar(__nccwpck_require__(6982));
const fs = __importStar(__nccwpck_require__(9896));
const glob = __importStar(__nccwpck_require__(7206));
const path = __importStar(__nccwpck_require__(6928));
const stream = __importStar(__nccwpck_require__(2203));
const util = __importStar(__nccwpck_require__(9023));
function run() {
var _a, e_1, _b, _c;
return __awaiter(this, void 0, void 0, function* () {
var _a, e_1, _b, _c;
// arg0 -> node
// arg1 -> hashFiles.js
// env[followSymbolicLinks] = true/null
@@ -128,7 +138,7 @@ function run() {
/***/ }),
/***/ 7351:
/***/ 4914:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -154,8 +164,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.issue = exports.issueCommand = void 0;
const os = __importStar(__nccwpck_require__(2037));
const utils_1 = __nccwpck_require__(5278);
const os = __importStar(__nccwpck_require__(857));
const utils_1 = __nccwpck_require__(302);
/**
* Commands
*
@@ -227,7 +237,7 @@ function escapeProperty(s) {
/***/ }),
/***/ 2186:
/***/ 7484:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -262,12 +272,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
const command_1 = __nccwpck_require__(7351);
const file_command_1 = __nccwpck_require__(717);
const utils_1 = __nccwpck_require__(5278);
const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017));
const oidc_utils_1 = __nccwpck_require__(8041);
const command_1 = __nccwpck_require__(4914);
const file_command_1 = __nccwpck_require__(4753);
const utils_1 = __nccwpck_require__(302);
const os = __importStar(__nccwpck_require__(857));
const path = __importStar(__nccwpck_require__(6928));
const oidc_utils_1 = __nccwpck_require__(5306);
/**
* The code to exit an action
*/
@@ -552,17 +562,17 @@ exports.getIDToken = getIDToken;
/**
* Summary exports
*/
var summary_1 = __nccwpck_require__(1327);
var summary_1 = __nccwpck_require__(1847);
Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
/**
* @deprecated use core.summary
*/
var summary_2 = __nccwpck_require__(1327);
var summary_2 = __nccwpck_require__(1847);
Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
/**
* Path exports
*/
var path_utils_1 = __nccwpck_require__(2981);
var path_utils_1 = __nccwpck_require__(1976);
Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
@@ -570,7 +580,7 @@ Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: funct
/***/ }),
/***/ 717:
/***/ 4753:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -599,10 +609,10 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__nccwpck_require__(7147));
const os = __importStar(__nccwpck_require__(2037));
const uuid_1 = __nccwpck_require__(5840);
const utils_1 = __nccwpck_require__(5278);
const fs = __importStar(__nccwpck_require__(9896));
const os = __importStar(__nccwpck_require__(857));
const uuid_1 = __nccwpck_require__(2048);
const utils_1 = __nccwpck_require__(302);
function issueFileCommand(command, message) {
const filePath = process.env[`GITHUB_${command}`];
if (!filePath) {
@@ -635,7 +645,7 @@ exports.prepareKeyValueMessage = prepareKeyValueMessage;
/***/ }),
/***/ 8041:
/***/ 5306:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -651,9 +661,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.OidcClient = void 0;
const http_client_1 = __nccwpck_require__(6255);
const auth_1 = __nccwpck_require__(5526);
const core_1 = __nccwpck_require__(2186);
const http_client_1 = __nccwpck_require__(4844);
const auth_1 = __nccwpck_require__(4552);
const core_1 = __nccwpck_require__(7484);
class OidcClient {
static createHttpClient(allowRetry = true, maxRetry = 10) {
const requestOptions = {
@@ -719,7 +729,7 @@ exports.OidcClient = OidcClient;
/***/ }),
/***/ 2981:
/***/ 1976:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -745,7 +755,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
const path = __importStar(__nccwpck_require__(1017));
const path = __importStar(__nccwpck_require__(6928));
/**
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
* replaced with /.
@@ -784,7 +794,7 @@ exports.toPlatformPath = toPlatformPath;
/***/ }),
/***/ 1327:
/***/ 1847:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -800,8 +810,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
const os_1 = __nccwpck_require__(2037);
const fs_1 = __nccwpck_require__(7147);
const os_1 = __nccwpck_require__(857);
const fs_1 = __nccwpck_require__(9896);
const { access, appendFile, writeFile } = fs_1.promises;
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
@@ -1074,7 +1084,7 @@ exports.summary = _summary;
/***/ }),
/***/ 5278:
/***/ 302:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -1121,7 +1131,7 @@ exports.toCommandProperties = toCommandProperties;
/***/ }),
/***/ 8090:
/***/ 7206:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1137,8 +1147,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.hashFiles = exports.create = void 0;
const internal_globber_1 = __nccwpck_require__(8298);
const internal_hash_files_1 = __nccwpck_require__(2448);
const internal_globber_1 = __nccwpck_require__(103);
const internal_hash_files_1 = __nccwpck_require__(3608);
/**
* Constructs a globber
*
@@ -1174,7 +1184,7 @@ exports.hashFiles = hashFiles;
/***/ }),
/***/ 1026:
/***/ 8164:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1200,7 +1210,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getOptions = void 0;
const core = __importStar(__nccwpck_require__(2186));
const core = __importStar(__nccwpck_require__(7484));
/**
* Returns a copy with defaults filled in.
*/
@@ -1236,7 +1246,7 @@ exports.getOptions = getOptions;
/***/ }),
/***/ 8298:
/***/ 103:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1290,14 +1300,14 @@ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _ar
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DefaultGlobber = void 0;
const core = __importStar(__nccwpck_require__(2186));
const fs = __importStar(__nccwpck_require__(7147));
const globOptionsHelper = __importStar(__nccwpck_require__(1026));
const path = __importStar(__nccwpck_require__(1017));
const patternHelper = __importStar(__nccwpck_require__(9005));
const internal_match_kind_1 = __nccwpck_require__(1063);
const internal_pattern_1 = __nccwpck_require__(4536);
const internal_search_state_1 = __nccwpck_require__(9117);
const core = __importStar(__nccwpck_require__(7484));
const fs = __importStar(__nccwpck_require__(9896));
const globOptionsHelper = __importStar(__nccwpck_require__(8164));
const path = __importStar(__nccwpck_require__(6928));
const patternHelper = __importStar(__nccwpck_require__(8891));
const internal_match_kind_1 = __nccwpck_require__(2644);
const internal_pattern_1 = __nccwpck_require__(5370);
const internal_search_state_1 = __nccwpck_require__(9890);
const IS_WINDOWS = process.platform === 'win32';
class DefaultGlobber {
constructor(options) {
@@ -1478,7 +1488,7 @@ exports.DefaultGlobber = DefaultGlobber;
/***/ }),
/***/ 2448:
/***/ 3608:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1520,12 +1530,12 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.hashFiles = void 0;
const crypto = __importStar(__nccwpck_require__(6113));
const core = __importStar(__nccwpck_require__(2186));
const fs = __importStar(__nccwpck_require__(7147));
const stream = __importStar(__nccwpck_require__(2781));
const util = __importStar(__nccwpck_require__(3837));
const path = __importStar(__nccwpck_require__(1017));
const crypto = __importStar(__nccwpck_require__(6982));
const core = __importStar(__nccwpck_require__(7484));
const fs = __importStar(__nccwpck_require__(9896));
const stream = __importStar(__nccwpck_require__(2203));
const util = __importStar(__nccwpck_require__(9023));
const path = __importStar(__nccwpck_require__(6928));
function hashFiles(globber, currentWorkspace, verbose = false) {
var e_1, _a;
var _b;
@@ -1582,7 +1592,7 @@ exports.hashFiles = hashFiles;
/***/ }),
/***/ 1063:
/***/ 2644:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -1607,7 +1617,7 @@ var MatchKind;
/***/ }),
/***/ 1849:
/***/ 4138:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1636,8 +1646,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
const path = __importStar(__nccwpck_require__(1017));
const assert_1 = __importDefault(__nccwpck_require__(9491));
const path = __importStar(__nccwpck_require__(6928));
const assert_1 = __importDefault(__nccwpck_require__(2613));
const IS_WINDOWS = process.platform === 'win32';
/**
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
@@ -1812,7 +1822,7 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
/***/ }),
/***/ 6836:
/***/ 6617:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1841,9 +1851,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Path = void 0;
const path = __importStar(__nccwpck_require__(1017));
const pathHelper = __importStar(__nccwpck_require__(1849));
const assert_1 = __importDefault(__nccwpck_require__(9491));
const path = __importStar(__nccwpck_require__(6928));
const pathHelper = __importStar(__nccwpck_require__(4138));
const assert_1 = __importDefault(__nccwpck_require__(2613));
const IS_WINDOWS = process.platform === 'win32';
/**
* Helper class for parsing paths into segments
@@ -1932,7 +1942,7 @@ exports.Path = Path;
/***/ }),
/***/ 9005:
/***/ 8891:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1958,8 +1968,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
const pathHelper = __importStar(__nccwpck_require__(1849));
const internal_match_kind_1 = __nccwpck_require__(1063);
const pathHelper = __importStar(__nccwpck_require__(4138));
const internal_match_kind_1 = __nccwpck_require__(2644);
const IS_WINDOWS = process.platform === 'win32';
/**
* Given an array of patterns, returns an array of paths to search.
@@ -2033,7 +2043,7 @@ exports.partialMatch = partialMatch;
/***/ }),
/***/ 4536:
/***/ 5370:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -2062,13 +2072,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Pattern = void 0;
const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017));
const pathHelper = __importStar(__nccwpck_require__(1849));
const assert_1 = __importDefault(__nccwpck_require__(9491));
const minimatch_1 = __nccwpck_require__(3973);
const internal_match_kind_1 = __nccwpck_require__(1063);
const internal_path_1 = __nccwpck_require__(6836);
const os = __importStar(__nccwpck_require__(857));
const path = __importStar(__nccwpck_require__(6928));
const pathHelper = __importStar(__nccwpck_require__(4138));
const assert_1 = __importDefault(__nccwpck_require__(2613));
const minimatch_1 = __nccwpck_require__(3772);
const internal_match_kind_1 = __nccwpck_require__(2644);
const internal_path_1 = __nccwpck_require__(6617);
const IS_WINDOWS = process.platform === 'win32';
class Pattern {
constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
@@ -2295,7 +2305,7 @@ exports.Pattern = Pattern;
/***/ }),
/***/ 9117:
/***/ 9890:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -2313,7 +2323,7 @@ exports.SearchState = SearchState;
/***/ }),
/***/ 5526:
/***/ 4552:
/***/ (function(__unused_webpack_module, exports) {
"use strict";
@@ -2401,7 +2411,7 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand
/***/ }),
/***/ 6255:
/***/ 4844:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -2437,10 +2447,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
const http = __importStar(__nccwpck_require__(3685));
const https = __importStar(__nccwpck_require__(5687));
const pm = __importStar(__nccwpck_require__(9835));
const tunnel = __importStar(__nccwpck_require__(4294));
const http = __importStar(__nccwpck_require__(8611));
const https = __importStar(__nccwpck_require__(5692));
const pm = __importStar(__nccwpck_require__(4988));
const tunnel = __importStar(__nccwpck_require__(770));
var HttpCodes;
(function (HttpCodes) {
HttpCodes[HttpCodes["OK"] = 200] = "OK";
@@ -3026,7 +3036,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa
/***/ }),
/***/ 9835:
/***/ 4988:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -3115,7 +3125,7 @@ function isLoopbackAddress(host) {
/***/ }),
/***/ 9417:
/***/ 9380:
/***/ ((module) => {
"use strict";
@@ -3185,11 +3195,11 @@ function range(a, b, str) {
/***/ }),
/***/ 3717:
/***/ 4691:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
var concatMap = __nccwpck_require__(6891);
var balanced = __nccwpck_require__(9417);
var concatMap = __nccwpck_require__(7087);
var balanced = __nccwpck_require__(9380);
module.exports = expandTop;
@@ -3393,7 +3403,7 @@ function expand(str, isTop) {
/***/ }),
/***/ 6891:
/***/ 7087:
/***/ ((module) => {
module.exports = function (xs, fn) {
@@ -3413,19 +3423,19 @@ var isArray = Array.isArray || function (xs) {
/***/ }),
/***/ 3973:
/***/ 3772:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
module.exports = minimatch
minimatch.Minimatch = Minimatch
var path = (function () { try { return __nccwpck_require__(1017) } catch (e) {}}()) || {
var path = (function () { try { return __nccwpck_require__(6928) } catch (e) {}}()) || {
sep: '/'
}
minimatch.sep = path.sep
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
var expand = __nccwpck_require__(3717)
var expand = __nccwpck_require__(4691)
var plTypes = {
'!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
@@ -4367,27 +4377,27 @@ function regExpEscape (s) {
/***/ }),
/***/ 4294:
/***/ 770:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
module.exports = __nccwpck_require__(4219);
module.exports = __nccwpck_require__(218);
/***/ }),
/***/ 4219:
/***/ 218:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
var net = __nccwpck_require__(1808);
var tls = __nccwpck_require__(4404);
var http = __nccwpck_require__(3685);
var https = __nccwpck_require__(5687);
var events = __nccwpck_require__(2361);
var assert = __nccwpck_require__(9491);
var util = __nccwpck_require__(3837);
var net = __nccwpck_require__(9278);
var tls = __nccwpck_require__(4756);
var http = __nccwpck_require__(8611);
var https = __nccwpck_require__(5692);
var events = __nccwpck_require__(4434);
var assert = __nccwpck_require__(2613);
var util = __nccwpck_require__(9023);
exports.httpOverHttp = httpOverHttp;
@@ -4647,7 +4657,7 @@ exports.debug = debug; // for test
/***/ }),
/***/ 5840:
/***/ 2048:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4711,29 +4721,29 @@ Object.defineProperty(exports, "parse", ({
}
}));
var _v = _interopRequireDefault(__nccwpck_require__(8628));
var _v = _interopRequireDefault(__nccwpck_require__(6415));
var _v2 = _interopRequireDefault(__nccwpck_require__(6409));
var _v2 = _interopRequireDefault(__nccwpck_require__(1697));
var _v3 = _interopRequireDefault(__nccwpck_require__(5122));
var _v3 = _interopRequireDefault(__nccwpck_require__(4676));
var _v4 = _interopRequireDefault(__nccwpck_require__(9120));
var _v4 = _interopRequireDefault(__nccwpck_require__(9771));
var _nil = _interopRequireDefault(__nccwpck_require__(5332));
var _nil = _interopRequireDefault(__nccwpck_require__(7723));
var _version = _interopRequireDefault(__nccwpck_require__(1595));
var _version = _interopRequireDefault(__nccwpck_require__(5868));
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
var _parse = _interopRequireDefault(__nccwpck_require__(2746));
var _parse = _interopRequireDefault(__nccwpck_require__(7267));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/***/ }),
/***/ 4569:
/***/ 216:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4744,7 +4754,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
var _crypto = _interopRequireDefault(__nccwpck_require__(6982));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -4763,7 +4773,7 @@ exports["default"] = _default;
/***/ }),
/***/ 5332:
/***/ 7723:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -4778,7 +4788,7 @@ exports["default"] = _default;
/***/ }),
/***/ 2746:
/***/ 7267:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4789,7 +4799,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -4830,7 +4840,7 @@ exports["default"] = _default;
/***/ }),
/***/ 814:
/***/ 7879:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -4845,7 +4855,7 @@ exports["default"] = _default;
/***/ }),
/***/ 807:
/***/ 2973:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4856,7 +4866,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = rng;
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
var _crypto = _interopRequireDefault(__nccwpck_require__(6982));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -4876,7 +4886,7 @@ function rng() {
/***/ }),
/***/ 5274:
/***/ 507:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4887,7 +4897,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
var _crypto = _interopRequireDefault(__nccwpck_require__(6982));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -4906,7 +4916,7 @@ exports["default"] = _default;
/***/ }),
/***/ 8950:
/***/ 7597:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4917,7 +4927,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -4952,7 +4962,7 @@ exports["default"] = _default;
/***/ }),
/***/ 8628:
/***/ 6415:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4963,9 +4973,9 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _rng = _interopRequireDefault(__nccwpck_require__(807));
var _rng = _interopRequireDefault(__nccwpck_require__(2973));
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5066,7 +5076,7 @@ exports["default"] = _default;
/***/ }),
/***/ 6409:
/***/ 1697:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -5077,9 +5087,9 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _v = _interopRequireDefault(__nccwpck_require__(5998));
var _v = _interopRequireDefault(__nccwpck_require__(2930));
var _md = _interopRequireDefault(__nccwpck_require__(4569));
var _md = _interopRequireDefault(__nccwpck_require__(216));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5089,7 +5099,7 @@ exports["default"] = _default;
/***/ }),
/***/ 5998:
/***/ 2930:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -5101,9 +5111,9 @@ Object.defineProperty(exports, "__esModule", ({
exports["default"] = _default;
exports.URL = exports.DNS = void 0;
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
var _parse = _interopRequireDefault(__nccwpck_require__(2746));
var _parse = _interopRequireDefault(__nccwpck_require__(7267));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5174,7 +5184,7 @@ function _default(name, version, hashfunc) {
/***/ }),
/***/ 5122:
/***/ 4676:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -5185,9 +5195,9 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _rng = _interopRequireDefault(__nccwpck_require__(807));
var _rng = _interopRequireDefault(__nccwpck_require__(2973));
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5218,7 +5228,7 @@ exports["default"] = _default;
/***/ }),
/***/ 9120:
/***/ 9771:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -5229,9 +5239,9 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _v = _interopRequireDefault(__nccwpck_require__(5998));
var _v = _interopRequireDefault(__nccwpck_require__(2930));
var _sha = _interopRequireDefault(__nccwpck_require__(5274));
var _sha = _interopRequireDefault(__nccwpck_require__(507));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5241,7 +5251,7 @@ exports["default"] = _default;
/***/ }),
/***/ 6900:
/***/ 6200:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -5252,7 +5262,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _regex = _interopRequireDefault(__nccwpck_require__(814));
var _regex = _interopRequireDefault(__nccwpck_require__(7879));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5265,7 +5275,7 @@ exports["default"] = _default;
/***/ }),
/***/ 1595:
/***/ 5868:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -5276,7 +5286,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5293,7 +5303,7 @@ exports["default"] = _default;
/***/ }),
/***/ 9491:
/***/ 2613:
/***/ ((module) => {
"use strict";
@@ -5301,7 +5311,7 @@ module.exports = require("assert");
/***/ }),
/***/ 6113:
/***/ 6982:
/***/ ((module) => {
"use strict";
@@ -5309,7 +5319,7 @@ module.exports = require("crypto");
/***/ }),
/***/ 2361:
/***/ 4434:
/***/ ((module) => {
"use strict";
@@ -5317,7 +5327,7 @@ module.exports = require("events");
/***/ }),
/***/ 7147:
/***/ 9896:
/***/ ((module) => {
"use strict";
@@ -5325,7 +5335,7 @@ module.exports = require("fs");
/***/ }),
/***/ 3685:
/***/ 8611:
/***/ ((module) => {
"use strict";
@@ -5333,7 +5343,7 @@ module.exports = require("http");
/***/ }),
/***/ 5687:
/***/ 5692:
/***/ ((module) => {
"use strict";
@@ -5341,7 +5351,7 @@ module.exports = require("https");
/***/ }),
/***/ 1808:
/***/ 9278:
/***/ ((module) => {
"use strict";
@@ -5349,7 +5359,7 @@ module.exports = require("net");
/***/ }),
/***/ 2037:
/***/ 857:
/***/ ((module) => {
"use strict";
@@ -5357,7 +5367,7 @@ module.exports = require("os");
/***/ }),
/***/ 1017:
/***/ 6928:
/***/ ((module) => {
"use strict";
@@ -5365,7 +5375,7 @@ module.exports = require("path");
/***/ }),
/***/ 2781:
/***/ 2203:
/***/ ((module) => {
"use strict";
@@ -5373,7 +5383,7 @@ module.exports = require("stream");
/***/ }),
/***/ 4404:
/***/ 4756:
/***/ ((module) => {
"use strict";
@@ -5381,7 +5391,7 @@ module.exports = require("tls");
/***/ }),
/***/ 3837:
/***/ 9023:
/***/ ((module) => {
"use strict";
@@ -5431,7 +5441,7 @@ module.exports = require("util");
/******/ // startup
/******/ // Load entry module and return exports
/******/ // This entry module is referenced by other modules so it can't be inlined
/******/ var __webpack_exports__ = __nccwpck_require__(2627);
/******/ var __webpack_exports__ = __nccwpck_require__(4711);
/******/ module.exports = __webpack_exports__;
/******/
/******/ })()

View File

@@ -110,7 +110,7 @@ then
exit 1
fi
apt_get_with_fallbacks libicu72 libicu71 libicu70 libicu69 libicu68 libicu67 libicu66 libicu65 libicu63 libicu60 libicu57 libicu55 libicu52
apt_get_with_fallbacks libicu76 libicu75 libicu74 libicu73 libicu72 libicu71 libicu70 libicu69 libicu68 libicu67 libicu66 libicu65 libicu63 libicu60 libicu57 libicu55 libicu52
if [ $? -ne 0 ]
then
echo "'$apt_get' failed with exit code '$?'"

View File

@@ -1,5 +1,36 @@
#!/bin/bash
# try to use sleep if available
if [ -x "$(command -v sleep)" ]; then
sleep "$1"
exit 0
fi
# try to use ping if available
if [ -x "$(command -v ping)" ]; then
ping -c $(( $1 + 1 )) 127.0.0.1 > /dev/null
exit 0
fi
# try to use read -t from stdin/stdout/stderr if we are in bash
if [ -n "$BASH_VERSION" ]; then
if command -v read >/dev/null 2>&1; then
if [ -t 0 ]; then
read -t "$1" -u 0 || :;
exit 0
fi
if [ -t 1 ]; then
read -t "$1" -u 1 || :;
exit 0
fi
if [ -t 2 ]; then
read -t "$1" -u 2 || :;
exit 0
fi
fi
fi
# fallback to a busy wait
SECONDS=0
while [[ $SECONDS -lt $1 ]]; do
:

View File

@@ -169,23 +169,23 @@ namespace GitHub.Runner.Common
public static readonly string AllowRunnerContainerHooks = "DistributedTask.AllowRunnerContainerHooks";
public static readonly string AddCheckRunIdToJobContext = "actions_add_check_run_id_to_job_context";
public static readonly string DisplayHelpfulActionsDownloadErrors = "actions_display_helpful_actions_download_errors";
public static readonly string ContainerActionRunnerTemp = "actions_container_action_runner_temp";
public static readonly string SnapshotPreflightHostedRunnerCheck = "actions_snapshot_preflight_hosted_runner_check";
public static readonly string SnapshotPreflightImageGenPoolCheck = "actions_snapshot_preflight_image_gen_pool_check";
public static readonly string CompareTemplateEvaluator = "actions_runner_compare_template_evaluator";
public static readonly string CompareWorkflowParser = "actions_runner_compare_workflow_parser";
public static readonly string SetOrchestrationIdEnvForActions = "actions_set_orchestration_id_env_for_actions";
}
// Node version migration related constants
public static class NodeMigration
{
// Node versions
public static readonly string Node20 = "node20";
public static readonly string Node24 = "node24";
// Environment variables for controlling node version selection
public static readonly string ForceNode24Variable = "FORCE_JAVASCRIPT_ACTIONS_TO_NODE24";
public static readonly string AllowUnsecureNodeVersionVariable = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
// Feature flags for controlling the migration phases
public static readonly string UseNode24ByDefaultFlag = "actions.runner.usenode24bydefault";
public static readonly string RequireNode24Flag = "actions.runner.requirenode24";

View File

@@ -378,7 +378,7 @@ namespace GitHub.Runner.Worker
string dockerFileLowerCase = Path.Combine(actionDirectory, "dockerfile");
if (File.Exists(manifestFile) || File.Exists(manifestFileYaml))
{
var manifestManager = HostContext.GetService<IActionManifestManager>();
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
if (File.Exists(manifestFile))
{
definition.Data = manifestManager.Load(executionContext, manifestFile);
@@ -964,7 +964,7 @@ namespace GitHub.Runner.Worker
if (File.Exists(actionManifest) || File.Exists(actionManifestYaml))
{
executionContext.Debug($"action.yml for action: '{actionManifest}'.");
var manifestManager = HostContext.GetService<IActionManifestManager>();
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
ActionDefinitionData actionDefinitionData = null;
if (File.Exists(actionManifest))
{

View File

@@ -2,29 +2,29 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Reflection;
using System.Linq;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System.Reflection;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using System.Linq;
using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.Actions.WorkflowParser;
using GitHub.Actions.WorkflowParser.Conversion;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Schema;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
using GitHub.Actions.Expressions.Data;
namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(ActionManifestManager))]
public interface IActionManifestManager : IRunnerService
{
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
public ActionDefinitionDataNew Load(IExecutionContext executionContext, string manifestFile);
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
DictionaryExpressionData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, ExpressionData> extraExpressionValues);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, ExpressionData> extraExpressionValues);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, ExpressionData> extraExpressionValues);
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
}
@@ -50,10 +50,10 @@ namespace GitHub.Runner.Worker
Trace.Info($"Load schema file with definitions: {StringUtil.ConvertToJson(_actionManifestSchema.Definitions.Keys)}");
}
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
public ActionDefinitionDataNew Load(IExecutionContext executionContext, string manifestFile)
{
var templateContext = CreateTemplateContext(executionContext);
ActionDefinitionData actionDefinition = new();
ActionDefinitionDataNew actionDefinition = new();
// Clean up file name real quick
// Instead of using Regex which can be computationally expensive,
@@ -160,21 +160,21 @@ namespace GitHub.Runner.Worker
return actionDefinition;
}
public DictionaryContextData EvaluateCompositeOutputs(
public DictionaryExpressionData EvaluateCompositeOutputs(
IExecutionContext executionContext,
TemplateToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
IDictionary<string, ExpressionData> extraExpressionValues)
{
var result = default(DictionaryContextData);
DictionaryExpressionData result = null;
if (token != null)
{
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
try
{
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null, omitHeader: true);
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null);
templateContext.Errors.Check();
result = token.ToContextData().AssertDictionary("composite outputs");
result = token.ToExpressionData().AssertDictionary("composite outputs");
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
@@ -184,13 +184,13 @@ namespace GitHub.Runner.Worker
templateContext.Errors.Check();
}
return result ?? new DictionaryContextData();
return result ?? new DictionaryExpressionData();
}
public List<string> EvaluateContainerArguments(
IExecutionContext executionContext,
SequenceToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
IDictionary<string, ExpressionData> extraExpressionValues)
{
var result = new List<string>();
@@ -199,7 +199,7 @@ namespace GitHub.Runner.Worker
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null, omitHeader: true);
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null);
templateContext.Errors.Check();
Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
@@ -229,7 +229,7 @@ namespace GitHub.Runner.Worker
public Dictionary<string, string> EvaluateContainerEnvironment(
IExecutionContext executionContext,
MappingToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
IDictionary<string, ExpressionData> extraExpressionValues)
{
var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -238,7 +238,7 @@ namespace GitHub.Runner.Worker
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null, omitHeader: true);
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null);
templateContext.Errors.Check();
Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
@@ -281,7 +281,7 @@ namespace GitHub.Runner.Worker
var templateContext = CreateTemplateContext(executionContext);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null, omitHeader: true);
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null);
templateContext.Errors.Check();
Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
@@ -303,7 +303,7 @@ namespace GitHub.Runner.Worker
private TemplateContext CreateTemplateContext(
IExecutionContext executionContext,
IDictionary<string, PipelineContextData> extraExpressionValues = null)
IDictionary<string, ExpressionData> extraExpressionValues = null)
{
var result = new TemplateContext
{
@@ -314,13 +314,18 @@ namespace GitHub.Runner.Worker
maxEvents: 1000000,
maxBytes: 10 * 1024 * 1024),
Schema = _actionManifestSchema,
TraceWriter = executionContext.ToTemplateTraceWriter(),
// TODO: Switch to real tracewriter for cutover
TraceWriter = new GitHub.Actions.WorkflowParser.ObjectTemplating.EmptyTraceWriter(),
AllowCaseFunction = false,
};
// Expression values from execution context
foreach (var pair in executionContext.ExpressionValues)
{
result.ExpressionValues[pair.Key] = pair.Value;
// Convert old PipelineContextData to new ExpressionData
var json = StringUtil.ConvertToJson(pair.Value, Newtonsoft.Json.Formatting.None);
var newValue = StringUtil.ConvertFromJson<GitHub.Actions.Expressions.Data.ExpressionData>(json);
result.ExpressionValues[pair.Key] = newValue;
}
// Extra expression values
@@ -332,10 +337,19 @@ namespace GitHub.Runner.Worker
}
}
// Expression functions from execution context
foreach (var item in executionContext.ExpressionFunctions)
// Expression functions
foreach (var func in executionContext.ExpressionFunctions)
{
result.ExpressionFunctions.Add(item);
GitHub.Actions.Expressions.IFunctionInfo newFunc = func.Name switch
{
"always" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewAlwaysFunction>(func.Name, func.MinParameters, func.MaxParameters),
"cancelled" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewCancelledFunction>(func.Name, func.MinParameters, func.MaxParameters),
"failure" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewFailureFunction>(func.Name, func.MinParameters, func.MaxParameters),
"success" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewSuccessFunction>(func.Name, func.MinParameters, func.MaxParameters),
"hashFiles" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewHashFilesFunction>(func.Name, func.MinParameters, func.MaxParameters),
_ => throw new NotSupportedException($"Expression function '{func.Name}' is not supported in ActionManifestManager")
};
result.ExpressionFunctions.Add(newFunc);
}
// Add the file table from the Execution Context
@@ -368,7 +382,7 @@ namespace GitHub.Runner.Worker
var postToken = default(StringToken);
var postEntrypointToken = default(StringToken);
var postIfToken = default(StringToken);
var steps = default(List<Pipelines.Step>);
var steps = default(List<GitHub.Actions.WorkflowParser.IStep>);
foreach (var run in runsMapping)
{
@@ -416,7 +430,7 @@ namespace GitHub.Runner.Worker
break;
case "steps":
var stepsToken = run.Value.AssertSequence("steps");
steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
steps = WorkflowTemplateConverter.ConvertToSteps(templateContext, stepsToken);
templateContext.Errors.Check();
break;
default:
@@ -435,7 +449,7 @@ namespace GitHub.Runner.Worker
}
else
{
return new ContainerActionExecutionData()
return new ContainerActionExecutionDataNew()
{
Image = imageToken.Value,
Arguments = argsToken,
@@ -478,11 +492,11 @@ namespace GitHub.Runner.Worker
}
else
{
return new CompositeActionExecutionData()
return new CompositeActionExecutionDataNew()
{
Steps = steps.Cast<Pipelines.ActionStep>().ToList(),
PreSteps = new List<Pipelines.ActionStep>(),
PostSteps = new Stack<Pipelines.ActionStep>(),
Steps = steps,
PreSteps = new List<GitHub.Actions.WorkflowParser.IStep>(),
PostSteps = new Stack<GitHub.Actions.WorkflowParser.IStep>(),
InitCondition = "always()",
CleanupCondition = "always()",
Outputs = outputs
@@ -507,7 +521,7 @@ namespace GitHub.Runner.Worker
private void ConvertInputs(
TemplateToken inputsToken,
ActionDefinitionData actionDefinition)
ActionDefinitionDataNew actionDefinition)
{
actionDefinition.Inputs = new MappingToken(null, null, null);
var inputsMapping = inputsToken.AssertMapping("inputs");
@@ -542,5 +556,49 @@ namespace GitHub.Runner.Worker
}
}
}
/// <summary>
/// Action definition produced by the new workflow-parser path. Mirrors the
/// legacy ActionDefinitionData shape so results can be compared side by side.
/// </summary>
public sealed class ActionDefinitionDataNew
{
    // Manifest "name" field.
    public string Name { get; set; }
    // Manifest "description" field.
    public string Description { get; set; }
    // Manifest "inputs" mapping (input name -> default-value token).
    public MappingToken Inputs { get; set; }
    // How the action executes (container, node, composite, plugin).
    public ActionExecutionData Execution { get; set; }
    // Input name -> deprecation message; null when no input is deprecated.
    public Dictionary<String, String> Deprecated { get; set; }
}
/// <summary>
/// Execution data for a container action ("using: docker") parsed by the new
/// workflow parser.
/// </summary>
public sealed class ContainerActionExecutionDataNew : ActionExecutionData
{
    public override ActionExecutionType ExecutionType => ActionExecutionType.Container;
    // A pre/post step exists only when the corresponding entrypoint is set.
    public override bool HasPre => !string.IsNullOrEmpty(Pre);
    public override bool HasPost => !string.IsNullOrEmpty(Post);
    // Docker image reference from the "image" field.
    public string Image { get; set; }
    // Optional "entrypoint" override.
    public string EntryPoint { get; set; }
    // Unevaluated "args" sequence (may contain expressions).
    public SequenceToken Arguments { get; set; }
    // Unevaluated "env" mapping (may contain expressions).
    public MappingToken Environment { get; set; }
    // "pre-entrypoint" / "post-entrypoint" scripts, if declared.
    public string Pre { get; set; }
    public string Post { get; set; }
}
/// <summary>
/// Execution data for a composite action ("using: composite") parsed by the
/// new workflow parser; steps use the new parser's IStep model.
/// </summary>
public sealed class CompositeActionExecutionDataNew : ActionExecutionData
{
    public override ActionExecutionType ExecutionType => ActionExecutionType.Composite;
    public override bool HasPre => PreSteps.Count > 0;
    public override bool HasPost => PostSteps.Count > 0;
    // Pre-steps list (initialized empty at creation; see ConvertRuns).
    public List<GitHub.Actions.WorkflowParser.IStep> PreSteps { get; set; }
    // Main steps from the manifest "steps" sequence.
    public List<GitHub.Actions.WorkflowParser.IStep> Steps { get; set; }
    // Post-steps stack (initialized empty at creation; see ConvertRuns).
    public Stack<GitHub.Actions.WorkflowParser.IStep> PostSteps { get; set; }
    // Unevaluated "outputs" mapping.
    public MappingToken Outputs { get; set; }
}
}

View File

@@ -0,0 +1,547 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System.Reflection;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using System.Linq;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
/// <summary>
/// Legacy action.yml manifest loader/evaluator. Kept as the behavioral
/// baseline while the new workflow-parser implementation is compared against
/// it (see ActionManifestManagerWrapper).
/// </summary>
[ServiceLocator(Default = typeof(ActionManifestManagerLegacy))]
public interface IActionManifestManagerLegacy : IRunnerService
{
    /// <summary>Loads and validates an action manifest from disk.</summary>
    ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
    /// <summary>Evaluates a composite action's outputs token into context data.</summary>
    DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
    /// <summary>Evaluates the container "args" sequence into literal strings.</summary>
    List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
    /// <summary>Evaluates the container "env" mapping into key/value pairs.</summary>
    Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
    /// <summary>Evaluates a single input's default-value token to a string.</summary>
    string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
}
public sealed class ActionManifestManagerLegacy : RunnerService, IActionManifestManagerLegacy
{
private TemplateSchema _actionManifestSchema;
/// <summary>
/// Loads the action.yml template schema from an embedded resource; the schema
/// is used by CreateTemplateContext to validate every manifest read.
/// </summary>
public override void Initialize(IHostContext hostContext)
{
    base.Initialize(hostContext);
    var assembly = Assembly.GetExecutingAssembly();
    var json = default(string);
    // Schema ships as an embedded resource inside the worker assembly.
    using (var stream = assembly.GetManifestResourceStream("GitHub.Runner.Worker.action_yaml.json"))
    using (var streamReader = new StreamReader(stream))
    {
        json = streamReader.ReadToEnd();
    }
    var objectReader = new JsonObjectReader(null, json);
    _actionManifestSchema = TemplateSchema.Load(objectReader);
    ArgUtil.NotNull(_actionManifestSchema, nameof(_actionManifestSchema));
    Trace.Info($"Load schema file with definitions: {StringUtil.ConvertToJson(_actionManifestSchema.Definitions.Keys)}");
}
/// <summary>
/// Reads an action manifest from disk, validates it against the embedded
/// schema, and converts it into an ActionDefinitionData. The 'runs:' section
/// is converted last so property order in the manifest does not matter.
/// </summary>
/// <param name="executionContext">Supplies expression values/functions and the global file table.</param>
/// <param name="manifestFile">Absolute path to the action.yml/action.yaml file.</param>
/// <returns>The parsed definition; never null.</returns>
/// <exception cref="ArgumentException">Manifest failed to load/validate, or no 'runs:' section.</exception>
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
    var templateContext = CreateTemplateContext(executionContext);
    ActionDefinitionData actionDefinition = new();
    // Clean up file name real quick
    // Instead of using Regex which can be computationally expensive,
    // we can just remove the # of characters from the fileName according to the length of the basePath
    // NOTE(review): Contains + Remove(0, basePath.Length + 1) assumes basePath
    // is a PREFIX of manifestFile; a mid-path match would strip the wrong
    // characters — confirm callers always pass paths under the Actions dir.
    string basePath = HostContext.GetDirectory(WellKnownDirectory.Actions);
    string fileRelativePath = manifestFile;
    if (manifestFile.Contains(basePath))
    {
        fileRelativePath = manifestFile.Remove(0, basePath.Length + 1);
    }
    try
    {
        var token = default(TemplateToken);
        // Get the file ID
        var fileId = templateContext.GetFileId(fileRelativePath);
        // Add this file to the FileTable in executionContext if it hasn't been added already
        // we use > since fileID is 1 indexed
        if (fileId > executionContext.Global.FileTable.Count)
        {
            executionContext.Global.FileTable.Add(fileRelativePath);
        }
        // Read the file
        var fileContent = File.ReadAllText(manifestFile);
        using (var stringReader = new StringReader(fileContent))
        {
            var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
            token = TemplateReader.Read(templateContext, "action-root", yamlObjectReader, fileId, out _);
        }
        var actionMapping = token.AssertMapping("action manifest root");
        var actionOutputs = default(MappingToken);
        var actionRunValueToken = default(TemplateToken);
        foreach (var actionPair in actionMapping)
        {
            var propertyName = actionPair.Key.AssertString($"action.yml property key");
            switch (propertyName.Value)
            {
                case "name":
                    actionDefinition.Name = actionPair.Value.AssertString("name").Value;
                    break;
                case "outputs":
                    actionOutputs = actionPair.Value.AssertMapping("outputs");
                    break;
                case "description":
                    actionDefinition.Description = actionPair.Value.AssertString("description").Value;
                    break;
                case "inputs":
                    ConvertInputs(actionPair.Value, actionDefinition);
                    break;
                case "runs":
                    // Defer runs token evaluation to after for loop to ensure that order of outputs doesn't matter.
                    actionRunValueToken = actionPair.Value;
                    break;
                default:
                    Trace.Info($"Ignore action property {propertyName}.");
                    break;
            }
        }
        // Evaluate Runs Last
        if (actionRunValueToken != null)
        {
            actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionRunValueToken, fileRelativePath, actionOutputs);
        }
    }
    catch (Exception ex)
    {
        // Funnel the failure into templateContext.Errors so it is reported
        // together with any template validation errors below.
        Trace.Error(ex);
        templateContext.Errors.Add(ex);
    }
    if (templateContext.Errors.Count > 0)
    {
        foreach (var error in templateContext.Errors)
        {
            Trace.Error($"Action.yml load error: {error.Message}");
            executionContext.Error(error.Message);
        }
        throw new ArgumentException($"Failed to load {fileRelativePath}");
    }
    if (actionDefinition.Execution == null)
    {
        executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
        throw new ArgumentException($"Top level 'runs:' section is required for {fileRelativePath}");
    }
    else
    {
        Trace.Info($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
    }
    return actionDefinition;
}
/// <summary>
/// Evaluates a composite action's 'outputs' token (resolving any expressions)
/// into a dictionary of context data.
/// </summary>
/// <param name="token">The outputs mapping token; null yields an empty dictionary.</param>
/// <param name="extraExpressionValues">Extra named values (e.g. 'steps') merged into the evaluation context.</param>
/// <returns>The evaluated outputs; never null.</returns>
public DictionaryContextData EvaluateCompositeOutputs(
    IExecutionContext executionContext,
    TemplateToken token,
    IDictionary<string, PipelineContextData> extraExpressionValues)
{
    var result = default(DictionaryContextData);
    if (token != null)
    {
        var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
        try
        {
            token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null, omitHeader: true);
            templateContext.Errors.Check();
            result = token.ToContextData().AssertDictionary("composite outputs");
        }
        catch (Exception ex) when (!(ex is TemplateValidationException))
        {
            // Validation exceptions already live in templateContext.Errors;
            // anything else is added so the Check() below rethrows uniformly.
            templateContext.Errors.Add(ex);
        }
        templateContext.Errors.Check();
    }
    return result ?? new DictionaryContextData();
}
/// <summary>
/// Evaluates the container 'args' sequence (resolving expressions) into a
/// list of literal strings, in manifest order.
/// </summary>
/// <param name="token">The args sequence token; null yields an empty list.</param>
/// <returns>The evaluated argument strings; never null.</returns>
public List<string> EvaluateContainerArguments(
    IExecutionContext executionContext,
    SequenceToken token,
    IDictionary<string, PipelineContextData> extraExpressionValues)
{
    var result = new List<string>();
    if (token != null)
    {
        var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
        try
        {
            var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null, omitHeader: true);
            templateContext.Errors.Check();
            Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
            // Sequence
            var args = evaluateResult.AssertSequence("container args");
            foreach (var arg in args)
            {
                // Every element must evaluate to a plain string.
                var str = arg.AssertString("container arg").Value;
                result.Add(str);
                Trace.Info($"Add argument {str}");
            }
        }
        catch (Exception ex) when (!(ex is TemplateValidationException))
        {
            Trace.Error(ex);
            templateContext.Errors.Add(ex);
        }
        templateContext.Errors.Check();
    }
    return result;
}
/// <summary>
/// Evaluates the container 'env' mapping (resolving expressions) into a
/// case-insensitive dictionary of environment variables.
/// </summary>
/// <param name="token">The env mapping token; null yields an empty dictionary.</param>
/// <returns>The evaluated env vars (OrdinalIgnoreCase keys); never null.</returns>
public Dictionary<string, string> EvaluateContainerEnvironment(
    IExecutionContext executionContext,
    MappingToken token,
    IDictionary<string, PipelineContextData> extraExpressionValues)
{
    var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
    if (token != null)
    {
        var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
        try
        {
            var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null, omitHeader: true);
            templateContext.Errors.Check();
            Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
            // Mapping
            var mapping = evaluateResult.AssertMapping("container env");
            foreach (var pair in mapping)
            {
                // Literal key
                var key = pair.Key.AssertString("container env key");
                // Literal value
                var value = pair.Value.AssertString("container env value");
                // Indexer assignment: a later duplicate key overwrites an earlier one.
                result[key.Value] = value.Value;
                Trace.Info($"Add env {key} = {value}");
            }
        }
        catch (Exception ex) when (!(ex is TemplateValidationException))
        {
            Trace.Error(ex);
            templateContext.Errors.Add(ex);
        }
        templateContext.Errors.Check();
    }
    return result;
}
/// <summary>
/// Evaluates one input's default-value token (resolving expressions) to a
/// literal string.
/// </summary>
/// <param name="inputName">Input name, used only for trace/error messages.</param>
/// <param name="token">The default-value token; null yields an empty string.</param>
/// <returns>The evaluated default; never null.</returns>
public string EvaluateDefaultInput(
    IExecutionContext executionContext,
    string inputName,
    TemplateToken token)
{
    string result = "";
    if (token != null)
    {
        var templateContext = CreateTemplateContext(executionContext);
        try
        {
            var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null, omitHeader: true);
            templateContext.Errors.Check();
            Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
            // String
            result = evaluateResult.AssertString($"default value for input '{inputName}'").Value;
        }
        catch (Exception ex) when (!(ex is TemplateValidationException))
        {
            Trace.Error(ex);
            templateContext.Errors.Add(ex);
        }
        templateContext.Errors.Check();
    }
    return result;
}
/// <summary>
/// Builds a TemplateContext wired to the action.yml schema, seeded with the
/// execution context's expression values/functions and file table, plus any
/// caller-supplied extra values (which win over context values on key clash).
/// </summary>
private TemplateContext CreateTemplateContext(
    IExecutionContext executionContext,
    IDictionary<string, PipelineContextData> extraExpressionValues = null)
{
    var result = new TemplateContext
    {
        CancellationToken = CancellationToken.None,
        Errors = new TemplateValidationErrors(10, int.MaxValue), // Don't truncate error messages otherwise we might not scrub secrets correctly
        Memory = new TemplateMemory(
            maxDepth: 100,
            maxEvents: 1000000,
            maxBytes: 10 * 1024 * 1024),
        Schema = _actionManifestSchema,
        TraceWriter = executionContext.ToTemplateTraceWriter(),
        AllowCaseFunction = false,
    };
    // Expression values from execution context
    foreach (var pair in executionContext.ExpressionValues)
    {
        result.ExpressionValues[pair.Key] = pair.Value;
    }
    // Extra expression values
    if (extraExpressionValues?.Count > 0)
    {
        foreach (var pair in extraExpressionValues)
        {
            result.ExpressionValues[pair.Key] = pair.Value;
        }
    }
    // Expression functions from execution context
    foreach (var item in executionContext.ExpressionFunctions)
    {
        result.ExpressionFunctions.Add(item);
    }
    // Add the file table from the Execution Context
    // GetFileId registers each name, keeping IDs consistent with the
    // execution context's 1-indexed file table.
    for (var i = 0; i < executionContext.Global.FileTable.Count; i++)
    {
        result.GetFileId(executionContext.Global.FileTable[i]);
    }
    return result;
}
/// <summary>
/// Converts the manifest 'runs:' mapping into the matching execution data:
/// container ('using: docker'), Node.js ('using: node12|node16|node20|node24'),
/// composite ('using: composite'), or a runner plugin ('plugin:').
/// </summary>
/// <exception cref="ArgumentNullException">A field required by the chosen 'using' value is missing.</exception>
/// <exception cref="ArgumentOutOfRangeException">Unrecognized 'using' value.</exception>
/// <exception cref="NotSupportedException">Neither 'using' nor 'plugin' was provided.</exception>
private ActionExecutionData ConvertRuns(
    IExecutionContext executionContext,
    TemplateContext templateContext,
    TemplateToken inputsToken,
    String fileRelativePath,
    MappingToken outputs = null)
{
    var runsMapping = inputsToken.AssertMapping("runs");
    // First pass: collect every recognized key; validation happens afterwards
    // so key order in the manifest does not matter.
    var usingToken = default(StringToken);
    var imageToken = default(StringToken);
    var argsToken = default(SequenceToken);
    var entrypointToken = default(StringToken);
    var envToken = default(MappingToken);
    var mainToken = default(StringToken);
    var pluginToken = default(StringToken);
    var preToken = default(StringToken);
    var preEntrypointToken = default(StringToken);
    var preIfToken = default(StringToken);
    var postToken = default(StringToken);
    var postEntrypointToken = default(StringToken);
    var postIfToken = default(StringToken);
    var steps = default(List<Pipelines.Step>);
    foreach (var run in runsMapping)
    {
        var runsKey = run.Key.AssertString("runs key").Value;
        switch (runsKey)
        {
            case "using":
                usingToken = run.Value.AssertString("using");
                break;
            case "image":
                imageToken = run.Value.AssertString("image");
                break;
            case "args":
                argsToken = run.Value.AssertSequence("args");
                break;
            case "entrypoint":
                entrypointToken = run.Value.AssertString("entrypoint");
                break;
            case "env":
                envToken = run.Value.AssertMapping("env");
                break;
            case "main":
                mainToken = run.Value.AssertString("main");
                break;
            case "plugin":
                pluginToken = run.Value.AssertString("plugin");
                break;
            case "post":
                postToken = run.Value.AssertString("post");
                break;
            case "post-entrypoint":
                postEntrypointToken = run.Value.AssertString("post-entrypoint");
                break;
            case "post-if":
                postIfToken = run.Value.AssertString("post-if");
                break;
            case "pre":
                preToken = run.Value.AssertString("pre");
                break;
            case "pre-entrypoint":
                preEntrypointToken = run.Value.AssertString("pre-entrypoint");
                break;
            case "pre-if":
                preIfToken = run.Value.AssertString("pre-if");
                break;
            case "steps":
                // Steps are converted eagerly; errors surface immediately.
                var stepsToken = run.Value.AssertSequence("steps");
                steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
                templateContext.Errors.Check();
                break;
            default:
                Trace.Info($"Ignore run property {runsKey}.");
                break;
        }
    }
    // Second pass: pick the execution type based on 'using' (or 'plugin').
    if (usingToken != null)
    {
        if (string.Equals(usingToken.Value, "docker", StringComparison.OrdinalIgnoreCase))
        {
            if (string.IsNullOrEmpty(imageToken?.Value))
            {
                throw new ArgumentNullException($"You are using a Container Action but an image is not provided in {fileRelativePath}.");
            }
            else
            {
                // Pre/post for containers come from *-entrypoint keys; their
                // conditions default to always().
                return new ContainerActionExecutionData()
                {
                    Image = imageToken.Value,
                    Arguments = argsToken,
                    EntryPoint = entrypointToken?.Value,
                    Environment = envToken,
                    Pre = preEntrypointToken?.Value,
                    InitCondition = preIfToken?.Value ?? "always()",
                    Post = postEntrypointToken?.Value,
                    CleanupCondition = postIfToken?.Value ?? "always()"
                };
            }
        }
        else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase) ||
            string.Equals(usingToken.Value, "node16", StringComparison.OrdinalIgnoreCase) ||
            string.Equals(usingToken.Value, "node20", StringComparison.OrdinalIgnoreCase) ||
            string.Equals(usingToken.Value, "node24", StringComparison.OrdinalIgnoreCase))
        {
            if (string.IsNullOrEmpty(mainToken?.Value))
            {
                throw new ArgumentNullException($"You are using a JavaScript Action but there is not an entry JavaScript file provided in {fileRelativePath}.");
            }
            else
            {
                return new NodeJSActionExecutionData()
                {
                    NodeVersion = usingToken.Value,
                    Script = mainToken.Value,
                    Pre = preToken?.Value,
                    InitCondition = preIfToken?.Value ?? "always()",
                    Post = postToken?.Value,
                    CleanupCondition = postIfToken?.Value ?? "always()"
                };
            }
        }
        else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase))
        {
            if (steps == null)
            {
                throw new ArgumentNullException($"You are using a composite action but there are no steps provided in {fileRelativePath}.");
            }
            else
            {
                // Pre/post step collections start empty; they are populated
                // later by the composite action handler.
                return new CompositeActionExecutionData()
                {
                    Steps = steps.Cast<Pipelines.ActionStep>().ToList(),
                    PreSteps = new List<Pipelines.ActionStep>(),
                    PostSteps = new Stack<Pipelines.ActionStep>(),
                    InitCondition = "always()",
                    CleanupCondition = "always()",
                    Outputs = outputs
                };
            }
        }
        else
        {
            throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker', 'node12', 'node16', 'node20' or 'node24' instead.");
        }
    }
    else if (pluginToken != null)
    {
        return new PluginActionExecutionData()
        {
            Plugin = pluginToken.Value
        };
    }
    throw new NotSupportedException("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16', 'node20' or 'node24'.");
}
/// <summary>
/// Populates actionDefinition.Inputs from the manifest 'inputs:' mapping:
/// each input's 'default' token (or an empty string when absent), and
/// collects any 'deprecationMessage' entries into Deprecated.
/// </summary>
private void ConvertInputs(
    TemplateToken inputsToken,
    ActionDefinitionData actionDefinition)
{
    actionDefinition.Inputs = new MappingToken(null, null, null);
    var inputsMapping = inputsToken.AssertMapping("inputs");
    foreach (var input in inputsMapping)
    {
        bool hasDefault = false;
        var inputName = input.Key.AssertString("input name");
        var inputMetadata = input.Value.AssertMapping("input metadata");
        foreach (var metadata in inputMetadata)
        {
            var metadataName = metadata.Key.AssertString("input metadata").Value;
            if (string.Equals(metadataName, "default", StringComparison.OrdinalIgnoreCase))
            {
                hasDefault = true;
                actionDefinition.Inputs.Add(inputName, metadata.Value);
            }
            else if (string.Equals(metadataName, "deprecationMessage", StringComparison.OrdinalIgnoreCase))
            {
                // Deprecated map is created lazily, on the first deprecated input.
                if (actionDefinition.Deprecated == null)
                {
                    actionDefinition.Deprecated = new Dictionary<String, String>();
                }
                var message = metadata.Value.AssertString("input deprecationMessage");
                actionDefinition.Deprecated.Add(inputName.Value, message.Value);
            }
        }
        // Inputs without an explicit default still get an (empty string) entry.
        if (!hasDefault)
        {
            actionDefinition.Inputs.Add(inputName, new StringToken(null, null, null, string.Empty));
        }
    }
}
}
}

View File

@@ -0,0 +1,701 @@
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.Actions.WorkflowParser;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
namespace GitHub.Runner.Worker
{
/// <summary>
/// Facade over the legacy and new action manifest managers. Always serves the
/// legacy result; when comparison is enabled it also runs the new parser and
/// records telemetry on any divergence.
/// </summary>
[ServiceLocator(Default = typeof(ActionManifestManagerWrapper))]
public interface IActionManifestManagerWrapper : IRunnerService
{
    /// <summary>Loads an action manifest (legacy result, optionally compared against the new parser).</summary>
    ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
    /// <summary>Evaluates composite action outputs.</summary>
    DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
    /// <summary>Evaluates container args.</summary>
    List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
    /// <summary>Evaluates container environment variables.</summary>
    Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
    /// <summary>Evaluates one input's default value.</summary>
    string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
}
public sealed class ActionManifestManagerWrapper : RunnerService, IActionManifestManagerWrapper
{
private IActionManifestManagerLegacy _legacyManager;
private IActionManifestManager _newManager;
/// <summary>
/// Resolves both the legacy and the new manifest manager services used by the
/// comparison wrapper.
/// </summary>
public override void Initialize(IHostContext hostContext)
{
    base.Initialize(hostContext);
    _legacyManager = hostContext.GetService<IActionManifestManagerLegacy>();
    _newManager = hostContext.GetService<IActionManifestManager>();
}
/// <summary>
/// Loads the manifest via the legacy manager; when comparison is enabled the
/// new manager's result is converted to the legacy shape and compared.
/// </summary>
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
    return EvaluateAndCompare(
        executionContext,
        "Load",
        () => _legacyManager.Load(executionContext, manifestFile),
        () => ConvertToLegacyActionDefinitionData(_newManager.Load(executionContext, manifestFile)),
        (legacyResult, newResult) => CompareActionDefinition(legacyResult, newResult));
}
/// <summary>
/// Evaluates composite outputs via the legacy manager; tokens and expression
/// values are converted (JSON round-trip) for the new manager when comparing.
/// </summary>
public DictionaryContextData EvaluateCompositeOutputs(
    IExecutionContext executionContext,
    TemplateToken token,
    IDictionary<string, PipelineContextData> extraExpressionValues)
{
    return EvaluateAndCompare(
        executionContext,
        "EvaluateCompositeOutputs",
        () => _legacyManager.EvaluateCompositeOutputs(executionContext, token, extraExpressionValues),
        () => ConvertToLegacyContextData<DictionaryContextData>(_newManager.EvaluateCompositeOutputs(executionContext, ConvertToNewToken(token), ConvertToNewExpressionValues(extraExpressionValues))),
        (legacyResult, newResult) => CompareDictionaryContextData(legacyResult, newResult));
}
/// <summary>
/// Evaluates container args via the legacy manager; the sequence token is
/// converted to the new parser's token type when comparing.
/// </summary>
public List<string> EvaluateContainerArguments(
    IExecutionContext executionContext,
    SequenceToken token,
    IDictionary<string, PipelineContextData> extraExpressionValues)
{
    return EvaluateAndCompare(
        executionContext,
        "EvaluateContainerArguments",
        () => _legacyManager.EvaluateContainerArguments(executionContext, token, extraExpressionValues),
        () => _newManager.EvaluateContainerArguments(executionContext, ConvertToNewToken(token) as GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.SequenceToken, ConvertToNewExpressionValues(extraExpressionValues)),
        (legacyResult, newResult) => CompareLists(legacyResult, newResult, "ContainerArguments"));
}
/// <summary>
/// Evaluates container env vars via the legacy manager; the mapping token is
/// converted to the new parser's token type when comparing.
/// </summary>
public Dictionary<string, string> EvaluateContainerEnvironment(
    IExecutionContext executionContext,
    MappingToken token,
    IDictionary<string, PipelineContextData> extraExpressionValues)
{
    return EvaluateAndCompare(
        executionContext,
        "EvaluateContainerEnvironment",
        () => _legacyManager.EvaluateContainerEnvironment(executionContext, token, extraExpressionValues),
        () => _newManager.EvaluateContainerEnvironment(executionContext, ConvertToNewToken(token) as GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.MappingToken, ConvertToNewExpressionValues(extraExpressionValues)),
        (legacyResult, newResult) => {
            // CompareDictionaries needs a trace writer, unlike the other comparers.
            var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
            return CompareDictionaries(trace, legacyResult, newResult, "ContainerEnvironment");
        });
}
/// <summary>
/// Evaluates one input's default value via the legacy manager; results are
/// compared with ordinal string equality when comparison is enabled.
/// </summary>
public string EvaluateDefaultInput(
    IExecutionContext executionContext,
    string inputName,
    TemplateToken token)
{
    return EvaluateAndCompare(
        executionContext,
        "EvaluateDefaultInput",
        () => _legacyManager.EvaluateDefaultInput(executionContext, inputName, token),
        () => _newManager.EvaluateDefaultInput(executionContext, inputName, ConvertToNewToken(token)),
        (legacyResult, newResult) => string.Equals(legacyResult, newResult, StringComparison.Ordinal));
}
// Conversion helper methods
/// <summary>
/// Maps a definition produced by the new parser onto the legacy
/// ActionDefinitionData shape so it can be compared field-by-field.
/// </summary>
/// <returns>The converted definition, or null when <paramref name="newData"/> is null.</returns>
private ActionDefinitionData ConvertToLegacyActionDefinitionData(ActionDefinitionDataNew newData)
{
    if (newData is null)
    {
        return null;
    }

    var legacy = new ActionDefinitionData();
    legacy.Name = newData.Name;
    legacy.Description = newData.Description;
    // Tokens and execution data need type conversion; plain fields copy across.
    legacy.Inputs = ConvertToLegacyToken<MappingToken>(newData.Inputs);
    legacy.Deprecated = newData.Deprecated;
    legacy.Execution = ConvertToLegacyExecution(newData.Execution);
    return legacy;
}
/// <summary>
/// Converts "New"-flavored execution data (container/composite) into the
/// legacy equivalents; Node.js and plugin data carry no new token types and
/// pass through unchanged.
/// </summary>
/// <returns>The converted execution data, or null when <paramref name="execution"/> is null.</returns>
private ActionExecutionData ConvertToLegacyExecution(ActionExecutionData execution)
{
    if (execution is null)
    {
        return null;
    }

    // Dispatch on the concrete "New" type; anything else is already legacy-shaped.
    return execution switch
    {
        ContainerActionExecutionDataNew container => new ContainerActionExecutionData
        {
            Image = container.Image,
            EntryPoint = container.EntryPoint,
            Arguments = ConvertToLegacyToken<SequenceToken>(container.Arguments),
            Environment = ConvertToLegacyToken<MappingToken>(container.Environment),
            Pre = container.Pre,
            Post = container.Post,
            InitCondition = container.InitCondition,
            CleanupCondition = container.CleanupCondition
        },
        CompositeActionExecutionDataNew composite => new CompositeActionExecutionData
        {
            Steps = ConvertToLegacySteps(composite.Steps),
            Outputs = ConvertToLegacyToken<MappingToken>(composite.Outputs)
        },
        _ => execution,
    };
}
/// <summary>
/// Translates new-parser steps into legacy ActionStep instances by
/// round-tripping through JSON.
/// </summary>
/// <returns>The converted steps, or null when <paramref name="newSteps"/> is null.</returns>
private List<GitHub.DistributedTask.Pipelines.ActionStep> ConvertToLegacySteps(List<GitHub.Actions.WorkflowParser.IStep> newSteps)
{
    return newSteps == null
        ? null
        : StringUtil.ConvertFromJson<List<GitHub.DistributedTask.Pipelines.ActionStep>>(
            StringUtil.ConvertToJson(newSteps, Newtonsoft.Json.Formatting.None));
}
/// <summary>
/// Converts a new-parser template token into the requested legacy token type
/// by round-tripping through JSON.
/// </summary>
/// <returns>The converted token, or null when <paramref name="newToken"/> is null.</returns>
private T ConvertToLegacyToken<T>(GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken newToken) where T : TemplateToken
{
    return newToken == null
        ? null
        : StringUtil.ConvertFromJson<T>(
            StringUtil.ConvertToJson(newToken, Newtonsoft.Json.Formatting.None));
}
/// <summary>
/// Converts a legacy template token into the new parser's token type by
/// round-tripping through JSON.
/// </summary>
/// <returns>The converted token, or null when <paramref name="legacyToken"/> is null.</returns>
private GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken ConvertToNewToken(TemplateToken legacyToken)
{
    return legacyToken == null
        ? null
        : StringUtil.ConvertFromJson<GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken>(
            StringUtil.ConvertToJson(legacyToken, Newtonsoft.Json.Formatting.None));
}
/// <summary>
/// Converts legacy pipeline context values into the new parser's expression
/// data dictionary by round-tripping through JSON.
/// </summary>
/// <returns>The converted dictionary, or null when <paramref name="legacyValues"/> is null.</returns>
private IDictionary<string, GitHub.Actions.Expressions.Data.ExpressionData> ConvertToNewExpressionValues(IDictionary<string, PipelineContextData> legacyValues)
{
    return legacyValues == null
        ? null
        : StringUtil.ConvertFromJson<IDictionary<string, GitHub.Actions.Expressions.Data.ExpressionData>>(
            StringUtil.ConvertToJson(legacyValues, Newtonsoft.Json.Formatting.None));
}
/// <summary>
/// Converts new-parser expression data into the requested legacy
/// PipelineContextData type by round-tripping through JSON.
/// </summary>
/// <returns>The converted context data, or null when <paramref name="newData"/> is null.</returns>
private T ConvertToLegacyContextData<T>(GitHub.Actions.Expressions.Data.ExpressionData newData) where T : PipelineContextData
{
    return newData == null
        ? null
        : StringUtil.ConvertFromJson<T>(
            StringUtil.ConvertToJson(newData, Newtonsoft.Json.Formatting.None));
}
// Comparison helper methods
/// <summary>
/// Runs the legacy evaluator and, when the CompareWorkflowParser feature flag
/// or the ACTIONS_RUNNER_COMPARE_WORKFLOW_PARSER env var is enabled, also runs
/// the new evaluator and records telemetry on any result/exception mismatch.
/// The caller always gets the legacy result (or the legacy exception is
/// rethrown), so the new parser can never change job behavior.
/// </summary>
private TLegacy EvaluateAndCompare<TLegacy, TNew>(
    IExecutionContext context,
    string methodName,
    Func<TLegacy> legacyEvaluator,
    Func<TNew> newEvaluator,
    Func<TLegacy, TNew, bool> resultComparer)
{
    // Legacy only?
    if (!((context.Global.Variables.GetBoolean(Constants.Runner.Features.CompareWorkflowParser) ?? false)
        || StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("ACTIONS_RUNNER_COMPARE_WORKFLOW_PARSER"))))
    {
        return legacyEvaluator();
    }
    var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
    // Legacy evaluator
    // Legacy runs first; its exception is captured so comparison can still happen.
    var legacyException = default(Exception);
    var legacyResult = default(TLegacy);
    try
    {
        legacyResult = legacyEvaluator();
    }
    catch (Exception ex)
    {
        legacyException = ex;
    }
    // Compare with new evaluator
    // The whole comparison is wrapped so a comparison bug can never fail the job.
    try
    {
        ArgUtil.NotNull(context, nameof(context));
        trace.Info(methodName);
        // New evaluator
        var newException = default(Exception);
        var newResult = default(TNew);
        try
        {
            newResult = newEvaluator();
        }
        catch (Exception ex)
        {
            newException = ex;
        }
        // Compare results or exceptions
        if (legacyException != null || newException != null)
        {
            // Either one or both threw exceptions - compare them
            if (!CompareExceptions(trace, legacyException, newException))
            {
                trace.Info($"{methodName} exception mismatch");
                RecordMismatch(context, $"{methodName}");
            }
        }
        else
        {
            // Both succeeded - compare results
            // Skip comparison if new implementation returns null (not yet implemented)
            if (newResult != null && !resultComparer(legacyResult, newResult))
            {
                trace.Info($"{methodName} mismatch");
                RecordMismatch(context, $"{methodName}");
            }
        }
    }
    catch (Exception ex)
    {
        trace.Info($"Comparison failed: {ex.Message}");
        RecordComparisonError(context, $"{methodName}: {ex.Message}");
    }
    // Re-throw legacy exception if any
    // NOTE(review): rethrowing the captured exception resets its original
    // stack trace; ExceptionDispatchInfo.Capture(...).Throw() would preserve it.
    if (legacyException != null)
    {
        throw legacyException;
    }
    return legacyResult;
}
/// <summary>
/// Records a parser-comparison mismatch as job telemetry. Only the first
/// mismatch per job is recorded (guarded by HasActionManifestMismatch).
/// </summary>
private void RecordMismatch(IExecutionContext context, string methodName)
{
    if (context.Global.HasActionManifestMismatch)
    {
        return;
    }

    context.Global.HasActionManifestMismatch = true;
    context.Global.JobTelemetry.Add(new JobTelemetry
    {
        Type = JobTelemetryType.General,
        Message = $"ActionManifestMismatch: {methodName}",
    });
}
/// <summary>
/// Records a failure of the comparison machinery itself as job telemetry.
/// Shares the one-report-per-job guard with RecordMismatch.
/// </summary>
private void RecordComparisonError(IExecutionContext context, string errorDetails)
{
    if (context.Global.HasActionManifestMismatch)
    {
        return;
    }

    context.Global.HasActionManifestMismatch = true;
    context.Global.JobTelemetry.Add(new JobTelemetry
    {
        Type = JobTelemetryType.General,
        Message = $"ActionManifestComparisonError: {errorDetails}",
    });
}
/// <summary>
/// Deep-compares two action definitions: Name, Description, Inputs (via JSON
/// serialization), Deprecated map, and Execution data. Logs the first
/// differing field and returns false; true when everything matches.
/// </summary>
private bool CompareActionDefinition(ActionDefinitionData legacyResult, ActionDefinitionData newResult)
{
    var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
    if (legacyResult == null && newResult == null)
    {
        return true;
    }
    if (legacyResult == null || newResult == null)
    {
        trace.Info($"CompareActionDefinition mismatch - one result is null (legacy={legacyResult == null}, new={newResult == null})");
        return false;
    }
    if (!string.Equals(legacyResult.Name, newResult.Name, StringComparison.Ordinal))
    {
        trace.Info($"CompareActionDefinition mismatch - Name differs (legacy='{legacyResult.Name}', new='{newResult.Name}')");
        return false;
    }
    if (!string.Equals(legacyResult.Description, newResult.Description, StringComparison.Ordinal))
    {
        trace.Info($"CompareActionDefinition mismatch - Description differs (legacy='{legacyResult.Description}', new='{newResult.Description}')");
        return false;
    }
    // Compare Inputs token
    // Token trees have no structural Equals here, so compare JSON serializations.
    var legacyInputsJson = legacyResult.Inputs != null ? StringUtil.ConvertToJson(legacyResult.Inputs) : null;
    var newInputsJson = newResult.Inputs != null ? StringUtil.ConvertToJson(newResult.Inputs) : null;
    if (!string.Equals(legacyInputsJson, newInputsJson, StringComparison.Ordinal))
    {
        trace.Info($"CompareActionDefinition mismatch - Inputs differ");
        return false;
    }
    // Compare Deprecated
    if (!CompareDictionaries(trace, legacyResult.Deprecated, newResult.Deprecated, "Deprecated"))
    {
        return false;
    }
    // Compare Execution
    if (!CompareExecution(trace, legacyResult.Execution, newResult.Execution))
    {
        return false;
    }
    return true;
}
/// <summary>
/// Compares two execution-data instances: both-null is equal, one-null or a
/// concrete-type mismatch is unequal, otherwise dispatch to the type-specific
/// comparer. Unrecognized (but matching) types compare equal.
/// </summary>
private bool CompareExecution(Tracing trace, ActionExecutionData legacy, ActionExecutionData newExecution)
{
    if (legacy is null && newExecution is null)
    {
        return true;
    }
    if (legacy is null || newExecution is null)
    {
        trace.Info($"CompareExecution mismatch - one is null (legacy={legacy == null}, new={newExecution == null})");
        return false;
    }
    if (legacy.GetType() != newExecution.GetType())
    {
        trace.Info($"CompareExecution mismatch - different types (legacy={legacy.GetType().Name}, new={newExecution.GetType().Name})");
        return false;
    }

    // Types already match, so exactly one tuple pattern (or the default) applies.
    return (legacy, newExecution) switch
    {
        (NodeJSActionExecutionData legacyNode, NodeJSActionExecutionData newNode) => CompareNodeJSExecution(trace, legacyNode, newNode),
        (ContainerActionExecutionData legacyContainer, ContainerActionExecutionData newContainer) => CompareContainerExecution(trace, legacyContainer, newContainer),
        (CompositeActionExecutionData legacyComposite, CompositeActionExecutionData newComposite) => CompareCompositeExecution(trace, legacyComposite, newComposite),
        (PluginActionExecutionData legacyPlugin, PluginActionExecutionData newPlugin) => ComparePluginExecution(trace, legacyPlugin, newPlugin),
        _ => true,
    };
}
/// <summary>
/// Field-by-field ordinal comparison of Node.js execution data: NodeVersion,
/// Script, Pre, Post, InitCondition, CleanupCondition. Logs the first
/// differing field and returns false.
/// </summary>
private bool CompareNodeJSExecution(Tracing trace, NodeJSActionExecutionData legacy, NodeJSActionExecutionData newExecution)
{
    if (!string.Equals(legacy.NodeVersion, newExecution.NodeVersion, StringComparison.Ordinal))
    {
        trace.Info($"CompareNodeJSExecution mismatch - NodeVersion differs (legacy='{legacy.NodeVersion}', new='{newExecution.NodeVersion}')");
        return false;
    }
    if (!string.Equals(legacy.Script, newExecution.Script, StringComparison.Ordinal))
    {
        trace.Info($"CompareNodeJSExecution mismatch - Script differs (legacy='{legacy.Script}', new='{newExecution.Script}')");
        return false;
    }
    if (!string.Equals(legacy.Pre, newExecution.Pre, StringComparison.Ordinal))
    {
        trace.Info($"CompareNodeJSExecution mismatch - Pre differs");
        return false;
    }
    if (!string.Equals(legacy.Post, newExecution.Post, StringComparison.Ordinal))
    {
        trace.Info($"CompareNodeJSExecution mismatch - Post differs");
        return false;
    }
    if (!string.Equals(legacy.InitCondition, newExecution.InitCondition, StringComparison.Ordinal))
    {
        trace.Info($"CompareNodeJSExecution mismatch - InitCondition differs");
        return false;
    }
    if (!string.Equals(legacy.CleanupCondition, newExecution.CleanupCondition, StringComparison.Ordinal))
    {
        trace.Info($"CompareNodeJSExecution mismatch - CleanupCondition differs");
        return false;
    }
    return true;
}
private bool CompareContainerExecution(Tracing trace, ContainerActionExecutionData legacy, ContainerActionExecutionData newExecution)
{
// Scalar fields are compared ordinally; the first mismatch is traced and ends the comparison.
if (!string.Equals(legacy.Image, newExecution.Image, StringComparison.Ordinal))
{
trace.Info("CompareContainerExecution mismatch - Image differs");
return false;
}
if (!string.Equals(legacy.EntryPoint, newExecution.EntryPoint, StringComparison.Ordinal))
{
trace.Info("CompareContainerExecution mismatch - EntryPoint differs");
return false;
}
// Token-valued fields (Arguments/Environment) are compared by their JSON
// serialization; a null token stays null so null==null still matches.
var legacyArguments = legacy.Arguments == null ? null : StringUtil.ConvertToJson(legacy.Arguments);
var newArguments = newExecution.Arguments == null ? null : StringUtil.ConvertToJson(newExecution.Arguments);
if (!string.Equals(legacyArguments, newArguments, StringComparison.Ordinal))
{
trace.Info("CompareContainerExecution mismatch - Arguments differ");
return false;
}
var legacyEnvironment = legacy.Environment == null ? null : StringUtil.ConvertToJson(legacy.Environment);
var newEnvironment = newExecution.Environment == null ? null : StringUtil.ConvertToJson(newExecution.Environment);
if (!string.Equals(legacyEnvironment, newEnvironment, StringComparison.Ordinal))
{
trace.Info("CompareContainerExecution mismatch - Environment differs");
return false;
}
return true;
}
private bool CompareCompositeExecution(Tracing trace, CompositeActionExecutionData legacy, CompositeActionExecutionData newExecution)
{
// Steps are compared by count only; individual step contents are not inspected here.
var legacyStepCount = legacy.Steps?.Count;
var newStepCount = newExecution.Steps?.Count;
if (legacyStepCount != newStepCount)
{
trace.Info($"CompareCompositeExecution mismatch - Steps.Count differs (legacy={legacyStepCount}, new={newStepCount})");
return false;
}
// The Outputs token is compared by its JSON serialization (null stays null).
var legacyOutputs = legacy.Outputs == null ? null : StringUtil.ConvertToJson(legacy.Outputs);
var newOutputs = newExecution.Outputs == null ? null : StringUtil.ConvertToJson(newExecution.Outputs);
if (!string.Equals(legacyOutputs, newOutputs, StringComparison.Ordinal))
{
trace.Info("CompareCompositeExecution mismatch - Outputs differ");
return false;
}
return true;
}
private bool ComparePluginExecution(Tracing trace, PluginActionExecutionData legacy, PluginActionExecutionData newExecution)
{
// The plugin identifier is the only execution-relevant field for plugin actions.
var pluginsMatch = string.Equals(legacy.Plugin, newExecution.Plugin, StringComparison.Ordinal);
if (!pluginsMatch)
{
trace.Info("ComparePluginExecution mismatch - Plugin differs");
}
return pluginsMatch;
}
private bool CompareDictionaryContextData(DictionaryContextData legacy, DictionaryContextData newData)
{
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
// Null handling: both null is a match, exactly one null is a mismatch.
if (legacy == null || newData == null)
{
if (ReferenceEquals(legacy, newData))
{
return true;
}
trace.Info($"CompareDictionaryContextData mismatch - one is null (legacy={legacy == null}, new={newData == null})");
return false;
}
// Structural comparison via JSON serialization of both context trees.
if (string.Equals(StringUtil.ConvertToJson(legacy), StringUtil.ConvertToJson(newData), StringComparison.Ordinal))
{
return true;
}
trace.Info("CompareDictionaryContextData mismatch");
return false;
}
private bool CompareLists(IList<string> legacyList, IList<string> newList, string fieldName)
{
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
// Null handling: both null is a match, a single null is a mismatch.
if (legacyList == null && newList == null)
{
return true;
}
if (legacyList == null || newList == null)
{
trace.Info($"CompareLists mismatch - {fieldName} - one is null (legacy={legacyList == null}, new={newList == null})");
return false;
}
if (legacyList.Count != newList.Count)
{
trace.Info($"CompareLists mismatch - {fieldName}.Count differs (legacy={legacyList.Count}, new={newList.Count})");
return false;
}
// Element-wise ordinal comparison; report the first differing index.
var index = 0;
foreach (var legacyItem in legacyList)
{
var newItem = newList[index];
if (!string.Equals(legacyItem, newItem, StringComparison.Ordinal))
{
trace.Info($"CompareLists mismatch - {fieldName}[{index}] differs (legacy='{legacyItem}', new='{newItem}')");
return false;
}
index++;
}
return true;
}
private bool CompareDictionaries(Tracing trace, IDictionary<string, string> legacyDict, IDictionary<string, string> newDict, string fieldName)
{
// Null handling: both null is a match, a single null is a mismatch.
if (legacyDict == null && newDict == null)
{
return true;
}
if (legacyDict == null || newDict == null)
{
trace.Info($"CompareDictionaries mismatch - {fieldName} - one is null (legacy={legacyDict == null}, new={newDict == null})");
return false;
}
// When both are concrete Dictionary instances, their key comparers must also
// agree; a case-sensitivity difference would change lookup behavior even if
// the stored pairs look identical.
if (legacyDict is Dictionary<string, string> legacyConcrete
&& newDict is Dictionary<string, string> newConcrete
&& !object.Equals(legacyConcrete.Comparer, newConcrete.Comparer))
{
trace.Info($"CompareDictionaries mismatch - {fieldName} - different comparers (legacy={legacyConcrete.Comparer.GetType().Name}, new={newConcrete.Comparer.GetType().Name})");
return false;
}
if (legacyDict.Count != newDict.Count)
{
trace.Info($"CompareDictionaries mismatch - {fieldName}.Count differs (legacy={legacyDict.Count}, new={newDict.Count})");
return false;
}
// Counts match, so verifying every legacy entry exists in new (with an
// ordinally-equal value) is sufficient for full equality.
foreach (var (key, legacyValue) in legacyDict)
{
if (!newDict.TryGetValue(key, out var newValue))
{
trace.Info($"CompareDictionaries mismatch - {fieldName} - key '{key}' missing in new result");
return false;
}
if (!string.Equals(legacyValue, newValue, StringComparison.Ordinal))
{
trace.Info($"CompareDictionaries mismatch - {fieldName}['{key}'] differs (legacy='{legacyValue}', new='{newValue}')");
return false;
}
}
return true;
}
private bool CompareExceptions(Tracing trace, Exception legacyException, Exception newException)
{
// Null handling: both null is a match, a single null is a mismatch.
if (legacyException == null && newException == null)
{
return true;
}
if (legacyException == null || newException == null)
{
trace.Info($"CompareExceptions mismatch - one exception is null (legacy={legacyException == null}, new={newException == null})");
return false;
}
// Flatten each exception tree (including inner exceptions) into an ordered
// message list and compare the lists position by position.
var legacyMessages = GetExceptionMessages(legacyException);
var newMessages = GetExceptionMessages(newException);
if (legacyMessages.Count != newMessages.Count)
{
trace.Info($"CompareExceptions mismatch - different number of exception messages (legacy={legacyMessages.Count}, new={newMessages.Count})");
return false;
}
var level = 0;
foreach (var legacyMessage in legacyMessages)
{
var newMessage = newMessages[level];
if (!string.Equals(legacyMessage, newMessage, StringComparison.Ordinal))
{
trace.Info($"CompareExceptions mismatch - exception messages differ at level {level} (legacy='{legacyMessage}', new='{newMessage}')");
return false;
}
level++;
}
return true;
}
/// <summary>
/// Flattens an exception (and its inner exceptions, breadth-first) into an
/// ordered list of messages, capped at 50 entries as a failsafe against
/// pathological exception graphs. AggregateException contributes all of its
/// InnerExceptions; other exceptions contribute their single InnerException.
/// </summary>
private IList<string> GetExceptionMessages(Exception ex)
{
// Upper bound on collected messages; also bounds the work queue, since any
// item queued beyond remaining capacity could never be dequeued before the cap.
const int MaxMessages = 50;
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
var messages = new List<string>();
var toProcess = new Queue<Exception>();
toProcess.Enqueue(ex);
while (toProcess.Count > 0)
{
var current = toProcess.Dequeue();
// Only the initial argument can be null; enqueued children are null-checked below.
if (current == null) continue;
messages.Add(current.Message);
if (messages.Count >= MaxMessages)
{
// Failsafe: stop and return what we have.
trace.Info("CompareExceptions failsafe triggered - too many exceptions (50+)");
break;
}
// Special handling for AggregateException - enqueue all inner exceptions
if (current is AggregateException aggregateEx)
{
foreach (var innerEx in aggregateEx.InnerExceptions)
{
// Skip children that could never produce a message before the cap.
if (innerEx != null && messages.Count + toProcess.Count < MaxMessages)
{
toProcess.Enqueue(innerEx);
}
}
}
else if (current.InnerException != null)
{
toProcess.Enqueue(current.InnerException);
}
}
return messages;
}
}
}

View File

@@ -206,7 +206,7 @@ namespace GitHub.Runner.Worker
// Merge the default inputs from the definition
if (definition.Data?.Inputs != null)
{
var manifestManager = HostContext.GetService<IActionManifestManager>();
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
foreach (var input in definition.Data.Inputs)
{
string key = input.Key.AssertString("action input name").Value;

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,480 @@
using System;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using Newtonsoft.Json;
namespace GitHub.Runner.Worker.Dap
{
/// <summary>
/// DAP Server interface for handling Debug Adapter Protocol connections.
/// Resolved via the service locator; the default implementation is <see cref="DapServer"/>.
/// </summary>
[ServiceLocator(Default = typeof(DapServer))]
public interface IDapServer : IRunnerService, IDisposable
{
/// <summary>
/// Starts the DAP TCP server on the specified port.
/// Returns once the listener is started; client acceptance continues in the background.
/// </summary>
/// <param name="port">The port to listen on (default: 4711)</param>
/// <param name="cancellationToken">Cancellation token</param>
Task StartAsync(int port, CancellationToken cancellationToken);
/// <summary>
/// Blocks until a debug client connects.
/// </summary>
/// <param name="cancellationToken">Cancellation token</param>
Task WaitForConnectionAsync(CancellationToken cancellationToken);
/// <summary>
/// Stops the DAP server and closes all connections.
/// </summary>
Task StopAsync();
/// <summary>
/// Sets the debug session that will handle DAP requests.
/// Must be set before requests arrive; requests received without a session fail.
/// </summary>
/// <param name="session">The debug session</param>
void SetSession(IDapDebugSession session);
/// <summary>
/// Sends an event to the connected debug client.
/// Best-effort: no-op (with a warning) when no client is connected.
/// </summary>
/// <param name="evt">The event to send</param>
void SendEvent(Event evt);
/// <summary>
/// Gets whether a debug client is currently connected.
/// </summary>
bool IsConnected { get; }
}
/// <summary>
/// TCP server implementation of the Debug Adapter Protocol.
/// Handles message framing (Content-Length headers) and JSON serialization.
/// Single-client: only one connection is accepted per StartAsync call.
/// </summary>
public sealed class DapServer : RunnerService, IDapServer
{
private const string ContentLengthHeader = "Content-Length: ";
// NOTE(review): declared but not referenced in this class; ReadLineAsync
// detects the \r\n terminator byte-by-byte instead - confirm before removing.
private const string HeaderTerminator = "\r\n\r\n";
private TcpListener _listener;
private TcpClient _client;
private NetworkStream _stream;
private IDapDebugSession _session;
private CancellationTokenSource _cts;
private Task _messageLoopTask;
// Completed when a client connects; canceled/faulted when the accept is
// cancelled or fails. Re-created on each StartAsync.
private TaskCompletionSource<bool> _connectionTcs;
// Next outgoing DAP sequence number; guarded by _sendLock.
private int _nextSeq = 1;
// Serializes stream writes and sequence-number allocation across threads.
private readonly object _sendLock = new object();
private bool _disposed = false;
public bool IsConnected => _client?.Connected == true;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
Trace.Info("DapServer initialized");
}
// Stores the session that will handle incoming DAP requests.
public void SetSession(IDapDebugSession session)
{
_session = session;
Trace.Info("Debug session set");
}
// Binds a loopback-only listener and kicks off the background accept.
// Returns immediately after the listener starts.
public async Task StartAsync(int port, CancellationToken cancellationToken)
{
Trace.Info($"Starting DAP server on port {port}");
_cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
_connectionTcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
try
{
_listener = new TcpListener(IPAddress.Loopback, port);
_listener.Start();
Trace.Info($"DAP server listening on 127.0.0.1:{port}");
// Start accepting connections in the background
_ = AcceptConnectionAsync(_cts.Token);
}
catch (Exception ex)
{
Trace.Error($"Failed to start DAP server: {ex.Message}");
throw;
}
await Task.CompletedTask;
}
// Fire-and-forget accept loop body (single client). Outcomes are surfaced
// to WaitForConnectionAsync via _connectionTcs rather than thrown.
private async Task AcceptConnectionAsync(CancellationToken cancellationToken)
{
try
{
Trace.Info("Waiting for debug client connection...");
// Use cancellation-aware accept
using (cancellationToken.Register(() => _listener?.Stop()))
{
_client = await _listener.AcceptTcpClientAsync();
}
if (cancellationToken.IsCancellationRequested)
{
return;
}
_stream = _client.GetStream();
var remoteEndPoint = _client.Client.RemoteEndPoint;
Trace.Info($"Debug client connected from {remoteEndPoint}");
// Signal that connection is established
_connectionTcs.TrySetResult(true);
// Start processing messages
_messageLoopTask = ProcessMessagesAsync(_cts.Token);
}
catch (ObjectDisposedException) when (cancellationToken.IsCancellationRequested)
{
// Expected when cancellation stops the listener
Trace.Info("Connection accept cancelled");
_connectionTcs.TrySetCanceled();
}
catch (SocketException ex) when (cancellationToken.IsCancellationRequested)
{
// Expected when cancellation stops the listener
Trace.Info($"Connection accept cancelled: {ex.Message}");
_connectionTcs.TrySetCanceled();
}
catch (Exception ex)
{
Trace.Error($"Error accepting connection: {ex.Message}");
_connectionTcs.TrySetException(ex);
}
}
// Awaits the accept outcome; throws TaskCanceledException if the caller's
// token fires first or the accept was cancelled.
public async Task WaitForConnectionAsync(CancellationToken cancellationToken)
{
Trace.Info("Waiting for debug client to connect...");
using (cancellationToken.Register(() => _connectionTcs.TrySetCanceled()))
{
await _connectionTcs.Task;
}
Trace.Info("Debug client connected");
}
// Cancels the message loop, drains it, then tears down stream/client/listener.
public async Task StopAsync()
{
Trace.Info("Stopping DAP server");
_cts?.Cancel();
// Wait for message loop to complete
if (_messageLoopTask != null)
{
try
{
await _messageLoopTask;
}
catch (OperationCanceledException)
{
// Expected
}
catch (Exception ex)
{
Trace.Warning($"Message loop ended with error: {ex.Message}");
}
}
// Clean up resources
_stream?.Close();
_client?.Close();
_listener?.Stop();
Trace.Info("DAP server stopped");
}
// Best-effort event delivery: drops the event (with a warning) when no
// client is connected; send failures are logged, not thrown.
public void SendEvent(Event evt)
{
if (!IsConnected)
{
Trace.Warning($"Cannot send event '{evt.EventType}': no client connected");
return;
}
try
{
lock (_sendLock)
{
evt.Seq = _nextSeq++;
SendMessageInternal(evt);
}
Trace.Info($"Sent event: {evt.EventType}");
}
catch (Exception ex)
{
Trace.Error($"Failed to send event '{evt.EventType}': {ex.Message}");
}
}
// Sequentially reads and dispatches framed messages until cancellation,
// disconnect, or an unrecoverable error.
private async Task ProcessMessagesAsync(CancellationToken cancellationToken)
{
Trace.Info("Starting DAP message processing loop");
try
{
while (!cancellationToken.IsCancellationRequested && IsConnected)
{
var json = await ReadMessageAsync(cancellationToken);
if (json == null)
{
Trace.Info("Client disconnected (end of stream)");
break;
}
await ProcessMessageAsync(json, cancellationToken);
}
}
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
{
Trace.Info("Message processing cancelled");
}
catch (IOException ex)
{
Trace.Info($"Connection closed: {ex.Message}");
}
catch (Exception ex)
{
Trace.Error($"Error in message loop: {ex}");
}
Trace.Info("DAP message processing loop ended");
}
// Parses one JSON payload as a DAP request, dispatches it to the session,
// and writes the response. Malformed JSON is logged and dropped; handler
// exceptions produce an error response when the request was parsed.
private async Task ProcessMessageAsync(string json, CancellationToken cancellationToken)
{
Request request = null;
try
{
// Parse the incoming message
request = JsonConvert.DeserializeObject<Request>(json);
if (request == null || request.Type != "request")
{
Trace.Warning($"Received non-request message: {json}");
return;
}
Trace.Info($"Received request: seq={request.Seq}, command={request.Command}");
// Dispatch to session for handling
if (_session == null)
{
Trace.Error("No debug session configured");
SendErrorResponse(request, "No debug session configured");
return;
}
var response = await _session.HandleRequestAsync(request);
// Correlate the response to its request per the DAP envelope fields.
response.RequestSeq = request.Seq;
response.Command = request.Command;
response.Type = "response";
lock (_sendLock)
{
response.Seq = _nextSeq++;
SendMessageInternal(response);
}
Trace.Info($"Sent response: seq={response.Seq}, command={response.Command}, success={response.Success}");
}
catch (JsonException ex)
{
Trace.Error($"Failed to parse request: {ex.Message}");
Trace.Error($"JSON: {json}");
}
catch (Exception ex)
{
Trace.Error($"Error processing request: {ex}");
if (request != null)
{
SendErrorResponse(request, ex.Message);
}
}
}
// Builds and sends a failed DAP response carrying a user-visible error body.
private void SendErrorResponse(Request request, string message)
{
var response = new Response
{
Type = "response",
RequestSeq = request.Seq,
Command = request.Command,
Success = false,
Message = message,
Body = new ErrorResponseBody
{
Error = new Message
{
Id = 1,
Format = message,
ShowUser = true
}
}
};
lock (_sendLock)
{
response.Seq = _nextSeq++;
SendMessageInternal(response);
}
}
/// <summary>
/// Reads a DAP message from the stream.
/// DAP uses HTTP-like message framing: Content-Length: N\r\n\r\n{json}
/// Returns null on a clean end of stream; throws InvalidDataException on a
/// malformed or missing Content-Length header.
/// </summary>
private async Task<string> ReadMessageAsync(CancellationToken cancellationToken)
{
// Read headers until we find Content-Length
var headerBuilder = new StringBuilder();
int contentLength = -1;
while (true)
{
var line = await ReadLineAsync(cancellationToken);
if (line == null)
{
// End of stream
return null;
}
if (line.Length == 0)
{
// Empty line marks end of headers
break;
}
// NOTE(review): headerBuilder accumulates headers but is never read afterwards.
headerBuilder.AppendLine(line);
if (line.StartsWith(ContentLengthHeader, StringComparison.OrdinalIgnoreCase))
{
var lengthStr = line.Substring(ContentLengthHeader.Length).Trim();
if (!int.TryParse(lengthStr, out contentLength))
{
throw new InvalidDataException($"Invalid Content-Length: {lengthStr}");
}
}
}
if (contentLength < 0)
{
throw new InvalidDataException("Missing Content-Length header");
}
// Read the JSON body
var buffer = new byte[contentLength];
var totalRead = 0;
// Loop because ReadAsync may return fewer bytes than requested.
while (totalRead < contentLength)
{
var bytesRead = await _stream.ReadAsync(buffer, totalRead, contentLength - totalRead, cancellationToken);
if (bytesRead == 0)
{
throw new EndOfStreamException("Connection closed while reading message body");
}
totalRead += bytesRead;
}
var json = Encoding.UTF8.GetString(buffer);
Trace.Verbose($"Received: {json}");
return json;
}
/// <summary>
/// Reads a line from the stream (terminated by \r\n).
/// Reads one byte at a time; a bare \n without a preceding \r is kept as
/// part of the line rather than treated as a terminator. Returns null on
/// end of stream with no buffered characters.
/// </summary>
private async Task<string> ReadLineAsync(CancellationToken cancellationToken)
{
var lineBuilder = new StringBuilder();
var buffer = new byte[1];
var previousWasCr = false;
while (true)
{
var bytesRead = await _stream.ReadAsync(buffer, 0, 1, cancellationToken);
if (bytesRead == 0)
{
// End of stream
return lineBuilder.Length > 0 ? lineBuilder.ToString() : null;
}
var c = (char)buffer[0];
if (c == '\n' && previousWasCr)
{
// Found \r\n, return the line (without the \r)
if (lineBuilder.Length > 0 && lineBuilder[lineBuilder.Length - 1] == '\r')
{
lineBuilder.Length--;
}
return lineBuilder.ToString();
}
previousWasCr = (c == '\r');
lineBuilder.Append(c);
}
}
/// <summary>
/// Sends a DAP message to the stream with Content-Length framing.
/// Must be called within the _sendLock.
/// Nulls are omitted from the serialized payload.
/// </summary>
private void SendMessageInternal(ProtocolMessage message)
{
var json = JsonConvert.SerializeObject(message, new JsonSerializerSettings
{
NullValueHandling = NullValueHandling.Ignore
});
var bodyBytes = Encoding.UTF8.GetBytes(json);
// Content-Length counts UTF-8 bytes of the body, not characters.
var header = $"Content-Length: {bodyBytes.Length}\r\n\r\n";
var headerBytes = Encoding.UTF8.GetBytes(header);
_stream.Write(headerBytes, 0, headerBytes.Length);
_stream.Write(bodyBytes, 0, bodyBytes.Length);
_stream.Flush();
Trace.Verbose($"Sent: {json}");
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
// Cancels background work and disposes the network resources. Unlike
// StopAsync, does not await the message loop.
private void Dispose(bool disposing)
{
if (_disposed)
{
return;
}
if (disposing)
{
_cts?.Cancel();
_stream?.Dispose();
_client?.Dispose();
_listener?.Stop();
_cts?.Dispose();
}
_disposed = true;
}
}
}

View File

@@ -0,0 +1,293 @@
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.Runner.Common;
namespace GitHub.Runner.Worker.Dap
{
/// <summary>
/// Provides DAP variable information from the execution context.
/// Maps workflow contexts (github, env, runner, job, steps, secrets) to DAP scopes and variables.
/// Scope references use a fixed range (1-100); container expansions get
/// dynamically allocated references starting at 101.
/// </summary>
public sealed class DapVariableProvider
{
// Well-known scope names that map to top-level contexts
private static readonly string[] ScopeNames = { "github", "env", "runner", "job", "steps", "secrets", "inputs", "vars", "matrix", "needs" };
// Reserved variable reference ranges for scopes (1-100)
private const int ScopeReferenceBase = 1;
private const int ScopeReferenceMax = 100;
// Dynamic variable references start after scope range
private const int DynamicReferenceBase = 101;
private readonly IHostContext _hostContext;
// Maps a dynamically issued variablesReference to the container it expands
// and the expression path used to build its children's evaluate names.
private readonly Dictionary<int, (PipelineContextData Data, string Path)> _variableReferences = new();
private int _nextVariableReference = DynamicReferenceBase;
public DapVariableProvider(IHostContext hostContext)
{
_hostContext = hostContext;
}
/// <summary>
/// Resets the variable reference state. Call this when the execution context changes.
/// </summary>
public void Reset()
{
_variableReferences.Clear();
_nextVariableReference = DynamicReferenceBase;
}
/// <summary>
/// Gets the list of scopes for a given execution context.
/// Each scope represents a top-level context like 'github', 'env', etc.
/// Only contexts present (and non-null) in ExpressionValues are returned.
/// </summary>
public List<Scope> GetScopes(IExecutionContext context, int frameId)
{
var scopes = new List<Scope>();
if (context?.ExpressionValues == null)
{
return scopes;
}
for (int i = 0; i < ScopeNames.Length; i++)
{
var scopeName = ScopeNames[i];
if (context.ExpressionValues.TryGetValue(scopeName, out var value) && value != null)
{
// Scope references are fixed by position in ScopeNames.
var variablesRef = ScopeReferenceBase + i;
var scope = new Scope
{
Name = scopeName,
VariablesReference = variablesRef,
Expensive = false,
// Secrets get a special presentation hint
PresentationHint = scopeName == "secrets" ? "registers" : null
};
// Count named variables if it's a dictionary
if (value is DictionaryContextData dict)
{
scope.NamedVariables = dict.Count;
}
else if (value is CaseSensitiveDictionaryContextData csDict)
{
scope.NamedVariables = csDict.Count;
}
scopes.Add(scope);
}
}
return scopes;
}
/// <summary>
/// Gets variables for a given variable reference.
/// Accepts either a fixed scope reference (1-100) or a dynamic reference
/// previously issued for a nested container. Returns an empty list for
/// unknown references.
/// </summary>
public List<Variable> GetVariables(IExecutionContext context, int variablesReference)
{
var variables = new List<Variable>();
if (context?.ExpressionValues == null)
{
return variables;
}
PipelineContextData data = null;
string basePath = null;
bool isSecretsScope = false;
// Check if this is a scope reference (1-100)
if (variablesReference >= ScopeReferenceBase && variablesReference <= ScopeReferenceMax)
{
var scopeIndex = variablesReference - ScopeReferenceBase;
if (scopeIndex < ScopeNames.Length)
{
var scopeName = ScopeNames[scopeIndex];
isSecretsScope = scopeName == "secrets";
if (context.ExpressionValues.TryGetValue(scopeName, out data))
{
basePath = scopeName;
}
}
}
// Check dynamic references
else if (_variableReferences.TryGetValue(variablesReference, out var refData))
{
data = refData.Data;
basePath = refData.Path;
// Check if we're inside the secrets scope
isSecretsScope = basePath?.StartsWith("secrets", StringComparison.OrdinalIgnoreCase) == true;
}
if (data == null)
{
return variables;
}
// Convert the data to variables
ConvertToVariables(data, basePath, isSecretsScope, variables);
return variables;
}
/// <summary>
/// Converts PipelineContextData to DAP Variable objects.
/// </summary>
private void ConvertToVariables(PipelineContextData data, string basePath, bool isSecretsScope, List<Variable> variables)
{
switch (data)
{
case DictionaryContextData dict:
ConvertDictionaryToVariables(dict, basePath, isSecretsScope, variables);
break;
case CaseSensitiveDictionaryContextData csDict:
ConvertCaseSensitiveDictionaryToVariables(csDict, basePath, isSecretsScope, variables);
break;
case ArrayContextData array:
ConvertArrayToVariables(array, basePath, isSecretsScope, variables);
break;
default:
// Scalar value - shouldn't typically get here for a container
break;
}
}
// Emits one Variable per dictionary entry.
private void ConvertDictionaryToVariables(DictionaryContextData dict, string basePath, bool isSecretsScope, List<Variable> variables)
{
foreach (var pair in dict)
{
var variable = CreateVariable(pair.Key, pair.Value, basePath, isSecretsScope);
variables.Add(variable);
}
}
// Emits one Variable per case-sensitive dictionary entry.
private void ConvertCaseSensitiveDictionaryToVariables(CaseSensitiveDictionaryContextData dict, string basePath, bool isSecretsScope, List<Variable> variables)
{
foreach (var pair in dict)
{
var variable = CreateVariable(pair.Key, pair.Value, basePath, isSecretsScope);
variables.Add(variable);
}
}
// Emits one Variable per array element, displayed as "[i]".
private void ConvertArrayToVariables(ArrayContextData array, string basePath, bool isSecretsScope, List<Variable> variables)
{
for (int i = 0; i < array.Count; i++)
{
// CreateVariable already sets Name from the "[i]" label.
var variable = CreateVariable($"[{i}]", array[i], basePath, isSecretsScope);
variables.Add(variable);
}
}
// Builds a single DAP Variable for one named child of a container.
// Secrets (and anything under the secrets scope) are always redacted;
// other string values pass through the secret masker.
private Variable CreateVariable(string name, PipelineContextData value, string basePath, bool isSecretsScope)
{
var childPath = BuildChildPath(basePath, name);
var variable = new Variable
{
Name = name,
EvaluateName = $"${{{{ {childPath} }}}}"
};
if (value == null)
{
variable.Value = "null";
variable.Type = "null";
variable.VariablesReference = 0;
return variable;
}
switch (value)
{
case StringContextData str:
if (isSecretsScope)
{
// Always mask secrets regardless of value
variable.Value = "[REDACTED]";
}
else
{
// Mask any secret values that might be in non-secret contexts
variable.Value = MaskSecrets(str.Value);
}
variable.Type = "string";
variable.VariablesReference = 0;
break;
case NumberContextData num:
variable.Value = num.ToString();
variable.Type = "number";
variable.VariablesReference = 0;
break;
case BooleanContextData boolVal:
variable.Value = boolVal.Value ? "true" : "false";
variable.Type = "boolean";
variable.VariablesReference = 0;
break;
case DictionaryContextData dict:
variable.Value = $"Object ({dict.Count} properties)";
variable.Type = "object";
variable.VariablesReference = RegisterVariableReference(dict, childPath);
variable.NamedVariables = dict.Count;
break;
case CaseSensitiveDictionaryContextData csDict:
variable.Value = $"Object ({csDict.Count} properties)";
variable.Type = "object";
variable.VariablesReference = RegisterVariableReference(csDict, childPath);
variable.NamedVariables = csDict.Count;
break;
case ArrayContextData array:
variable.Value = $"Array ({array.Count} items)";
variable.Type = "array";
variable.VariablesReference = RegisterVariableReference(array, childPath);
variable.IndexedVariables = array.Count;
break;
default:
// Unknown type - convert to string representation
var rawValue = value.ToJToken()?.ToString() ?? "unknown";
variable.Value = MaskSecrets(rawValue);
variable.Type = value.GetType().Name;
variable.VariablesReference = 0;
break;
}
return variable;
}
/// <summary>
/// Builds the expression path for a child of <paramref name="basePath"/>.
/// Properties use dot access ("github.event"); array indices ("[0]") are
/// concatenated without a dot so the resulting evaluate name (e.g.
/// "steps[0]") stays valid expression syntax - previously this produced
/// the invalid form "steps.[0]".
/// </summary>
private static string BuildChildPath(string basePath, string name)
{
if (string.IsNullOrEmpty(basePath))
{
return name;
}
return name.StartsWith("[", StringComparison.Ordinal) ? basePath + name : $"{basePath}.{name}";
}
/// <summary>
/// Registers a nested variable reference and returns its ID.
/// </summary>
private int RegisterVariableReference(PipelineContextData data, string path)
{
var reference = _nextVariableReference++;
_variableReferences[reference] = (data, path);
return reference;
}
/// <summary>
/// Masks any secret values in the string using the host context's secret masker.
/// </summary>
private string MaskSecrets(string value)
{
if (string.IsNullOrEmpty(value))
{
return value ?? string.Empty;
}
return _hostContext.SecretMasker.MaskSecrets(value);
}
}
}

View File

@@ -0,0 +1,87 @@
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
namespace GitHub.Runner.Worker.Dap
{
/// <summary>
/// Represents a snapshot of job state captured just before a step executes.
/// Created when user issues next/continue command, after any REPL modifications.
/// Used for step-back (time-travel) debugging.
/// </summary>
public sealed class StepCheckpoint
{
/// <summary>
/// Index of this checkpoint in the checkpoints list.
/// Used when restoring to identify which checkpoint to restore to.
/// </summary>
public int CheckpointIndex { get; set; }
/// <summary>
/// Zero-based index of the step in the job.
/// </summary>
public int StepIndex { get; set; }
/// <summary>
/// Display name of the step this checkpoint was created for.
/// </summary>
public string StepDisplayName { get; set; }
/// <summary>
/// Snapshot of Global.EnvironmentVariables.
/// </summary>
public Dictionary<string, string> EnvironmentVariables { get; set; }
/// <summary>
/// Snapshot of ExpressionValues["env"] context data.
/// </summary>
public Dictionary<string, string> EnvContextData { get; set; }
/// <summary>
/// Snapshot of Global.PrependPath.
/// </summary>
public List<string> PrependPath { get; set; }
/// <summary>
/// Snapshot of job result.
/// </summary>
public TaskResult? JobResult { get; set; }
/// <summary>
/// Snapshot of job status.
/// </summary>
public ActionResult? JobStatus { get; set; }
/// <summary>
/// Snapshot of steps context (outputs, outcomes, conclusions).
/// Key is "{scopeName}/{stepName}", value is the step's state.
/// </summary>
public Dictionary<string, StepStateSnapshot> StepsSnapshot { get; set; }
/// <summary>
/// The step that was about to execute (for re-running).
/// NOTE(review): this appears to be a live IStep reference, not a deep copy - confirm
/// that re-running after mutation behaves as intended.
/// </summary>
public IStep CurrentStep { get; set; }
/// <summary>
/// Steps remaining in the queue after CurrentStep.
/// </summary>
public List<IStep> RemainingSteps { get; set; }
/// <summary>
/// When this checkpoint was created.
/// </summary>
public DateTime CreatedAt { get; set; }
}
/// <summary>
/// Snapshot of a single step's state in the steps context.
/// </summary>
public sealed class StepStateSnapshot
{
/// <summary>
/// The step's outcome (result before continue-on-error is applied).
/// </summary>
public ActionResult? Outcome { get; set; }
/// <summary>
/// The step's conclusion (final result after continue-on-error is applied).
/// </summary>
public ActionResult? Conclusion { get; set; }
/// <summary>
/// The step's output name/value pairs.
/// </summary>
public Dictionary<string, string> Outputs { get; set; }
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
@@ -95,6 +95,7 @@ namespace GitHub.Runner.Worker
// timeline record update methods
void Start(string currentOperation = null);
TaskResult Complete(TaskResult? result = null, string currentOperation = null, string resultCode = null);
void ResetForRerun();
void SetEnvContext(string name, string value);
void SetRunnerContext(string name, string value);
string GetGitHubContext(string name);
@@ -545,6 +546,29 @@ namespace GitHub.Runner.Worker
return Result.Value;
}
/// <summary>
/// Resets the execution context for re-running (e.g., after step-back in DAP debugging).
/// Creates a new CancellationTokenSource since the previous one was disposed in Complete().
/// Clears the timeline record's completion state and the cached results so the
/// step can execute again from a pending state.
/// </summary>
public void ResetForRerun()
{
// Create a new CancellationTokenSource since the old one was disposed
_cancellationTokenSource = new CancellationTokenSource();
// Reset record state to allow re-execution
_record.State = TimelineRecordState.Pending;
_record.FinishTime = null;
_record.PercentComplete = 0;
_record.ResultCode = null;
// Reset result
Result = null;
Outcome = null;
// Reset the force completed task
// (a fresh TCS so a prior ForceComplete cannot短-circuit the re-run is created here)
_forceCompleted = new TaskCompletionSource<int>();
}
public void UpdateGlobalStepsContext()
{
// Skip if generated context name. Generated context names start with "__". After 3.2 the server will never send an empty context name.
@@ -1397,7 +1421,7 @@ namespace GitHub.Runner.Worker
public static IPipelineTemplateEvaluator ToPipelineTemplateEvaluator(this IExecutionContext context, ObjectTemplating.ITraceWriter traceWriter = null)
{
// Create wrapper?
if ((context.Global.Variables.GetBoolean(Constants.Runner.Features.CompareTemplateEvaluator) ?? false) || StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("ACTIONS_RUNNER_COMPARE_TEMPLATE_EVALUATOR")))
if ((context.Global.Variables.GetBoolean(Constants.Runner.Features.CompareWorkflowParser) ?? false) || StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("ACTIONS_RUNNER_COMPARE_WORKFLOW_PARSER")))
{
return (context as ExecutionContext).ToPipelineTemplateEvaluatorInternal(traceWriter);
}

View File

@@ -11,10 +11,5 @@ namespace GitHub.Runner.Worker
var isContainerHooksPathSet = !string.IsNullOrEmpty(Environment.GetEnvironmentVariable(Constants.Hooks.ContainerHooksPath));
return isContainerHookFeatureFlagSet && isContainerHooksPathSet;
}
public static bool IsContainerActionRunnerTempEnabled(Variables variables)
{
return variables?.GetBoolean(Constants.Runner.Features.ContainerActionRunnerTemp) ?? false;
}
}
}

View File

@@ -30,5 +30,6 @@ namespace GitHub.Runner.Worker
public string InfrastructureFailureCategory { get; set; }
public JObject ContainerHookState { get; set; }
public bool HasTemplateEvaluatorMismatch { get; set; }
public bool HasActionManifestMismatch { get; set; }
}
}

View File

@@ -187,7 +187,7 @@ namespace GitHub.Runner.Worker.Handlers
if (Data.Outputs != null)
{
// Evaluate the outputs in the steps context to easily retrieve the values
var actionManifestManager = HostContext.GetService<IActionManifestManager>();
var actionManifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
// Format ExpressionValues to Dictionary<string, PipelineContextData>
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);

View File

@@ -135,7 +135,7 @@ namespace GitHub.Runner.Worker.Handlers
var extraExpressionValues = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
extraExpressionValues["inputs"] = inputsContext;
var manifestManager = HostContext.GetService<IActionManifestManager>();
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
if (Data.Arguments != null)
{
container.ContainerEntryPointArgs = "";
@@ -191,19 +191,13 @@ namespace GitHub.Runner.Worker.Handlers
ArgUtil.Directory(tempWorkflowDirectory, nameof(tempWorkflowDirectory));
container.MountVolumes.Add(new MountVolume("/var/run/docker.sock", "/var/run/docker.sock"));
if (FeatureManager.IsContainerActionRunnerTempEnabled(ExecutionContext.Global.Variables))
{
container.MountVolumes.Add(new MountVolume(tempDirectory, "/github/runner_temp"));
}
container.MountVolumes.Add(new MountVolume(tempDirectory, "/github/runner_temp"));
container.MountVolumes.Add(new MountVolume(tempHomeDirectory, "/github/home"));
container.MountVolumes.Add(new MountVolume(tempWorkflowDirectory, "/github/workflow"));
container.MountVolumes.Add(new MountVolume(tempFileCommandDirectory, "/github/file_commands"));
container.MountVolumes.Add(new MountVolume(defaultWorkingDirectory, "/github/workspace"));
if (FeatureManager.IsContainerActionRunnerTempEnabled(ExecutionContext.Global.Variables))
{
container.AddPathTranslateMapping(tempDirectory, "/github/runner_temp");
}
container.AddPathTranslateMapping(tempDirectory, "/github/runner_temp");
container.AddPathTranslateMapping(tempHomeDirectory, "/github/home");
container.AddPathTranslateMapping(tempWorkflowDirectory, "/github/workflow");
container.AddPathTranslateMapping(tempFileCommandDirectory, "/github/file_commands");
@@ -245,6 +239,14 @@ namespace GitHub.Runner.Worker.Handlers
Environment["ACTIONS_RESULTS_URL"] = resultsUrl;
}
if (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.SetOrchestrationIdEnvForActions) ?? false)
{
if (ExecutionContext.Global.Variables.TryGetValue(Constants.Variables.System.OrchestrationId, out var orchestrationId) && !string.IsNullOrEmpty(orchestrationId))
{
Environment["ACTIONS_ORCHESTRATION_ID"] = orchestrationId;
}
}
foreach (var variable in this.Environment)
{
container.ContainerEnvironmentVariables[variable.Key] = container.TranslateToContainerPath(variable.Value);

View File

@@ -77,6 +77,14 @@ namespace GitHub.Runner.Worker.Handlers
Environment["ACTIONS_CACHE_SERVICE_V2"] = bool.TrueString;
}
if (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.SetOrchestrationIdEnvForActions) ?? false)
{
if (ExecutionContext.Global.Variables.TryGetValue(Constants.Variables.System.OrchestrationId, out var orchestrationId) && !string.IsNullOrEmpty(orchestrationId))
{
Environment["ACTIONS_ORCHESTRATION_ID"] = orchestrationId;
}
}
// Resolve the target script.
string target = null;
if (stage == ActionRunStage.Main)

View File

@@ -318,6 +318,14 @@ namespace GitHub.Runner.Worker.Handlers
Environment["ACTIONS_ID_TOKEN_REQUEST_TOKEN"] = systemConnection.Authorization.Parameters[EndpointAuthorizationParameters.AccessToken];
}
if (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.SetOrchestrationIdEnvForActions) ?? false)
{
if (ExecutionContext.Global.Variables.TryGetValue(Constants.Variables.System.OrchestrationId, out var orchestrationId) && !string.IsNullOrEmpty(orchestrationId))
{
Environment["ACTIONS_ORCHESTRATION_ID"] = orchestrationId;
}
}
ExecutionContext.Debug($"{fileName} {arguments}");
Inputs.TryGetValue("standardInInput", out var standardInInput);

View File

@@ -112,6 +112,13 @@ namespace GitHub.Runner.Worker
groupName = "Machine Setup Info";
}
// not output internal groups
if (groupName.StartsWith("_internal_", StringComparison.OrdinalIgnoreCase))
{
jobContext.Global.JobTelemetry.Add(new JobTelemetry() { Type = JobTelemetryType.General, Message = info.Detail });
continue;
}
context.Output($"##[group]{groupName}");
var multiLines = info.Detail.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -13,6 +13,7 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Dap;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using Sdk.RSWebApi.Contracts;
@@ -112,6 +113,8 @@ namespace GitHub.Runner.Worker
IExecutionContext jobContext = null;
CancellationTokenRegistration? runnerShutdownRegistration = null;
IDapServer dapServer = null;
CancellationTokenRegistration? dapCancellationRegistration = null;
try
{
// Create the job execution context.
@@ -159,6 +162,61 @@ namespace GitHub.Runner.Worker
if (jobContext.Global.WriteDebug)
{
jobContext.SetRunnerContext("debug", "1");
// Start DAP server for interactive debugging
// This allows debugging workflow jobs with DAP-compatible editors (nvim-dap, VS Code, etc.)
try
{
var port = 4711;
var portEnv = Environment.GetEnvironmentVariable("ACTIONS_DAP_PORT");
if (!string.IsNullOrEmpty(portEnv) && int.TryParse(portEnv, out var customPort))
{
port = customPort;
}
dapServer = HostContext.GetService<IDapServer>();
var debugSession = HostContext.GetService<IDapDebugSession>();
// Wire up the server and session
dapServer.SetSession(debugSession);
debugSession.SetDapServer(dapServer);
await dapServer.StartAsync(port, jobRequestCancellationToken);
Trace.Info($"DAP server listening on port {port}");
jobContext.Output($"DAP debugger waiting for connection on port {port}...");
jobContext.Output($"Connect your DAP client (nvim-dap, VS Code, etc.) to attach to this job.");
// Block until debugger connects
await dapServer.WaitForConnectionAsync(jobRequestCancellationToken);
Trace.Info("DAP client connected, continuing job execution");
jobContext.Output("Debugger connected. Job execution will pause before each step.");
// Register cancellation handler to properly terminate DAP session on job cancellation
try
{
dapCancellationRegistration = jobRequestCancellationToken.Register(() =>
{
Trace.Info("Job cancelled - terminating DAP session");
debugSession.CancelSession();
});
}
catch (Exception ex)
{
Trace.Warning($"Failed to register DAP cancellation handler: {ex.Message}");
}
}
catch (OperationCanceledException)
{
// Job was cancelled before debugger connected
Trace.Info("Job cancelled while waiting for DAP client connection");
}
catch (Exception ex)
{
// Log but don't fail the job if DAP server fails to start
Trace.Warning($"Failed to start DAP server: {ex.Message}");
jobContext.Warning($"DAP debugging unavailable: {ex.Message}");
dapServer = null;
}
}
jobContext.SetRunnerContext("os", VarUtil.OS);
@@ -259,6 +317,23 @@ namespace GitHub.Runner.Worker
runnerShutdownRegistration = null;
}
// Dispose DAP cancellation registration
dapCancellationRegistration?.Dispose();
// Stop DAP server if it was started
if (dapServer != null)
{
try
{
Trace.Info("Stopping DAP server");
await dapServer.StopAsync();
}
catch (Exception ex)
{
Trace.Warning($"Error stopping DAP server: {ex.Message}");
}
}
await ShutdownQueue(throwOnFailure: false);
}
}

View File

@@ -1,4 +1,4 @@
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using System;
@@ -19,12 +19,31 @@ namespace GitHub.Runner.Worker
private static readonly Regex _propertyRegex = new("^[a-zA-Z_][a-zA-Z0-9_]*$", RegexOptions.Compiled);
private readonly DictionaryContextData _contextData = new();
/// <summary>
/// Optional callback for debug logging. When set, will be called with debug messages
/// for all StepsContext mutations.
/// </summary>
public Action<string> OnDebugLog { get; set; }
// Forwards a message to the optional OnDebugLog callback; a no-op when no
// callback has been registered.
private void DebugLog(string message)
{
    // Snapshot the delegate once so a concurrent reassignment between the
    // null check and the call cannot cause a NullReferenceException.
    var sink = OnDebugLog;
    if (sink != null)
    {
        sink(message);
    }
}
// Produces a log-friendly rendering of a value: null/empty becomes "(empty)",
// and anything longer than maxLength is clipped and suffixed with "...".
private static string TruncateValue(string value, int maxLength = 50)
{
    if (string.IsNullOrEmpty(value))
    {
        return "(empty)";
    }

    return value.Length <= maxLength
        ? value
        : value.Substring(0, maxLength) + "...";
}
/// <summary>
/// Clears memory for a composite action's isolated "steps" context, after the action
/// is finished executing.
/// </summary>
public void ClearScope(string scopeName)
{
DebugLog($"[StepsContext] ClearScope: scope='{scopeName ?? "(root)"}'");
if (_contextData.TryGetValue(scopeName, out _))
{
_contextData[scopeName] = new DictionaryContextData();
@@ -78,6 +97,7 @@ namespace GitHub.Runner.Worker
{
reference = $"steps['{stepName}']['outputs']['{outputName}']";
}
DebugLog($"[StepsContext] SetOutput: step='{stepName}', output='{outputName}', value='{TruncateValue(value)}'");
}
public void SetConclusion(
@@ -86,7 +106,9 @@ namespace GitHub.Runner.Worker
ActionResult conclusion)
{
var step = GetStep(scopeName, stepName);
step["conclusion"] = new StringContextData(conclusion.ToString().ToLowerInvariant());
var conclusionStr = conclusion.ToString().ToLowerInvariant();
step["conclusion"] = new StringContextData(conclusionStr);
DebugLog($"[StepsContext] SetConclusion: step='{stepName}', conclusion={conclusionStr}");
}
public void SetOutcome(
@@ -95,7 +117,9 @@ namespace GitHub.Runner.Worker
ActionResult outcome)
{
var step = GetStep(scopeName, stepName);
step["outcome"] = new StringContextData(outcome.ToString().ToLowerInvariant());
var outcomeStr = outcome.ToString().ToLowerInvariant();
step["outcome"] = new StringContextData(outcomeStr);
DebugLog($"[StepsContext] SetOutcome: step='{stepName}', outcome={outcomeStr}");
}
private DictionaryContextData GetStep(string scopeName, string stepName)

View File

@@ -1,5 +1,6 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Expressions2;
@@ -10,6 +11,7 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Dap;
using GitHub.Runner.Worker.Expressions;
namespace GitHub.Runner.Worker
@@ -50,6 +52,13 @@ namespace GitHub.Runner.Worker
jobContext.JobContext.Status = (jobContext.Result ?? TaskResult.Succeeded).ToActionResult();
var scopeInputs = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
bool checkPostJobActions = false;
// Get debug session for DAP debugging support
// The session's IsActive property determines if debugging is actually enabled
var debugSession = HostContext.GetService<IDapDebugSession>();
bool isFirstStep = true;
int stepIndex = 0; // Track step index for checkpoints
while (jobContext.JobSteps.Count > 0 || !checkPostJobActions)
{
if (jobContext.JobSteps.Count == 0 && !checkPostJobActions)
@@ -65,6 +74,9 @@ namespace GitHub.Runner.Worker
var step = jobContext.JobSteps.Dequeue();
// Capture remaining steps for potential checkpoint (before we modify the queue)
var remainingSteps = jobContext.JobSteps.ToList();
Trace.Info($"Processing step: DisplayName='{step.DisplayName}'");
ArgUtil.NotNull(step.ExecutionContext, nameof(step.ExecutionContext));
ArgUtil.NotNull(step.ExecutionContext.Global, nameof(step.ExecutionContext.Global));
@@ -74,6 +86,8 @@ namespace GitHub.Runner.Worker
step.ExecutionContext.Start();
// Expression functions
// Clear first to handle step-back scenarios where the same step may be re-processed
step.ExecutionContext.ExpressionFunctions.Clear();
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<AlwaysFunction>(PipelineTemplateConstants.Always, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<CancelledFunction>(PipelineTemplateConstants.Cancelled, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<FailureFunction>(PipelineTemplateConstants.Failure, 0, 0));
@@ -181,6 +195,61 @@ namespace GitHub.Runner.Worker
}
}
// Pause for DAP debugger BEFORE step execution
// This happens after expression values are set up so the debugger can inspect variables
if (debugSession?.IsActive == true)
{
// Store step info for checkpoint creation later
debugSession.SetPendingStepInfo(step, jobContext, stepIndex, remainingSteps);
// Pause and wait for user command (next/continue/stepBack/reverseContinue)
await debugSession.OnStepStartingAsync(step, jobContext, isFirstStep, jobContext.CancellationToken);
isFirstStep = false;
// Check if user requested to step back
if (debugSession.HasPendingRestore)
{
var checkpoint = debugSession.ConsumeRestoredCheckpoint();
if (checkpoint != null)
{
// Restore the checkpoint state using the correct checkpoint index
debugSession.RestoreCheckpoint(checkpoint.CheckpointIndex, jobContext);
// Re-queue the steps from checkpoint
while (jobContext.JobSteps.Count > 0)
{
jobContext.JobSteps.Dequeue();
}
// Queue the checkpoint's step and remaining steps
// Reset execution context for rerun since CancellationTokenSource was disposed in Complete()
checkpoint.CurrentStep.ExecutionContext.ResetForRerun();
jobContext.JobSteps.Enqueue(checkpoint.CurrentStep);
foreach (var remainingStep in checkpoint.RemainingSteps)
{
remainingStep.ExecutionContext.ResetForRerun();
jobContext.JobSteps.Enqueue(remainingStep);
}
// Reset step index to checkpoint's index
stepIndex = checkpoint.StepIndex;
// Clear pending step info since we're not executing this step
debugSession.ClearPendingStepInfo();
// Skip to next iteration - will process restored step
continue;
}
}
// User pressed next/continue - create checkpoint NOW
// This captures any REPL modifications made while paused
if (debugSession.ShouldCreateCheckpoint())
{
debugSession.CreateCheckpointForPendingStep(jobContext);
}
}
// Evaluate condition
step.ExecutionContext.Debug($"Evaluating condition for step: '{step.DisplayName}'");
var conditionTraceWriter = new ConditionTraceWriter(Trace, step.ExecutionContext);
@@ -238,6 +307,9 @@ namespace GitHub.Runner.Worker
jobCancelRegister?.Dispose();
jobCancelRegister = null;
}
// Clear pending step info after step completes
debugSession?.ClearPendingStepInfo();
}
}
@@ -253,8 +325,20 @@ namespace GitHub.Runner.Worker
Trace.Info($"No need for updating job result with current step result '{step.ExecutionContext.Result}'.");
}
// Notify DAP debugger AFTER step execution
if (debugSession?.IsActive == true)
{
debugSession.OnStepCompleted(step);
}
// Increment step index for checkpoint tracking
stepIndex++;
Trace.Info($"Current state: job state = '{jobContext.Result}'");
}
// Notify DAP debugger that the job has completed
debugSession?.OnJobCompleted();
}
private async Task RunStepAsync(IStep step, CancellationToken jobCancellationToken)

View File

@@ -1,14 +1,14 @@
using GitHub.Services.Common.Diagnostics;
using System;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Linq;
using GitHub.Services.Common.Diagnostics;
using GitHub.Services.Common.Internal;
namespace GitHub.Services.Common

View File

@@ -146,6 +146,7 @@ namespace GitHub.Services.Common
sockEx.SocketErrorCode == SocketError.TimedOut ||
sockEx.SocketErrorCode == SocketError.HostDown ||
sockEx.SocketErrorCode == SocketError.HostUnreachable ||
sockEx.SocketErrorCode == SocketError.HostNotFound ||
sockEx.SocketErrorCode == SocketError.TryAgain)
{
return true;

View File

@@ -9,6 +9,7 @@ namespace GitHub.DistributedTask.Expressions2
{
static ExpressionConstants()
{
AddFunction<Case>("case", 3, Byte.MaxValue);
AddFunction<Contains>("contains", 2, 2);
AddFunction<EndsWith>("endsWith", 2, 2);
AddFunction<Format>("format", 1, Byte.MaxValue);

View File

@@ -17,9 +17,10 @@ namespace GitHub.DistributedTask.Expressions2
String expression,
ITraceWriter trace,
IEnumerable<INamedValueInfo> namedValues,
IEnumerable<IFunctionInfo> functions)
IEnumerable<IFunctionInfo> functions,
Boolean allowCaseFunction = true)
{
var context = new ParseContext(expression, trace, namedValues, functions);
var context = new ParseContext(expression, trace, namedValues, functions, allowCaseFunction);
context.Trace.Info($"Parsing expression: <{expression}>");
return CreateTree(context);
}
@@ -349,6 +350,10 @@ namespace GitHub.DistributedTask.Expressions2
{
throw new ParseException(ParseExceptionKind.TooManyParameters, token: @operator, expression: context.Expression);
}
else if (functionInfo.Name.Equals("case", StringComparison.OrdinalIgnoreCase) && function.Parameters.Count % 2 == 0)
{
throw new ParseException(ParseExceptionKind.EvenParameters, token: @operator, expression: context.Expression);
}
}
/// <summary>
@@ -411,6 +416,12 @@ namespace GitHub.DistributedTask.Expressions2
String name,
out IFunctionInfo functionInfo)
{
if (String.Equals(name, "case", StringComparison.OrdinalIgnoreCase) && !context.AllowCaseFunction)
{
functionInfo = null;
return false;
}
return ExpressionConstants.WellKnownFunctions.TryGetValue(name, out functionInfo) ||
context.ExtensionFunctions.TryGetValue(name, out functionInfo);
}
@@ -418,6 +429,7 @@ namespace GitHub.DistributedTask.Expressions2
private sealed class ParseContext
{
public Boolean AllowUnknownKeywords;
public Boolean AllowCaseFunction;
public readonly String Expression;
public readonly Dictionary<String, IFunctionInfo> ExtensionFunctions = new Dictionary<String, IFunctionInfo>(StringComparer.OrdinalIgnoreCase);
public readonly Dictionary<String, INamedValueInfo> ExtensionNamedValues = new Dictionary<String, INamedValueInfo>(StringComparer.OrdinalIgnoreCase);
@@ -433,7 +445,8 @@ namespace GitHub.DistributedTask.Expressions2
ITraceWriter trace,
IEnumerable<INamedValueInfo> namedValues,
IEnumerable<IFunctionInfo> functions,
Boolean allowUnknownKeywords = false)
Boolean allowUnknownKeywords = false,
Boolean allowCaseFunction = true)
{
Expression = expression ?? String.Empty;
if (Expression.Length > ExpressionConstants.MaxLength)
@@ -454,6 +467,7 @@ namespace GitHub.DistributedTask.Expressions2
LexicalAnalyzer = new LexicalAnalyzer(Expression);
AllowUnknownKeywords = allowUnknownKeywords;
AllowCaseFunction = allowCaseFunction;
}
private class NoOperationTraceWriter : ITraceWriter

View File

@@ -29,6 +29,9 @@ namespace GitHub.DistributedTask.Expressions2
case ParseExceptionKind.TooManyParameters:
description = "Too many parameters supplied";
break;
case ParseExceptionKind.EvenParameters:
description = "Even number of parameters supplied, requires an odd number of parameters";
break;
case ParseExceptionKind.UnexpectedEndOfExpression:
description = "Unexpected end of expression";
break;

View File

@@ -6,6 +6,7 @@
ExceededMaxLength,
TooFewParameters,
TooManyParameters,
EvenParameters,
UnexpectedEndOfExpression,
UnexpectedSymbol,
UnrecognizedFunction,

View File

@@ -0,0 +1,45 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using GitHub.Actions.Expressions.Data;
namespace GitHub.DistributedTask.Expressions2.Sdk.Functions
{
// Implements case(p1, r1, p2, r2, ..., default): evaluates the predicates in
// order and returns the result paired with the first true predicate, falling
// back to the trailing default argument when none match. Only the matched
// result (or the default) is ever evaluated.
internal sealed class Case : Function
{
    protected sealed override Object EvaluateCore(
        EvaluationContext context,
        out ResultMemory resultMemory)
    {
        resultMemory = null;

        // Arguments must form predicate/result pairs plus one default,
        // i.e. an odd total count.
        if (Parameters.Count % 2 == 0)
        {
            throw new InvalidOperationException("case requires an odd number of arguments");
        }

        var pairCount = (Parameters.Count - 1) / 2;
        for (var pair = 0; pair < pairCount; pair++)
        {
            var predicateResult = Parameters[2 * pair].Evaluate(context);

            // Each predicate is required to produce a boolean.
            if (predicateResult.Kind != ValueKind.Boolean)
            {
                throw new InvalidOperationException("case predicate must evaluate to a boolean value");
            }

            // First true predicate wins: evaluate and return its paired result.
            if ((Boolean)predicateResult.Value)
            {
                return Parameters[2 * pair + 1].Evaluate(context).Value;
            }
        }

        // No predicate matched: fall through to the trailing default argument.
        return Parameters[Parameters.Count - 1].Evaluate(context).Value;
    }
}
}

View File

@@ -86,6 +86,12 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal ITraceWriter TraceWriter { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the case expression function is allowed.
/// Defaults to true. Set to false to disable the case function.
/// </summary>
internal Boolean AllowCaseFunction { get; set; } = true;
private IDictionary<String, Int32> FileIds
{
get

View File

@@ -57,7 +57,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -94,7 +94,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -123,7 +123,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -152,7 +152,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,

View File

@@ -663,7 +663,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
var node = default(ExpressionNode);
try
{
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
node = expressionParser.CreateTree(condition, null, namedValues, functions, allowCaseFunction: context.AllowCaseFunction) as ExpressionNode;
}
catch (Exception ex)
{

View File

@@ -10,6 +10,7 @@ namespace GitHub.Actions.Expressions
{
static ExpressionConstants()
{
AddFunction<Case>("case", 3, Byte.MaxValue);
AddFunction<Contains>("contains", 2, 2);
AddFunction<EndsWith>("endsWith", 2, 2);
AddFunction<Format>("format", 1, Byte.MaxValue);

View File

@@ -17,9 +17,10 @@ namespace GitHub.Actions.Expressions
String expression,
ITraceWriter trace,
IEnumerable<INamedValueInfo> namedValues,
IEnumerable<IFunctionInfo> functions)
IEnumerable<IFunctionInfo> functions,
Boolean allowCaseFunction = true)
{
var context = new ParseContext(expression, trace, namedValues, functions);
var context = new ParseContext(expression, trace, namedValues, functions, allowCaseFunction: allowCaseFunction);
context.Trace.Info($"Parsing expression: <{expression}>");
return CreateTree(context);
}
@@ -349,6 +350,10 @@ namespace GitHub.Actions.Expressions
{
throw new ParseException(ParseExceptionKind.TooManyParameters, token: @operator, expression: context.Expression);
}
else if (functionInfo.Name.Equals("case", StringComparison.OrdinalIgnoreCase) && function.Parameters.Count % 2 == 0)
{
throw new ParseException(ParseExceptionKind.EvenParameters, token: @operator, expression: context.Expression);
}
}
/// <summary>
@@ -411,6 +416,12 @@ namespace GitHub.Actions.Expressions
String name,
out IFunctionInfo functionInfo)
{
if (String.Equals(name, "case", StringComparison.OrdinalIgnoreCase) && !context.AllowCaseFunction)
{
functionInfo = null;
return false;
}
return ExpressionConstants.WellKnownFunctions.TryGetValue(name, out functionInfo) ||
context.ExtensionFunctions.TryGetValue(name, out functionInfo);
}
@@ -418,6 +429,7 @@ namespace GitHub.Actions.Expressions
private sealed class ParseContext
{
public Boolean AllowUnknownKeywords;
public Boolean AllowCaseFunction;
public readonly String Expression;
public readonly Dictionary<String, IFunctionInfo> ExtensionFunctions = new Dictionary<String, IFunctionInfo>(StringComparer.OrdinalIgnoreCase);
public readonly Dictionary<String, INamedValueInfo> ExtensionNamedValues = new Dictionary<String, INamedValueInfo>(StringComparer.OrdinalIgnoreCase);
@@ -433,7 +445,8 @@ namespace GitHub.Actions.Expressions
ITraceWriter trace,
IEnumerable<INamedValueInfo> namedValues,
IEnumerable<IFunctionInfo> functions,
Boolean allowUnknownKeywords = false)
Boolean allowUnknownKeywords = false,
Boolean allowCaseFunction = true)
{
Expression = expression ?? String.Empty;
if (Expression.Length > ExpressionConstants.MaxLength)
@@ -454,6 +467,7 @@ namespace GitHub.Actions.Expressions
LexicalAnalyzer = new LexicalAnalyzer(Expression);
AllowUnknownKeywords = allowUnknownKeywords;
AllowCaseFunction = allowCaseFunction;
}
private class NoOperationTraceWriter : ITraceWriter
@@ -468,4 +482,4 @@ namespace GitHub.Actions.Expressions
}
}
}
}
}

View File

@@ -29,6 +29,9 @@ namespace GitHub.Actions.Expressions
case ParseExceptionKind.TooManyParameters:
description = "Too many parameters supplied";
break;
case ParseExceptionKind.EvenParameters:
description = "Even number of parameters supplied, requires an odd number of parameters";
break;
case ParseExceptionKind.UnexpectedEndOfExpression:
description = "Unexpected end of expression";
break;

View File

@@ -6,6 +6,7 @@ namespace GitHub.Actions.Expressions
ExceededMaxLength,
TooFewParameters,
TooManyParameters,
EvenParameters,
UnexpectedEndOfExpression,
UnexpectedSymbol,
UnrecognizedFunction,

View File

@@ -0,0 +1,45 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using GitHub.Actions.Expressions.Data;
namespace GitHub.Actions.Expressions.Sdk.Functions
{
// case(cond1, value1, cond2, value2, ..., fallback): returns the value paired
// with the first condition that evaluates to true, otherwise the trailing
// fallback. Conditions after the first true one — and their paired values —
// are never evaluated.
internal sealed class Case : Function
{
    protected sealed override Object EvaluateCore(
        EvaluationContext context,
        out ResultMemory resultMemory)
    {
        resultMemory = null;

        // Arguments come in condition/value pairs plus one fallback, so the
        // total count must be odd.
        if (Parameters.Count % 2 == 0)
        {
            throw new InvalidOperationException("case requires an odd number of arguments");
        }

        var index = 0;
        while (index < Parameters.Count - 1)
        {
            var condition = Parameters[index].Evaluate(context);

            // Conditions are required to be booleans.
            if (condition.Kind != ValueKind.Boolean)
            {
                throw new InvalidOperationException("case predicate must evaluate to a boolean value");
            }

            if ((Boolean)condition.Value)
            {
                var matched = Parameters[index + 1].Evaluate(context);
                return matched.Value;
            }

            index += 2;
        }

        // Nothing matched: evaluate and return the fallback argument.
        var fallback = Parameters[Parameters.Count - 1].Evaluate(context);
        return fallback.Value;
    }
}
}

View File

@@ -14,7 +14,11 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Azure.Storage.Blobs" Version="12.25.1" />
<InternalsVisibleTo Include="Test" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Azure.Storage.Blobs" Version="12.27.0" />
<PackageReference Include="Microsoft.Win32.Registry" Version="5.0.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="Microsoft.AspNet.WebApi.Client" Version="6.0.0" />

View File

@@ -1775,7 +1775,7 @@ namespace GitHub.Actions.WorkflowParser.Conversion
var node = default(ExpressionNode);
try
{
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
node = expressionParser.CreateTree(condition, null, namedValues, functions, allowCaseFunction: context.AllowCaseFunction) as ExpressionNode;
}
catch (Exception ex)
{

View File

@@ -113,6 +113,12 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating
/// </summary>
internal Boolean StrictJsonParsing { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the case expression function is allowed.
/// Defaults to true. Set to false to disable the case function.
/// </summary>
internal Boolean AllowCaseFunction { get; set; } = true;
internal ITraceWriter TraceWriter { get; set; }
private IDictionary<String, Int32> FileIds

View File

@@ -55,7 +55,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -93,7 +93,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -123,7 +123,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -153,7 +153,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,

View File

@@ -2504,14 +2504,20 @@ runs:
_pluginManager = new Mock<IRunnerPluginManager>();
_pluginManager.Setup(x => x.GetPluginAction(It.IsAny<string>())).Returns(new RunnerPluginActionInfo() { PluginTypeName = "plugin.class, plugin", PostPluginTypeName = "plugin.cleanup, plugin" });
var actionManifest = new ActionManifestManager();
actionManifest.Initialize(_hc);
var actionManifestLegacy = new ActionManifestManagerLegacy();
actionManifestLegacy.Initialize(_hc);
_hc.SetSingleton<IActionManifestManagerLegacy>(actionManifestLegacy);
var actionManifestNew = new ActionManifestManager();
actionManifestNew.Initialize(_hc);
_hc.SetSingleton<IActionManifestManager>(actionManifestNew);
var actionManifestWrapper = new ActionManifestManagerWrapper();
actionManifestWrapper.Initialize(_hc);
_hc.SetSingleton<IDockerCommandManager>(_dockerManager.Object);
_hc.SetSingleton<IJobServer>(_jobServer.Object);
_hc.SetSingleton<ILaunchServer>(_launchServer.Object);
_hc.SetSingleton<IRunnerPluginManager>(_pluginManager.Object);
_hc.SetSingleton<IActionManifestManager>(actionManifest);
_hc.SetSingleton<IActionManifestManagerWrapper>(actionManifestWrapper);
_hc.SetSingleton<IHttpClientHandlerFactory>(new HttpClientHandlerFactory());
_configurationStore = new Mock<IConfigurationStore>();

View File

@@ -1,9 +1,11 @@
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.Actions.Expressions;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
using GitHub.Actions.Expressions.Data;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Expressions;
using GitHub.Actions.WorkflowParser;
using LegacyContextData = GitHub.DistributedTask.Pipelines.ContextData;
using LegacyExpressions = GitHub.DistributedTask.Expressions2;
using Moq;
using System;
using System.Collections.Generic;
@@ -49,7 +51,7 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(ActionExecutionType.Container, result.Execution.ExecutionType);
var containerAction = result.Execution as ContainerActionExecutionData;
var containerAction = result.Execution as ContainerActionExecutionDataNew;
Assert.Equal("Dockerfile", containerAction.Image);
Assert.Equal("main.sh", containerAction.EntryPoint);
@@ -93,7 +95,7 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(ActionExecutionType.Container, result.Execution.ExecutionType);
var containerAction = result.Execution as ContainerActionExecutionData;
var containerAction = result.Execution as ContainerActionExecutionDataNew;
Assert.Equal("Dockerfile", containerAction.Image);
Assert.Equal("main.sh", containerAction.EntryPoint);
@@ -139,7 +141,7 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(ActionExecutionType.Container, result.Execution.ExecutionType);
var containerAction = result.Execution as ContainerActionExecutionData;
var containerAction = result.Execution as ContainerActionExecutionDataNew;
Assert.Equal("Dockerfile", containerAction.Image);
Assert.Equal("main.sh", containerAction.EntryPoint);
@@ -185,7 +187,7 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(ActionExecutionType.Container, result.Execution.ExecutionType);
var containerAction = result.Execution as ContainerActionExecutionData;
var containerAction = result.Execution as ContainerActionExecutionDataNew;
Assert.Equal("Dockerfile", containerAction.Image);
Assert.Equal("main.sh", containerAction.EntryPoint);
@@ -231,7 +233,7 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(ActionExecutionType.Container, result.Execution.ExecutionType);
var containerAction = result.Execution as ContainerActionExecutionData;
var containerAction = result.Execution as ContainerActionExecutionDataNew;
Assert.Equal("Dockerfile", containerAction.Image);
Assert.Equal("main.sh", containerAction.EntryPoint);
@@ -276,7 +278,7 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(ActionExecutionType.Container, result.Execution.ExecutionType);
var containerAction = result.Execution as ContainerActionExecutionData;
var containerAction = result.Execution as ContainerActionExecutionDataNew;
Assert.Equal("Dockerfile", containerAction.Image);
}
@@ -314,7 +316,7 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(ActionExecutionType.Container, result.Execution.ExecutionType);
var containerAction = result.Execution as ContainerActionExecutionData;
var containerAction = result.Execution as ContainerActionExecutionDataNew;
Assert.Equal("Dockerfile", containerAction.Image);
Assert.Equal("main.sh", containerAction.EntryPoint);
@@ -357,7 +359,7 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(ActionExecutionType.Container, result.Execution.ExecutionType);
var containerAction = result.Execution as ContainerActionExecutionData;
var containerAction = result.Execution as ContainerActionExecutionDataNew;
Assert.Equal("docker://ubuntu:18.04", containerAction.Image);
Assert.Equal("main.sh", containerAction.EntryPoint);
@@ -826,10 +828,10 @@ namespace GitHub.Runner.Common.Tests.Worker
arguments.Add(new BasicExpressionToken(null, null, null, "inputs.greeting"));
arguments.Add(new StringToken(null, null, null, "test"));
var inputsContext = new DictionaryContextData();
inputsContext.Add("greeting", new StringContextData("hello"));
var inputsContext = new DictionaryExpressionData();
inputsContext.Add("greeting", new StringExpressionData("hello"));
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
var evaluateContext = new Dictionary<string, ExpressionData>(StringComparer.OrdinalIgnoreCase);
evaluateContext["inputs"] = inputsContext;
//Act
@@ -863,10 +865,10 @@ namespace GitHub.Runner.Common.Tests.Worker
environment.Add(new StringToken(null, null, null, "hello"), new BasicExpressionToken(null, null, null, "inputs.greeting"));
environment.Add(new StringToken(null, null, null, "test"), new StringToken(null, null, null, "test"));
var inputsContext = new DictionaryContextData();
inputsContext.Add("greeting", new StringContextData("hello"));
var inputsContext = new DictionaryExpressionData();
inputsContext.Add("greeting", new StringExpressionData("hello"));
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
var evaluateContext = new Dictionary<string, ExpressionData>(StringComparer.OrdinalIgnoreCase);
evaluateContext["inputs"] = inputsContext;
//Act
@@ -896,17 +898,17 @@ namespace GitHub.Runner.Common.Tests.Worker
var actionManifest = new ActionManifestManager();
actionManifest.Initialize(_hc);
_ec.Object.ExpressionValues["github"] = new DictionaryContextData
_ec.Object.ExpressionValues["github"] = new LegacyContextData.DictionaryContextData
{
{ "ref", new StringContextData("refs/heads/main") },
{ "ref", new LegacyContextData.StringContextData("refs/heads/main") },
};
_ec.Object.ExpressionValues["strategy"] = new DictionaryContextData();
_ec.Object.ExpressionValues["matrix"] = new DictionaryContextData();
_ec.Object.ExpressionValues["steps"] = new DictionaryContextData();
_ec.Object.ExpressionValues["job"] = new DictionaryContextData();
_ec.Object.ExpressionValues["runner"] = new DictionaryContextData();
_ec.Object.ExpressionValues["env"] = new DictionaryContextData();
_ec.Object.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>("hashFiles", 1, 255));
_ec.Object.ExpressionValues["strategy"] = new LegacyContextData.DictionaryContextData();
_ec.Object.ExpressionValues["matrix"] = new LegacyContextData.DictionaryContextData();
_ec.Object.ExpressionValues["steps"] = new LegacyContextData.DictionaryContextData();
_ec.Object.ExpressionValues["job"] = new LegacyContextData.DictionaryContextData();
_ec.Object.ExpressionValues["runner"] = new LegacyContextData.DictionaryContextData();
_ec.Object.ExpressionValues["env"] = new LegacyContextData.DictionaryContextData();
_ec.Object.ExpressionFunctions.Add(new LegacyExpressions.FunctionInfo<GitHub.Runner.Worker.Expressions.HashFilesFunction>("hashFiles", 1, 255));
//Act
var result = actionManifest.EvaluateDefaultInput(_ec.Object, "testInput", new StringToken(null, null, null, "defaultValue"));
@@ -934,6 +936,9 @@ namespace GitHub.Runner.Common.Tests.Worker
// Test host context.
_hc = new TestHostContext(this, name);
var expressionValues = new LegacyContextData.DictionaryContextData();
var expressionFunctions = new List<LegacyExpressions.IFunctionInfo>();
_ec = new Mock<IExecutionContext>();
_ec.Setup(x => x.Global)
.Returns(new GlobalContext
@@ -943,8 +948,8 @@ namespace GitHub.Runner.Common.Tests.Worker
WriteDebug = true,
});
_ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token);
_ec.Setup(x => x.ExpressionValues).Returns(new DictionaryContextData());
_ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
_ec.Setup(x => x.ExpressionValues).Returns(expressionValues);
_ec.Setup(x => x.ExpressionFunctions).Returns(expressionFunctions);
_ec.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>())).Callback((string tag, string message) => { _hc.GetTrace().Info($"{tag}{message}"); });
_ec.Setup(x => x.AddIssue(It.IsAny<Issue>(), It.IsAny<ExecutionContextLogOptions>())).Callback((Issue issue, ExecutionContextLogOptions logOptions) => { _hc.GetTrace().Info($"[{issue.Type}]{logOptions.LogMessageOverride ?? issue.Message}"); });
}

View File

@@ -0,0 +1,957 @@
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Expressions;
using Moq;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using Xunit;
namespace GitHub.Runner.Common.Tests.Worker
{
public sealed class ActionManifestManagerLegacyL0
{
// Cancellation source backing the mocked execution context's CancellationToken.
private CancellationTokenSource _ecTokenSource;
// Mocked worker execution context handed to the manager under test.
private Mock<IExecutionContext> _ec;
// Test host context; presumably created in Setup() and disposed in Teardown() — both defined outside this view.
private TestHostContext _hc;
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_ContainerAction_Dockerfile()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse the Dockerfile-based action definition from test data.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerfileaction.yml"));

        // Assert: top-level metadata and the two declared inputs survive parsing.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: execution resolves to a container action with image, entry point, args and env.
        Assert.Equal(ActionExecutionType.Container, definition.Execution.ExecutionType);
        var container = definition.Execution as ContainerActionExecutionData;
        Assert.Equal("Dockerfile", container.Image);
        Assert.Equal("main.sh", container.EntryPoint);
        Assert.Equal("bzz", container.Arguments[0].ToString());
        Assert.Equal("Token", container.Environment[0].Key.ToString());
        Assert.Equal("foo", container.Environment[0].Value.ToString());
        Assert.Equal("Url", container.Environment[1].Key.ToString());
        Assert.Equal("bar", container.Environment[1].Value.ToString());
    }
    finally
    {
        // Always release test-host resources, even when an assertion throws.
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_ContainerAction_Dockerfile_Pre()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a Dockerfile action that declares a pre (init) script with an explicit condition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerfileaction_init.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: container execution including the pre script and its explicit 'success()' condition.
        Assert.Equal(ActionExecutionType.Container, definition.Execution.ExecutionType);
        var container = definition.Execution as ContainerActionExecutionData;
        Assert.Equal("Dockerfile", container.Image);
        Assert.Equal("main.sh", container.EntryPoint);
        Assert.Equal("init.sh", container.Pre);
        Assert.Equal("success()", container.InitCondition);
        Assert.Equal("bzz", container.Arguments[0].ToString());
        Assert.Equal("Token", container.Environment[0].Key.ToString());
        Assert.Equal("foo", container.Environment[0].Value.ToString());
        Assert.Equal("Url", container.Environment[1].Key.ToString());
        Assert.Equal("bar", container.Environment[1].Value.ToString());
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_ContainerAction_Dockerfile_Post()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a Dockerfile action that declares a post (cleanup) script with an explicit condition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerfileaction_cleanup.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: container execution including the post script and its explicit 'failure()' condition.
        Assert.Equal(ActionExecutionType.Container, definition.Execution.ExecutionType);
        var container = definition.Execution as ContainerActionExecutionData;
        Assert.Equal("Dockerfile", container.Image);
        Assert.Equal("main.sh", container.EntryPoint);
        Assert.Equal("cleanup.sh", container.Post);
        Assert.Equal("failure()", container.CleanupCondition);
        Assert.Equal("bzz", container.Arguments[0].ToString());
        Assert.Equal("Token", container.Environment[0].Key.ToString());
        Assert.Equal("foo", container.Environment[0].Value.ToString());
        Assert.Equal("Url", container.Environment[1].Key.ToString());
        Assert.Equal("bar", container.Environment[1].Value.ToString());
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_ContainerAction_Dockerfile_Pre_DefaultCondition()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a Dockerfile action whose pre script omits a condition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerfileaction_init_default.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: the init condition defaults to 'always()' when unspecified.
        Assert.Equal(ActionExecutionType.Container, definition.Execution.ExecutionType);
        var container = definition.Execution as ContainerActionExecutionData;
        Assert.Equal("Dockerfile", container.Image);
        Assert.Equal("main.sh", container.EntryPoint);
        Assert.Equal("init.sh", container.Pre);
        Assert.Equal("always()", container.InitCondition);
        Assert.Equal("bzz", container.Arguments[0].ToString());
        Assert.Equal("Token", container.Environment[0].Key.ToString());
        Assert.Equal("foo", container.Environment[0].Value.ToString());
        Assert.Equal("Url", container.Environment[1].Key.ToString());
        Assert.Equal("bar", container.Environment[1].Value.ToString());
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_ContainerAction_Dockerfile_Post_DefaultCondition()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a Dockerfile action whose post script omits a condition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerfileaction_cleanup_default.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: the cleanup condition defaults to 'always()' when unspecified.
        Assert.Equal(ActionExecutionType.Container, definition.Execution.ExecutionType);
        var container = definition.Execution as ContainerActionExecutionData;
        Assert.Equal("Dockerfile", container.Image);
        Assert.Equal("main.sh", container.EntryPoint);
        Assert.Equal("cleanup.sh", container.Post);
        Assert.Equal("always()", container.CleanupCondition);
        Assert.Equal("bzz", container.Arguments[0].ToString());
        Assert.Equal("Token", container.Environment[0].Key.ToString());
        Assert.Equal("foo", container.Environment[0].Value.ToString());
        Assert.Equal("Url", container.Environment[1].Key.ToString());
        Assert.Equal("bar", container.Environment[1].Value.ToString());
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_ContainerAction_NoArgsNoEnv()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a Dockerfile action that declares no args, env, or entrypoint.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerfileaction_noargs_noenv_noentrypoint.yml"));

        // Assert: metadata and inputs still parse; only the image is asserted on the execution.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);
        Assert.Equal(ActionExecutionType.Container, definition.Execution.ExecutionType);
        var container = definition.Execution as ContainerActionExecutionData;
        Assert.Equal("Dockerfile", container.Image);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_ContainerAction_Dockerfile_Expression()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a Dockerfile action whose args/env contain template expressions.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerfileaction_arg_env_expression.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: expressions are preserved verbatim (not evaluated) at load time.
        Assert.Equal(ActionExecutionType.Container, definition.Execution.ExecutionType);
        var container = definition.Execution as ContainerActionExecutionData;
        Assert.Equal("Dockerfile", container.Image);
        Assert.Equal("main.sh", container.EntryPoint);
        Assert.Equal("${{ inputs.greeting }}", container.Arguments[0].ToString());
        Assert.Equal("Token", container.Environment[0].Key.ToString());
        Assert.Equal("foo", container.Environment[0].Value.ToString());
        Assert.Equal("Url", container.Environment[1].Key.ToString());
        Assert.Equal("${{ inputs.entryPoint }}", container.Environment[1].Value.ToString());
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_ContainerAction_DockerHub()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse an action that references a Docker Hub image instead of a Dockerfile.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerhubaction.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: the docker:// image reference is kept as-is.
        Assert.Equal(ActionExecutionType.Container, definition.Execution.ExecutionType);
        var container = definition.Execution as ContainerActionExecutionData;
        Assert.Equal("docker://ubuntu:18.04", container.Image);
        Assert.Equal("main.sh", container.EntryPoint);
        Assert.Equal("bzz", container.Arguments[0].ToString());
        Assert.Equal("Token", container.Environment[0].Key.ToString());
        Assert.Equal("foo", container.Environment[0].Value.ToString());
        Assert.Equal("Url", container.Environment[1].Key.ToString());
        Assert.Equal("bar", container.Environment[1].Value.ToString());
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_NodeAction()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a node12 action definition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "nodeaction.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: the 'greeting' input carries a deprecation notice.
        Assert.Equal(1, definition.Deprecated.Count);
        Assert.True(definition.Deprecated.ContainsKey("greeting"));
        definition.Deprecated.TryGetValue("greeting", out string deprecationMessage);
        Assert.Equal("This property has been deprecated", deprecationMessage);

        // Assert: execution resolves to a NodeJS action on node12.
        Assert.Equal(ActionExecutionType.NodeJS, definition.Execution.ExecutionType);
        var node = definition.Execution as NodeJSActionExecutionData;
        Assert.Equal("main.js", node.Script);
        Assert.Equal("node12", node.NodeVersion);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_Node16Action()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a node16 action definition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "node16action.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: the 'greeting' input carries a deprecation notice.
        Assert.Equal(1, definition.Deprecated.Count);
        Assert.True(definition.Deprecated.ContainsKey("greeting"));
        definition.Deprecated.TryGetValue("greeting", out string deprecationMessage);
        Assert.Equal("This property has been deprecated", deprecationMessage);

        // Assert: execution resolves to a NodeJS action on node16.
        Assert.Equal(ActionExecutionType.NodeJS, definition.Execution.ExecutionType);
        var node = definition.Execution as NodeJSActionExecutionData;
        Assert.Equal("main.js", node.Script);
        Assert.Equal("node16", node.NodeVersion);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_Node20Action()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a node20 action definition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "node20action.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: the 'greeting' input carries a deprecation notice.
        Assert.Equal(1, definition.Deprecated.Count);
        Assert.True(definition.Deprecated.ContainsKey("greeting"));
        definition.Deprecated.TryGetValue("greeting", out string deprecationMessage);
        Assert.Equal("This property has been deprecated", deprecationMessage);

        // Assert: execution resolves to a NodeJS action on node20.
        Assert.Equal(ActionExecutionType.NodeJS, definition.Execution.ExecutionType);
        var node = definition.Execution as NodeJSActionExecutionData;
        Assert.Equal("main.js", node.Script);
        Assert.Equal("node20", node.NodeVersion);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_Node24Action()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a node24 action definition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "node24action.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: the 'greeting' input carries a deprecation notice.
        Assert.Equal(1, definition.Deprecated.Count);
        Assert.True(definition.Deprecated.ContainsKey("greeting"));
        definition.Deprecated.TryGetValue("greeting", out string deprecationMessage);
        Assert.Equal("This property has been deprecated", deprecationMessage);

        // Assert: execution resolves to a NodeJS action on node24.
        Assert.Equal(ActionExecutionType.NodeJS, definition.Execution.ExecutionType);
        var node = definition.Execution as NodeJSActionExecutionData;
        Assert.Equal("main.js", node.Script);
        Assert.Equal("node24", node.NodeVersion);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_NodeAction_Pre()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a node action that declares a pre script with an explicit condition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "nodeaction_init.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: the 'greeting' input carries a deprecation notice.
        Assert.Equal(1, definition.Deprecated.Count);
        Assert.True(definition.Deprecated.ContainsKey("greeting"));
        definition.Deprecated.TryGetValue("greeting", out string deprecationMessage);
        Assert.Equal("This property has been deprecated", deprecationMessage);

        // Assert: the pre script and its explicit 'cancelled()' condition are preserved.
        Assert.Equal(ActionExecutionType.NodeJS, definition.Execution.ExecutionType);
        var node = definition.Execution as NodeJSActionExecutionData;
        Assert.Equal("main.js", node.Script);
        Assert.Equal("init.js", node.Pre);
        Assert.Equal("cancelled()", node.InitCondition);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_NodeAction_Init_DefaultCondition()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a node action whose pre script omits a condition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "nodeaction_init_default.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: the 'greeting' input carries a deprecation notice.
        Assert.Equal(1, definition.Deprecated.Count);
        Assert.True(definition.Deprecated.ContainsKey("greeting"));
        definition.Deprecated.TryGetValue("greeting", out string deprecationMessage);
        Assert.Equal("This property has been deprecated", deprecationMessage);

        // Assert: the init condition defaults to 'always()' when unspecified.
        Assert.Equal(ActionExecutionType.NodeJS, definition.Execution.ExecutionType);
        var node = definition.Execution as NodeJSActionExecutionData;
        Assert.Equal("main.js", node.Script);
        Assert.Equal("init.js", node.Pre);
        Assert.Equal("always()", node.InitCondition);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_NodeAction_Cleanup()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a node action that declares a post (cleanup) script with an explicit condition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "nodeaction_cleanup.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: the 'greeting' input carries a deprecation notice.
        Assert.Equal(1, definition.Deprecated.Count);
        Assert.True(definition.Deprecated.ContainsKey("greeting"));
        definition.Deprecated.TryGetValue("greeting", out string deprecationMessage);
        Assert.Equal("This property has been deprecated", deprecationMessage);

        // Assert: the post script and its explicit 'cancelled()' condition are preserved.
        Assert.Equal(ActionExecutionType.NodeJS, definition.Execution.ExecutionType);
        var node = definition.Execution as NodeJSActionExecutionData;
        Assert.Equal("main.js", node.Script);
        Assert.Equal("cleanup.js", node.Post);
        Assert.Equal("cancelled()", node.CleanupCondition);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_NodeAction_Cleanup_DefaultCondition()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a node action whose post script omits a condition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "nodeaction_cleanup_default.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: the 'greeting' input carries a deprecation notice.
        Assert.Equal(1, definition.Deprecated.Count);
        Assert.True(definition.Deprecated.ContainsKey("greeting"));
        definition.Deprecated.TryGetValue("greeting", out string deprecationMessage);
        Assert.Equal("This property has been deprecated", deprecationMessage);

        // Assert: the cleanup condition defaults to 'always()' when unspecified.
        Assert.Equal(ActionExecutionType.NodeJS, definition.Execution.ExecutionType);
        var node = definition.Execution as NodeJSActionExecutionData;
        Assert.Equal("main.js", node.Script);
        Assert.Equal("cleanup.js", node.Post);
        Assert.Equal("always()", node.CleanupCondition);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_PluginAction()
{
    try
    {
        // Arrange: legacy manifest manager bound to the test host context.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: parse a plugin-based action definition.
        var definition = manager.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "pluginaction.yml"));

        // Assert: metadata and inputs.
        Assert.Equal("Hello World", definition.Name);
        Assert.Equal("Greet the world and record the time", definition.Description);
        var inputs = definition.Inputs;
        Assert.Equal(2, inputs.Count);
        Assert.Equal("greeting", inputs[0].Key.AssertString("key").Value);
        Assert.Equal("Hello", inputs[0].Value.AssertString("value").Value);
        Assert.Equal("entryPoint", inputs[1].Key.AssertString("key").Value);
        Assert.Equal("", inputs[1].Value.AssertString("value").Value);

        // Assert: execution resolves to a plugin action with the declared plugin name.
        Assert.Equal(ActionExecutionType.Plugin, definition.Execution.ExecutionType);
        var plugin = definition.Execution as PluginActionExecutionData;
        Assert.Equal("someplugin", plugin.Plugin);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_ConditionalCompositeAction()
{
    try
    {
        // Arrange: fresh host context plus a manifest manager bound to it.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Act: load a composite action whose steps carry conditions.
        var manifestPath = Path.Combine(TestUtil.GetTestDataPath(), "conditional_composite_action.yml");
        var manifest = manager.Load(_ec.Object, manifestPath);

        // Assert: name round-trips and the action is typed as composite.
        Assert.Equal("Conditional Composite", manifest.Name);
        Assert.Equal(ActionExecutionType.Composite, manifest.Execution.ExecutionType);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Load_CompositeActionNoUsing()
{
    try
    {
        // Arrange: fresh host context plus a manifest manager bound to it.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);
        var manifestPath = Path.Combine(TestUtil.GetTestDataPath(), "composite_action_without_using_token.yml");

        // Act + Assert: a manifest missing 'using' must fail to load...
        var loadError = Assert.Throws<ArgumentException>(() => manager.Load(_ec.Object, manifestPath));
        Assert.Contains($"Failed to load {manifestPath}", loadError.Message);

        // ...and must surface exactly one issue explaining the valid 'using' values.
        _ec.Verify(x => x.AddIssue(It.Is<Issue>(s => s.Message.Contains("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16', 'node20' or 'node24'.")), It.IsAny<ExecutionContextLogOptions>()), Times.Once);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Evaluate_ContainerAction_Args()
{
    try
    {
        // Arrange: fresh host context plus a manifest manager bound to it.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // One expression arg and one literal arg.
        var args = new SequenceToken(null, null, null)
        {
            new BasicExpressionToken(null, null, null, "inputs.greeting"),
            new StringToken(null, null, null, "test"),
        };

        // Expression context: inputs.greeting == "hello".
        var inputs = new DictionaryContextData
        {
            { "greeting", new StringContextData("hello") },
        };
        var expressionContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase)
        {
            ["inputs"] = inputs,
        };

        // Act: evaluate the container arguments against that context.
        var evaluated = manager.EvaluateContainerArguments(_ec.Object, args, expressionContext);

        // Assert: the expression resolved, the literal passed through, nothing extra.
        Assert.Equal("hello", evaluated[0]);
        Assert.Equal("test", evaluated[1]);
        Assert.Equal(2, evaluated.Count);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Evaluate_ContainerAction_Env()
{
    try
    {
        // Arrange: fresh host context plus a manifest manager bound to it.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Environment map: one expression value and one literal value.
        var env = new MappingToken(null, null, null);
        env.Add(new StringToken(null, null, null, "hello"), new BasicExpressionToken(null, null, null, "inputs.greeting"));
        env.Add(new StringToken(null, null, null, "test"), new StringToken(null, null, null, "test"));

        // Expression context: inputs.greeting == "hello".
        var inputs = new DictionaryContextData
        {
            { "greeting", new StringContextData("hello") },
        };
        var expressionContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase)
        {
            ["inputs"] = inputs,
        };

        // Act: evaluate the container environment against that context.
        var evaluated = manager.EvaluateContainerEnvironment(_ec.Object, env, expressionContext);

        // Assert: the expression resolved, the literal passed through, nothing extra.
        Assert.Equal("hello", evaluated["hello"]);
        Assert.Equal("test", evaluated["test"]);
        Assert.Equal(2, evaluated.Count);
    }
    finally
    {
        Teardown();
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Evaluate_Default_Input()
{
    try
    {
        // Arrange: fresh host context plus a manifest manager bound to it.
        Setup();
        var manager = new ActionManifestManagerLegacy();
        manager.Initialize(_hc);

        // Populate the named contexts the default-input evaluator may resolve against;
        // only 'github' carries data here, the rest are intentionally empty.
        _ec.Object.ExpressionValues["github"] = new DictionaryContextData
        {
            { "ref", new StringContextData("refs/heads/main") },
        };
        foreach (var contextName in new[] { "strategy", "matrix", "steps", "job", "runner", "env" })
        {
            _ec.Object.ExpressionValues[contextName] = new DictionaryContextData();
        }
        _ec.Object.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>("hashFiles", 1, 255));

        // Act + Assert: a literal default passes through untouched.
        var evaluated = manager.EvaluateDefaultInput(_ec.Object, "testInput", new StringToken(null, null, null, "defaultValue"));
        Assert.Equal("defaultValue", evaluated);

        // Act + Assert: an expression default is resolved from the github context.
        evaluated = manager.EvaluateDefaultInput(_ec.Object, "testInput", new BasicExpressionToken(null, null, null, "github.ref"));
        Assert.Equal("refs/heads/main", evaluated);
    }
    finally
    {
        Teardown();
    }
}
// Build a fresh TestHostContext and mocked IExecutionContext for one test.
// The [CallerMemberName] default names the host context after the calling test.
private void Setup([CallerMemberName] string name = "")
{
    // Replace any token source left over from a previous test in this class.
    _ecTokenSource?.Dispose();
    _ecTokenSource = new CancellationTokenSource();

    // Test host context.
    _hc = new TestHostContext(this, name);

    // Mocked execution context: real-ish global state, empty expression
    // values/functions, and trace-backed Write/AddIssue callbacks.
    _ec = new Mock<IExecutionContext>();
    _ec.Setup(x => x.Global)
        .Returns(new GlobalContext
        {
            FileTable = new List<String>(),
            Variables = new Variables(_hc, new Dictionary<string, VariableValue>()),
            WriteDebug = true,
        });
    _ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token);
    _ec.Setup(x => x.ExpressionValues).Returns(new DictionaryContextData());
    _ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
    _ec.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>()))
        .Callback((string tag, string message) => _hc.GetTrace().Info($"{tag}{message}"));
    _ec.Setup(x => x.AddIssue(It.IsAny<Issue>(), It.IsAny<ExecutionContextLogOptions>()))
        .Callback((Issue issue, ExecutionContextLogOptions logOptions) => _hc.GetTrace().Info($"[{issue.Type}]{logOptions.LogMessageOverride ?? issue.Message}"));
}
// Dispose the per-test host context, if Setup ever created one.
private void Teardown()
{
    if (_hc != null)
    {
        _hc.Dispose();
    }
}
}
}

View File

@@ -25,7 +25,7 @@ namespace GitHub.Runner.Common.Tests.Worker
private Mock<IExecutionContext> _ec;
private TestHostContext _hc;
private ActionRunner _actionRunner;
private IActionManifestManager _actionManifestManager;
private IActionManifestManagerWrapper _actionManifestManager;
private Mock<IFileCommandManager> _fileCommandManager;
private DictionaryContextData _context = new();
@@ -459,9 +459,16 @@ namespace GitHub.Runner.Common.Tests.Worker
_handlerFactory = new Mock<IHandlerFactory>();
_defaultStepHost = new Mock<IDefaultStepHost>();
_actionManifestManager = new ActionManifestManager();
_fileCommandManager = new Mock<IFileCommandManager>();
var actionManifestLegacy = new ActionManifestManagerLegacy();
actionManifestLegacy.Initialize(_hc);
_hc.SetSingleton<IActionManifestManagerLegacy>(actionManifestLegacy);
var actionManifestNew = new ActionManifestManager();
actionManifestNew.Initialize(_hc);
_hc.SetSingleton<IActionManifestManager>(actionManifestNew);
_actionManifestManager = new ActionManifestManagerWrapper();
_actionManifestManager.Initialize(_hc);
_fileCommandManager = new Mock<IFileCommandManager>();
var githubContext = new GitHubContext();
githubContext.Add("event", JToken.Parse("{\"foo\":\"bar\"}").ToPipelineContextData());
@@ -489,7 +496,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_hc.SetSingleton<IActionManager>(_actionManager.Object);
_hc.SetSingleton<IHandlerFactory>(_handlerFactory.Object);
_hc.SetSingleton<IActionManifestManager>(_actionManifestManager);
_hc.SetSingleton<IActionManifestManagerWrapper>(_actionManifestManager);
_hc.EnqueueInstance<IDefaultStepHost>(_defaultStepHost.Object);

View File

@@ -17,7 +17,7 @@ LAYOUT_DIR="$SCRIPT_DIR/../_layout"
DOWNLOAD_DIR="$SCRIPT_DIR/../_downloads/netcore2x"
PACKAGE_DIR="$SCRIPT_DIR/../_package"
DOTNETSDK_ROOT="$SCRIPT_DIR/../_dotnetsdk"
DOTNETSDK_VERSION="8.0.415"
DOTNETSDK_VERSION="8.0.416"
DOTNETSDK_INSTALLDIR="$DOTNETSDK_ROOT/$DOTNETSDK_VERSION"
RUNNER_VERSION=$(cat runnerversion)

View File

@@ -1,5 +1,5 @@
{
"sdk": {
"version": "8.0.415"
"version": "8.0.416"
}
}

View File

@@ -1 +1 @@
2.329.0
2.331.0