mirror of
https://github.com/actions/runner.git
synced 2026-01-16 08:42:55 +08:00
Compare commits
172 Commits
v2.322.0
...
rentziass/
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b652350bda | ||
|
|
2525a1f9a3 | ||
|
|
ff85ab7fe0 | ||
|
|
2800573f56 | ||
|
|
f1a0d1a9f8 | ||
|
|
15b7034088 | ||
|
|
bbe97ff1c8 | ||
|
|
7a36a68b15 | ||
|
|
f45c5d0785 | ||
|
|
7e4f99337f | ||
|
|
186656e153 | ||
|
|
2e02381901 | ||
|
|
a55696a429 | ||
|
|
379ac038b2 | ||
|
|
14e8e1f667 | ||
|
|
3f43560cb9 | ||
|
|
73f7dbb681 | ||
|
|
f554a6446d | ||
|
|
bdceac4ab3 | ||
|
|
3f1dd45172 | ||
|
|
cf8f50b4d8 | ||
|
|
2cf22c4858 | ||
|
|
04d77df0c7 | ||
|
|
651077689d | ||
|
|
c96dcd4729 | ||
|
|
4b0058f15c | ||
|
|
87d1dfb798 | ||
|
|
c992a2b406 | ||
|
|
b2204f1fab | ||
|
|
f99c3e6ee8 | ||
|
|
463496e4fb | ||
|
|
3f9f6f3994 | ||
|
|
221f65874f | ||
|
|
9a21440691 | ||
|
|
54bcc001e5 | ||
|
|
7df164d2c7 | ||
|
|
a54f380b0e | ||
|
|
8b184c3871 | ||
|
|
b56b161118 | ||
|
|
69aca04de1 | ||
|
|
b3a60e6b06 | ||
|
|
334df748d1 | ||
|
|
b08f962182 | ||
|
|
b8144769c6 | ||
|
|
2a00363a90 | ||
|
|
a1c09806c3 | ||
|
|
c0776daddb | ||
|
|
b5b7986cd6 | ||
|
|
53d69ff441 | ||
|
|
bca18f71d0 | ||
|
|
1b8efb99f6 | ||
|
|
0b2c71fc31 | ||
|
|
60af948051 | ||
|
|
ff775ca101 | ||
|
|
f74be39e77 | ||
|
|
1eb15f28a7 | ||
|
|
afe4fc8446 | ||
|
|
a12731d34d | ||
|
|
18f2450d71 | ||
|
|
2c5f29c3ca | ||
|
|
c9de9a8699 | ||
|
|
68ff57dbc4 | ||
|
|
c774eb8d46 | ||
|
|
f184048a9a | ||
|
|
338d83a941 | ||
|
|
0b074a3e93 | ||
|
|
25faeabaa8 | ||
|
|
b121ef832b | ||
|
|
170033c92b | ||
|
|
f9c4e17fd9 | ||
|
|
646da708ba | ||
|
|
bf8236344b | ||
|
|
720f16aef6 | ||
|
|
f77066a6a8 | ||
|
|
df83df2a32 | ||
|
|
97b2254146 | ||
|
|
7f72ba9e48 | ||
|
|
f8ae5bb1a7 | ||
|
|
a5631456a2 | ||
|
|
65dfa460ba | ||
|
|
80ee51f164 | ||
|
|
c95883f28e | ||
|
|
6e940643a9 | ||
|
|
629f2384a4 | ||
|
|
c3bf70becb | ||
|
|
8b65f5f9df | ||
|
|
5f1efec208 | ||
|
|
20d82ad357 | ||
|
|
0ebdf9e83d | ||
|
|
6543bf206b | ||
|
|
a942627965 | ||
|
|
83539166c9 | ||
|
|
1c1e8bfd18 | ||
|
|
59177fa379 | ||
|
|
2d7635a7f0 | ||
|
|
0203cf24d3 | ||
|
|
5e74a4d8e4 | ||
|
|
6ca97eeb88 | ||
|
|
8a9b96806d | ||
|
|
dc9cf684c9 | ||
|
|
c765c990b9 | ||
|
|
ed48ddd08c | ||
|
|
a1e6ad8d2e | ||
|
|
14856e63bc | ||
|
|
0d24afa114 | ||
|
|
20912234a5 | ||
|
|
5969cbe208 | ||
|
|
9f57d37642 | ||
|
|
60563d82d1 | ||
|
|
097ada9374 | ||
|
|
9b457781d6 | ||
|
|
9709b69571 | ||
|
|
acf3f2ba12 | ||
|
|
f03fcc8a01 | ||
|
|
e4e103c5ed | ||
|
|
a906ec302b | ||
|
|
d9e714496d | ||
|
|
df189ba6e3 | ||
|
|
4c1de69e1c | ||
|
|
26185d43d0 | ||
|
|
e911d2908d | ||
|
|
ce4b7f4dd6 | ||
|
|
505fa60905 | ||
|
|
57459ad274 | ||
|
|
890e43f6c5 | ||
|
|
3a27ca292a | ||
|
|
282f7cd2b2 | ||
|
|
f060fe5c85 | ||
|
|
1a092a24a3 | ||
|
|
26eff8e55a | ||
|
|
d7cfd2e341 | ||
|
|
a3a7b6a77e | ||
|
|
db6005b0a7 | ||
|
|
9155c42c09 | ||
|
|
1c319b4d42 | ||
|
|
fe10d4ae82 | ||
|
|
27d9c886ab | ||
|
|
5106d6578e | ||
|
|
d5ccbd10d1 | ||
|
|
f1b5b5bd5c | ||
|
|
aaf1b92847 | ||
|
|
c1095ae2d1 | ||
|
|
a0a0a76378 | ||
|
|
d47013928b | ||
|
|
cdeec012aa | ||
|
|
2cb1f9431a | ||
|
|
e86c9487ab | ||
|
|
dc9695f123 | ||
|
|
6654f6b3de | ||
|
|
f5e4e7e47c | ||
|
|
68ca457917 | ||
|
|
77700abf81 | ||
|
|
a0ba8fd399 | ||
|
|
6b08f23b6c | ||
|
|
8131246933 | ||
|
|
7211a53c9e | ||
|
|
07310cabc0 | ||
|
|
0195d7ca77 | ||
|
|
259af3eda2 | ||
|
|
0ce29d09c6 | ||
|
|
a84e1c2b15 | ||
|
|
de51cd0ed6 | ||
|
|
3333de3a36 | ||
|
|
b065e5abbe | ||
|
|
bae52e28f9 | ||
|
|
c2c91438e8 | ||
|
|
3486c54ccb | ||
|
|
a61328a7e7 | ||
|
|
52dc98b10f | ||
|
|
a7b319530e | ||
|
|
54f082722f | ||
|
|
ed9d8fc9f7 |
@@ -4,7 +4,7 @@
|
|||||||
"features": {
|
"features": {
|
||||||
"ghcr.io/devcontainers/features/docker-in-docker:1": {},
|
"ghcr.io/devcontainers/features/docker-in-docker:1": {},
|
||||||
"ghcr.io/devcontainers/features/dotnet": {
|
"ghcr.io/devcontainers/features/dotnet": {
|
||||||
"version": "8.0.405"
|
"version": "8.0.416"
|
||||||
},
|
},
|
||||||
"ghcr.io/devcontainers/features/node:1": {
|
"ghcr.io/devcontainers/features/node:1": {
|
||||||
"version": "20"
|
"version": "20"
|
||||||
|
|||||||
6
.github/ISSUE_TEMPLATE/config.yml
vendored
6
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,13 +1,13 @@
|
|||||||
blank_issues_enabled: false
|
blank_issues_enabled: false
|
||||||
contact_links:
|
contact_links:
|
||||||
- name: 🛑 Request a feature in the runner application
|
- name: 🛑 Request a feature in the runner application
|
||||||
url: https://github.com/orgs/community/discussions/categories/actions-and-packages
|
url: https://github.com/orgs/community/discussions/categories/actions
|
||||||
about: If you have feature requests for GitHub Actions, please use the Actions and Packages section on the Github Product Feedback page.
|
about: If you have feature requests for GitHub Actions, please use the Actions section on the Github Product Feedback page.
|
||||||
- name: ✅ Support for GitHub Actions
|
- name: ✅ Support for GitHub Actions
|
||||||
url: https://github.community/c/code-to-cloud/52
|
url: https://github.community/c/code-to-cloud/52
|
||||||
about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum.
|
about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum.
|
||||||
- name: ✅ Feedback and suggestions for GitHub Actions
|
- name: ✅ Feedback and suggestions for GitHub Actions
|
||||||
url: https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback
|
url: https://github.com/github/feedback/discussions/categories/actions
|
||||||
about: If you have feedback or suggestions about GitHub Actions, please open a discussion (or add to an existing one) in the GitHub Actions Feedback. GitHub Actions Product Managers and Engineers monitor the feedback forum.
|
about: If you have feedback or suggestions about GitHub Actions, please open a discussion (or add to an existing one) in the GitHub Actions Feedback. GitHub Actions Product Managers and Engineers monitor the feedback forum.
|
||||||
- name: ‼️ GitHub Security Bug Bounty
|
- name: ‼️ GitHub Security Bug Bounty
|
||||||
url: https://bounty.github.com/
|
url: https://bounty.github.com/
|
||||||
|
|||||||
25
.github/copilot-instructions.md
vendored
Normal file
25
.github/copilot-instructions.md
vendored
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
## Making changes
|
||||||
|
|
||||||
|
### Tests
|
||||||
|
|
||||||
|
Whenever possible, changes should be accompanied by non-trivial tests that meaningfully exercise the core functionality of the new code being introduced.
|
||||||
|
|
||||||
|
All tests are in the `Test/` directory at the repo root. Fast unit tests are in the `Test/L0` directory and by convention have the suffix `L0.cs`. For example: unit tests for a hypothetical `src/Runner.Worker/Foo.cs` would go in `src/Test/L0/Worker/FooL0.cs`.
|
||||||
|
|
||||||
|
Run tests using this command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
cd src && ./dev.sh test
|
||||||
|
```
|
||||||
|
|
||||||
|
### Formatting
|
||||||
|
|
||||||
|
After editing .cs files, always format the code using this command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
cd src && ./dev.sh format
|
||||||
|
```
|
||||||
|
|
||||||
|
### Feature Flags
|
||||||
|
|
||||||
|
Wherever possible, all changes should be safeguarded by a feature flag; `Features` are declared in [Constants.cs](src/Runner.Common/Constants.cs).
|
||||||
54
.github/workflows/build.yml
vendored
54
.github/workflows/build.yml
vendored
@@ -14,6 +14,9 @@ on:
|
|||||||
paths-ignore:
|
paths-ignore:
|
||||||
- '**.md'
|
- '**.md'
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
strategy:
|
strategy:
|
||||||
@@ -41,7 +44,7 @@ jobs:
|
|||||||
devScript: ./dev.sh
|
devScript: ./dev.sh
|
||||||
|
|
||||||
- runtime: win-x64
|
- runtime: win-x64
|
||||||
os: windows-2019
|
os: windows-latest
|
||||||
devScript: ./dev
|
devScript: ./dev
|
||||||
|
|
||||||
- runtime: win-arm64
|
- runtime: win-arm64
|
||||||
@@ -50,7 +53,7 @@ jobs:
|
|||||||
|
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
# Build runner layout
|
# Build runner layout
|
||||||
- name: Build & Layout Release
|
- name: Build & Layout Release
|
||||||
@@ -75,8 +78,53 @@ jobs:
|
|||||||
# Upload runner package tar.gz/zip as artifact
|
# Upload runner package tar.gz/zip as artifact
|
||||||
- name: Publish Artifact
|
- name: Publish Artifact
|
||||||
if: github.event_name != 'pull_request'
|
if: github.event_name != 'pull_request'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v6
|
||||||
with:
|
with:
|
||||||
name: runner-package-${{ matrix.runtime }}
|
name: runner-package-${{ matrix.runtime }}
|
||||||
path: |
|
path: |
|
||||||
_package
|
_package
|
||||||
|
|
||||||
|
docker:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
os: [ ubuntu-latest, ubuntu-24.04-arm ]
|
||||||
|
include:
|
||||||
|
- os: ubuntu-latest
|
||||||
|
docker_platform: linux/amd64
|
||||||
|
- os: ubuntu-24.04-arm
|
||||||
|
docker_platform: linux/arm64
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Get latest runner version
|
||||||
|
id: latest_runner
|
||||||
|
uses: actions/github-script@v8
|
||||||
|
with:
|
||||||
|
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||||
|
script: |
|
||||||
|
const release = await github.rest.repos.getLatestRelease({
|
||||||
|
owner: 'actions',
|
||||||
|
repo: 'runner',
|
||||||
|
});
|
||||||
|
const version = release.data.tag_name.replace(/^v/, '');
|
||||||
|
core.setOutput('version', version);
|
||||||
|
|
||||||
|
- name: Setup Docker buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Build Docker image
|
||||||
|
uses: docker/build-push-action@v6
|
||||||
|
with:
|
||||||
|
context: ./images
|
||||||
|
load: true
|
||||||
|
platforms: ${{ matrix.docker_platform }}
|
||||||
|
tags: |
|
||||||
|
${{ github.sha }}:latest
|
||||||
|
build-args: |
|
||||||
|
RUNNER_VERSION=${{ steps.latest_runner.outputs.version }}
|
||||||
|
|
||||||
|
- name: Test Docker image
|
||||||
|
run: |
|
||||||
|
docker run --rm ${{ github.sha }}:latest ./run.sh --version
|
||||||
|
|
||||||
|
|||||||
2
.github/workflows/close-bugs-bot.yml
vendored
2
.github/workflows/close-bugs-bot.yml
vendored
@@ -7,7 +7,7 @@ jobs:
|
|||||||
stale:
|
stale:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/stale@v8
|
- uses: actions/stale@v10
|
||||||
with:
|
with:
|
||||||
close-issue-message: "This issue does not seem to be a problem with the runner application, it concerns the GitHub actions platform more generally. Could you please post your feedback on the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃"
|
close-issue-message: "This issue does not seem to be a problem with the runner application, it concerns the GitHub actions platform more generally. Could you please post your feedback on the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃"
|
||||||
exempt-issue-labels: "keep"
|
exempt-issue-labels: "keep"
|
||||||
|
|||||||
4
.github/workflows/close-features-bot.yml
vendored
4
.github/workflows/close-features-bot.yml
vendored
@@ -7,9 +7,9 @@ jobs:
|
|||||||
stale:
|
stale:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/stale@v8
|
- uses: actions/stale@v10
|
||||||
with:
|
with:
|
||||||
close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions-and-packages) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
|
close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
|
||||||
exempt-issue-labels: "keep"
|
exempt-issue-labels: "keep"
|
||||||
stale-issue-label: "actions-feature"
|
stale-issue-label: "actions-feature"
|
||||||
only-labels: "actions-feature"
|
only-labels: "actions-feature"
|
||||||
|
|||||||
6
.github/workflows/codeql.yml
vendored
6
.github/workflows/codeql.yml
vendored
@@ -23,11 +23,11 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v3
|
uses: github/codeql-action/init@v4
|
||||||
# Override language selection by uncommenting this and choosing your languages
|
# Override language selection by uncommenting this and choosing your languages
|
||||||
# with:
|
# with:
|
||||||
# languages: go, javascript, csharp, python, cpp, java
|
# languages: go, javascript, csharp, python, cpp, java
|
||||||
@@ -38,4 +38,4 @@ jobs:
|
|||||||
working-directory: src
|
working-directory: src
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v3
|
uses: github/codeql-action/analyze@v4
|
||||||
|
|||||||
211
.github/workflows/dependency-check.yml
vendored
Normal file
211
.github/workflows/dependency-check.yml
vendored
Normal file
@@ -0,0 +1,211 @@
|
|||||||
|
name: Dependency Status Check
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
check_type:
|
||||||
|
description: "Type of dependency check"
|
||||||
|
required: false
|
||||||
|
default: "all"
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- all
|
||||||
|
- node
|
||||||
|
- dotnet
|
||||||
|
- docker
|
||||||
|
- npm
|
||||||
|
schedule:
|
||||||
|
- cron: "0 11 * * 1" # Weekly on Monday at 11 AM
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
dependency-status:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
node20-status: ${{ steps.check-versions.outputs.node20-status }}
|
||||||
|
node24-status: ${{ steps.check-versions.outputs.node24-status }}
|
||||||
|
dotnet-status: ${{ steps.check-versions.outputs.dotnet-status }}
|
||||||
|
docker-status: ${{ steps.check-versions.outputs.docker-status }}
|
||||||
|
buildx-status: ${{ steps.check-versions.outputs.buildx-status }}
|
||||||
|
npm-vulnerabilities: ${{ steps.check-versions.outputs.npm-vulnerabilities }}
|
||||||
|
open-dependency-prs: ${{ steps.check-prs.outputs.open-dependency-prs }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v6
|
||||||
|
with:
|
||||||
|
node-version: "20"
|
||||||
|
|
||||||
|
- name: Check dependency versions
|
||||||
|
id: check-versions
|
||||||
|
run: |
|
||||||
|
echo "## Dependency Status Report" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "Generated on: $(date)" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
# Check Node versions
|
||||||
|
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "node" ]]; then
|
||||||
|
echo "### Node.js Versions" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
VERSIONS_JSON=$(curl -s https://raw.githubusercontent.com/actions/node-versions/main/versions-manifest.json)
|
||||||
|
LATEST_NODE20=$(echo "$VERSIONS_JSON" | jq -r '.[] | select(.version | startswith("20.")) | .version' | head -1)
|
||||||
|
LATEST_NODE24=$(echo "$VERSIONS_JSON" | jq -r '.[] | select(.version | startswith("24.")) | .version' | head -1)
|
||||||
|
|
||||||
|
CURRENT_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||||
|
CURRENT_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||||
|
|
||||||
|
NODE20_STATUS="✅ up-to-date"
|
||||||
|
NODE24_STATUS="✅ up-to-date"
|
||||||
|
|
||||||
|
if [ "$CURRENT_NODE20" != "$LATEST_NODE20" ]; then
|
||||||
|
NODE20_STATUS="⚠️ outdated"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$CURRENT_NODE24" != "$LATEST_NODE24" ]; then
|
||||||
|
NODE24_STATUS="⚠️ outdated"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "| Version | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "|---------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Node 20 | $CURRENT_NODE20 | $LATEST_NODE20 | $NODE20_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Node 24 | $CURRENT_NODE24 | $LATEST_NODE24 | $NODE24_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
echo "node20-status=$NODE20_STATUS" >> $GITHUB_OUTPUT
|
||||||
|
echo "node24-status=$NODE24_STATUS" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check .NET version
|
||||||
|
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "dotnet" ]]; then
|
||||||
|
echo "### .NET SDK Version" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
current_dotnet_version=$(jq -r .sdk.version ./src/global.json)
|
||||||
|
current_major_minor=$(echo "$current_dotnet_version" | cut -d '.' -f 1,2)
|
||||||
|
latest_dotnet_version=$(curl -sb -H "Accept: application/json" "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/$current_major_minor/latest.version")
|
||||||
|
|
||||||
|
DOTNET_STATUS="✅ up-to-date"
|
||||||
|
if [ "$current_dotnet_version" != "$latest_dotnet_version" ]; then
|
||||||
|
DOTNET_STATUS="⚠️ outdated"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "| Component | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "|-----------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| .NET SDK | $current_dotnet_version | $latest_dotnet_version | $DOTNET_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
echo "dotnet-status=$DOTNET_STATUS" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check Docker versions
|
||||||
|
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "docker" ]]; then
|
||||||
|
echo "### Docker Versions" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
current_docker=$(grep "ARG DOCKER_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||||
|
current_buildx=$(grep "ARG BUILDX_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||||
|
|
||||||
|
latest_docker=$(curl -s https://download.docker.com/linux/static/stable/x86_64/ | grep -o 'docker-[0-9]*\.[0-9]*\.[0-9]*\.tgz' | sort -V | tail -n 1 | sed 's/docker-\(.*\)\.tgz/\1/')
|
||||||
|
latest_buildx=$(curl -s https://api.github.com/repos/docker/buildx/releases/latest | jq -r '.tag_name' | sed 's/^v//')
|
||||||
|
|
||||||
|
DOCKER_STATUS="✅ up-to-date"
|
||||||
|
BUILDX_STATUS="✅ up-to-date"
|
||||||
|
|
||||||
|
if [ "$current_docker" != "$latest_docker" ]; then
|
||||||
|
DOCKER_STATUS="⚠️ outdated"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$current_buildx" != "$latest_buildx" ]; then
|
||||||
|
BUILDX_STATUS="⚠️ outdated"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "| Component | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "|-----------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Docker | $current_docker | $latest_docker | $DOCKER_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Docker Buildx | $current_buildx | $latest_buildx | $BUILDX_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
echo "docker-status=$DOCKER_STATUS" >> $GITHUB_OUTPUT
|
||||||
|
echo "buildx-status=$BUILDX_STATUS" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check npm vulnerabilities
|
||||||
|
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "npm" ]]; then
|
||||||
|
echo "### NPM Security Audit" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
cd src/Misc/expressionFunc/hashFiles
|
||||||
|
npm install --silent
|
||||||
|
|
||||||
|
AUDIT_OUTPUT=""
|
||||||
|
AUDIT_EXIT_CODE=0
|
||||||
|
# Run npm audit and capture output and exit code
|
||||||
|
if ! AUDIT_OUTPUT=$(npm audit --json 2>&1); then
|
||||||
|
AUDIT_EXIT_CODE=$?
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check if output is valid JSON
|
||||||
|
if echo "$AUDIT_OUTPUT" | jq . >/dev/null 2>&1; then
|
||||||
|
VULN_COUNT=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.total // 0')
|
||||||
|
# Ensure VULN_COUNT is a number
|
||||||
|
VULN_COUNT=$(echo "$VULN_COUNT" | grep -o '[0-9]*' | head -1)
|
||||||
|
VULN_COUNT=${VULN_COUNT:-0}
|
||||||
|
|
||||||
|
NPM_STATUS="✅ no vulnerabilities"
|
||||||
|
if [ "$VULN_COUNT" -gt 0 ] 2>/dev/null; then
|
||||||
|
NPM_STATUS="⚠️ $VULN_COUNT vulnerabilities found"
|
||||||
|
|
||||||
|
# Get vulnerability details
|
||||||
|
HIGH_VULNS=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.high // 0')
|
||||||
|
CRITICAL_VULNS=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.critical // 0')
|
||||||
|
|
||||||
|
echo "| Severity | Count |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| Critical | $CRITICAL_VULNS |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "| High | $HIGH_VULNS |" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
else
|
||||||
|
echo "No npm vulnerabilities found ✅" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
NPM_STATUS="❌ npm audit failed"
|
||||||
|
echo "npm audit failed to run or returned invalid JSON ❌" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "Exit code: $AUDIT_EXIT_CODE" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "Output: $AUDIT_OUTPUT" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "npm-vulnerabilities=$NPM_STATUS" >> $GITHUB_OUTPUT
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Check for open dependency PRs
|
||||||
|
id: check-prs
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: |
|
||||||
|
echo "### Open Dependency PRs" >> $GITHUB_STEP_SUMMARY
|
||||||
|
|
||||||
|
# Get open PRs with dependency label
|
||||||
|
OPEN_PRS=$(gh pr list --label "dependencies" --state open --json number,title,url)
|
||||||
|
PR_COUNT=$(echo "$OPEN_PRS" | jq '. | length')
|
||||||
|
|
||||||
|
if [ "$PR_COUNT" -gt 0 ]; then
|
||||||
|
echo "Found $PR_COUNT open dependency PR(s):" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "$OPEN_PRS" | jq -r '.[] | "- [#\(.number)](\(.url)) \(.title)"' >> $GITHUB_STEP_SUMMARY
|
||||||
|
else
|
||||||
|
echo "No open dependency PRs found ✅" >> $GITHUB_STEP_SUMMARY
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "open-dependency-prs=$PR_COUNT" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Summary
|
||||||
|
run: |
|
||||||
|
echo "### Summary" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Check for open PRs with the \`dependency\` label before releases" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Review and merge dependency updates regularly" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Critical vulnerabilities should be addressed immediately" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "**Automated workflows run weekly to check for updates:**" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Node.js versions (Mondays at 6 AM)" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- NPM audit fix (Mondays at 7 AM)" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- .NET SDK updates (Mondays at midnight)" >> $GITHUB_STEP_SUMMARY
|
||||||
|
echo "- Docker/Buildx updates (Mondays at midnight)" >> $GITHUB_STEP_SUMMARY
|
||||||
166
.github/workflows/docker-buildx-upgrade.yml
vendored
Normal file
166
.github/workflows/docker-buildx-upgrade.yml
vendored
Normal file
@@ -0,0 +1,166 @@
|
|||||||
|
name: "Docker/Buildx Version Upgrade"
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "0 0 * * 1" # Run every Monday at midnight
|
||||||
|
workflow_dispatch: # Allow manual triggering
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check-versions:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
DOCKER_SHOULD_UPDATE: ${{ steps.check_docker_version.outputs.SHOULD_UPDATE }}
|
||||||
|
DOCKER_LATEST_VERSION: ${{ steps.check_docker_version.outputs.LATEST_VERSION }}
|
||||||
|
DOCKER_CURRENT_VERSION: ${{ steps.check_docker_version.outputs.CURRENT_VERSION }}
|
||||||
|
BUILDX_SHOULD_UPDATE: ${{ steps.check_buildx_version.outputs.SHOULD_UPDATE }}
|
||||||
|
BUILDX_LATEST_VERSION: ${{ steps.check_buildx_version.outputs.LATEST_VERSION }}
|
||||||
|
BUILDX_CURRENT_VERSION: ${{ steps.check_buildx_version.outputs.CURRENT_VERSION }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Check Docker version
|
||||||
|
id: check_docker_version
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
# Extract current Docker version from Dockerfile
|
||||||
|
current_version=$(grep "ARG DOCKER_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||||
|
|
||||||
|
# Fetch latest Docker Engine version from Docker's download site
|
||||||
|
# This gets the latest Linux static binary version which matches what's used in the Dockerfile
|
||||||
|
latest_version=$(curl -s https://download.docker.com/linux/static/stable/x86_64/ | grep -o 'docker-[0-9]*\.[0-9]*\.[0-9]*\.tgz' | sort -V | tail -n 1 | sed 's/docker-\(.*\)\.tgz/\1/')
|
||||||
|
|
||||||
|
# Extra check to ensure we got a valid version
|
||||||
|
if [[ ! $latest_version =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||||
|
echo "Failed to retrieve a valid Docker version"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
should_update=0
|
||||||
|
[ "$current_version" != "$latest_version" ] && should_update=1
|
||||||
|
|
||||||
|
echo "CURRENT_VERSION=${current_version}" >> $GITHUB_OUTPUT
|
||||||
|
echo "LATEST_VERSION=${latest_version}" >> $GITHUB_OUTPUT
|
||||||
|
echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Check Buildx version
|
||||||
|
id: check_buildx_version
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
# Extract current Buildx version from Dockerfile
|
||||||
|
current_version=$(grep "ARG BUILDX_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||||
|
|
||||||
|
# Fetch latest Buildx version
|
||||||
|
latest_version=$(curl -s https://api.github.com/repos/docker/buildx/releases/latest | jq -r '.tag_name' | sed 's/^v//')
|
||||||
|
|
||||||
|
should_update=0
|
||||||
|
[ "$current_version" != "$latest_version" ] && should_update=1
|
||||||
|
|
||||||
|
echo "CURRENT_VERSION=${current_version}" >> $GITHUB_OUTPUT
|
||||||
|
echo "LATEST_VERSION=${latest_version}" >> $GITHUB_OUTPUT
|
||||||
|
echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Create annotations for versions
|
||||||
|
run: |
|
||||||
|
docker_should_update="${{ steps.check_docker_version.outputs.SHOULD_UPDATE }}"
|
||||||
|
buildx_should_update="${{ steps.check_buildx_version.outputs.SHOULD_UPDATE }}"
|
||||||
|
|
||||||
|
# Show annotation if only Docker needs update
|
||||||
|
if [[ "$docker_should_update" == "1" && "$buildx_should_update" == "0" ]]; then
|
||||||
|
echo "::warning ::Docker version (${{ steps.check_docker_version.outputs.LATEST_VERSION }}) needs update but Buildx is current. Only updating when both need updates."
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Show annotation if only Buildx needs update
|
||||||
|
if [[ "$docker_should_update" == "0" && "$buildx_should_update" == "1" ]]; then
|
||||||
|
echo "::warning ::Buildx version (${{ steps.check_buildx_version.outputs.LATEST_VERSION }}) needs update but Docker is current. Only updating when both need updates."
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Show annotation when both are current
|
||||||
|
if [[ "$docker_should_update" == "0" && "$buildx_should_update" == "0" ]]; then
|
||||||
|
echo "::warning ::Latest Docker version is ${{ steps.check_docker_version.outputs.LATEST_VERSION }} and Buildx version is ${{ steps.check_buildx_version.outputs.LATEST_VERSION }}. No updates needed."
|
||||||
|
fi
|
||||||
|
|
||||||
|
update-versions:
|
||||||
|
permissions:
|
||||||
|
pull-requests: write
|
||||||
|
contents: write
|
||||||
|
needs: [check-versions]
|
||||||
|
if: ${{ needs.check-versions.outputs.DOCKER_SHOULD_UPDATE == 1 && needs.check-versions.outputs.BUILDX_SHOULD_UPDATE == 1 }}
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Update Docker version
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
latest_version="${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }}"
|
||||||
|
current_version="${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }}"
|
||||||
|
|
||||||
|
# Update version in Dockerfile
|
||||||
|
sed -i "s/ARG DOCKER_VERSION=$current_version/ARG DOCKER_VERSION=$latest_version/g" ./images/Dockerfile
|
||||||
|
|
||||||
|
- name: Update Buildx version
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
latest_version="${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
|
||||||
|
current_version="${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }}"
|
||||||
|
|
||||||
|
# Update version in Dockerfile
|
||||||
|
sed -i "s/ARG BUILDX_VERSION=$current_version/ARG BUILDX_VERSION=$latest_version/g" ./images/Dockerfile
|
||||||
|
|
||||||
|
- name: Commit changes and create Pull Request
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: |
|
||||||
|
# Setup branch and commit information
|
||||||
|
branch_name="feature/docker-buildx-upgrade"
|
||||||
|
commit_message="Upgrade Docker to v${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Buildx to v${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
|
||||||
|
pr_title="Update Docker to v${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Buildx to v${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
|
||||||
|
|
||||||
|
# Configure git
|
||||||
|
git config --global user.name "github-actions[bot]"
|
||||||
|
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||||
|
|
||||||
|
# Create branch or switch to it if it exists
|
||||||
|
if git show-ref --quiet refs/remotes/origin/$branch_name; then
|
||||||
|
git fetch origin
|
||||||
|
git checkout -B "$branch_name" origin/$branch_name
|
||||||
|
else
|
||||||
|
git checkout -b "$branch_name"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Commit and push changes
|
||||||
|
git commit -a -m "$commit_message"
|
||||||
|
git push --force origin "$branch_name"
|
||||||
|
|
||||||
|
# Create PR body using here-doc for proper formatting
|
||||||
|
cat > pr_body.txt << 'EOF'
|
||||||
|
Automated Docker and Buildx version update:
|
||||||
|
|
||||||
|
- Docker: ${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }} → ${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }}
|
||||||
|
- Buildx: ${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }} → ${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}
|
||||||
|
|
||||||
|
This update ensures we're using the latest stable Docker and Buildx versions for security and performance improvements.
|
||||||
|
|
||||||
|
**Release notes:** https://docs.docker.com/engine/release-notes/
|
||||||
|
|
||||||
|
**Next steps:**
|
||||||
|
- Review the version changes
|
||||||
|
- Verify container builds work as expected
|
||||||
|
- Test multi-platform builds if applicable
|
||||||
|
- Merge when ready
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Autogenerated by [Docker/Buildx Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/docker-buildx-upgrade.yml)
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Create PR
|
||||||
|
gh pr create -B main -H "$branch_name" \
|
||||||
|
--title "$pr_title" \
|
||||||
|
--label "dependencies" \
|
||||||
|
--label "dependencies-weekly-check" \
|
||||||
|
--label "dependencies-not-dependabot" \
|
||||||
|
--label "docker" \
|
||||||
|
--body-file pr_body.txt
|
||||||
@@ -1,50 +1,47 @@
|
|||||||
name: Publish Runner Image
|
name: Publish DockerImage from Release Branch
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
runnerVersion:
|
releaseBranch:
|
||||||
type: string
|
description: 'Release Branch (releases/mXXX)'
|
||||||
description: Version of the runner being installed
|
required: true
|
||||||
|
|
||||||
env:
|
|
||||||
REGISTRY: ghcr.io
|
|
||||||
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
publish-image:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
packages: write
|
packages: write
|
||||||
|
id-token: write
|
||||||
|
attestations: write
|
||||||
|
env:
|
||||||
|
REGISTRY: ghcr.io
|
||||||
|
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
ref: ${{ github.event.inputs.releaseBranch }}
|
||||||
|
|
||||||
- name: Compute image version
|
- name: Compute image version
|
||||||
id: image
|
id: image
|
||||||
uses: actions/github-script@v6
|
uses: actions/github-script@v8
|
||||||
env:
|
|
||||||
RUNNER_VERSION: ${{ github.event.inputs.runnerVersion }}
|
|
||||||
with:
|
with:
|
||||||
script: |
|
script: |
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const inputRunnerVersion = process.env.RUNNER_VERSION;
|
const runnerVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '');
|
||||||
if (inputRunnerVersion) {
|
console.log(`Using runner version ${runnerVersion}`);
|
||||||
console.log(`Using input runner version ${inputRunnerVersion}`)
|
if (!/^\d+\.\d+\.\d+$/.test(runnerVersion)) {
|
||||||
core.setOutput('version', inputRunnerVersion);
|
throw new Error(`Invalid runner version: ${runnerVersion}`);
|
||||||
return
|
|
||||||
}
|
}
|
||||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
|
||||||
console.log(`Using runner version ${runnerVersion}`)
|
|
||||||
core.setOutput('version', runnerVersion);
|
core.setOutput('version', runnerVersion);
|
||||||
|
|
||||||
- name: Setup Docker buildx
|
- name: Setup Docker buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: Log into registry ${{ env.REGISTRY }}
|
- name: Log into registry ${{ env.REGISTRY }}
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ${{ env.REGISTRY }}
|
registry: ${{ env.REGISTRY }}
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
@@ -52,7 +49,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Build and push Docker image
|
- name: Build and push Docker image
|
||||||
id: build-and-push
|
id: build-and-push
|
||||||
uses: docker/build-push-action@v3
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
context: ./images
|
context: ./images
|
||||||
platforms: |
|
platforms: |
|
||||||
@@ -66,5 +63,13 @@ jobs:
|
|||||||
push: true
|
push: true
|
||||||
labels: |
|
labels: |
|
||||||
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
|
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
|
||||||
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
|
|
||||||
org.opencontainers.image.licenses=MIT
|
org.opencontainers.image.licenses=MIT
|
||||||
|
annotations: |
|
||||||
|
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
|
||||||
|
|
||||||
|
- name: Generate attestation
|
||||||
|
uses: actions/attest-build-provenance@v3
|
||||||
|
with:
|
||||||
|
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||||
|
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||||
|
push-to-registry: true
|
||||||
26
.github/workflows/dotnet-upgrade.yml
vendored
26
.github/workflows/dotnet-upgrade.yml
vendored
@@ -2,7 +2,7 @@ name: "DotNet SDK Upgrade"
|
|||||||
|
|
||||||
on:
|
on:
|
||||||
schedule:
|
schedule:
|
||||||
- cron: '0 0 * * 1'
|
- cron: "0 8 * * 1" # Weekly on Monday at 8 AM UTC (independent of Node.js/NPM)
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
@@ -15,7 +15,7 @@ jobs:
|
|||||||
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
|
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
- name: Get current major minor version
|
- name: Get current major minor version
|
||||||
id: fetch_current_version
|
id: fetch_current_version
|
||||||
shell: bash
|
shell: bash
|
||||||
@@ -89,17 +89,17 @@ jobs:
|
|||||||
if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
|
if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||||
- name: Create Pull Request
|
- name: Create Pull Request
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
|
gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --label "dependencies" --label "dependencies-weekly-check" --label "dependencies-not-dependabot" --label "dotnet" --body "
|
||||||
https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
|
https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
|
||||||
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
Autogenerated by [DotNet SDK Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/dotnet-upgrade.yml)"
|
Autogenerated by [DotNet SDK Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/dotnet-upgrade.yml)"
|
||||||
|
|||||||
194
.github/workflows/node-upgrade.yml
vendored
Normal file
194
.github/workflows/node-upgrade.yml
vendored
Normal file
@@ -0,0 +1,194 @@
|
|||||||
|
name: Auto Update Node Version
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "0 6 * * 1" # Weekly, every Monday
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
update-node:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
- name: Get latest Node versions
|
||||||
|
id: node-versions
|
||||||
|
run: |
|
||||||
|
# Get latest Node.js releases from official GitHub releases
|
||||||
|
echo "Fetching latest Node.js releases..."
|
||||||
|
|
||||||
|
# Get latest v20.x release
|
||||||
|
LATEST_NODE20=$(curl -s https://api.github.com/repos/nodejs/node/releases | \
|
||||||
|
jq -r '.[] | select(.tag_name | startswith("v20.")) | .tag_name' | \
|
||||||
|
head -1 | sed 's/^v//')
|
||||||
|
|
||||||
|
# Get latest v24.x release
|
||||||
|
LATEST_NODE24=$(curl -s https://api.github.com/repos/nodejs/node/releases | \
|
||||||
|
jq -r '.[] | select(.tag_name | startswith("v24.")) | .tag_name' | \
|
||||||
|
head -1 | sed 's/^v//')
|
||||||
|
|
||||||
|
echo "Found Node.js releases: 20=$LATEST_NODE20, 24=$LATEST_NODE24"
|
||||||
|
|
||||||
|
# Verify these versions are available in alpine_nodejs releases
|
||||||
|
echo "Verifying availability in alpine_nodejs..."
|
||||||
|
ALPINE_RELEASES=$(curl -s https://api.github.com/repos/actions/alpine_nodejs/releases | jq -r '.[].tag_name')
|
||||||
|
|
||||||
|
if ! echo "$ALPINE_RELEASES" | grep -q "^v$LATEST_NODE20$"; then
|
||||||
|
echo "::warning title=Node 20 Fallback::Node 20 version $LATEST_NODE20 not found in alpine_nodejs releases, using fallback"
|
||||||
|
# Fall back to latest available alpine_nodejs v20 release
|
||||||
|
LATEST_NODE20=$(echo "$ALPINE_RELEASES" | grep "^v20\." | head -1 | sed 's/^v//')
|
||||||
|
echo "Using latest available alpine_nodejs Node 20: $LATEST_NODE20"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! echo "$ALPINE_RELEASES" | grep -q "^v$LATEST_NODE24$"; then
|
||||||
|
echo "::warning title=Node 24 Fallback::Node 24 version $LATEST_NODE24 not found in alpine_nodejs releases, using fallback"
|
||||||
|
# Fall back to latest available alpine_nodejs v24 release
|
||||||
|
LATEST_NODE24=$(echo "$ALPINE_RELEASES" | grep "^v24\." | head -1 | sed 's/^v//')
|
||||||
|
echo "Using latest available alpine_nodejs Node 24: $LATEST_NODE24"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Validate that we have non-empty version numbers
|
||||||
|
if [ -z "$LATEST_NODE20" ] || [ "$LATEST_NODE20" = "" ]; then
|
||||||
|
echo "::error title=Invalid Node 20 Version::Failed to determine valid Node 20 version. Got: '$LATEST_NODE20'"
|
||||||
|
echo "Available alpine_nodejs releases:"
|
||||||
|
echo "$ALPINE_RELEASES" | head -10
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -z "$LATEST_NODE24" ] || [ "$LATEST_NODE24" = "" ]; then
|
||||||
|
echo "::error title=Invalid Node 24 Version::Failed to determine valid Node 24 version. Got: '$LATEST_NODE24'"
|
||||||
|
echo "Available alpine_nodejs releases:"
|
||||||
|
echo "$ALPINE_RELEASES" | head -10
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Additional validation: ensure versions match expected format (x.y.z)
|
||||||
|
if ! echo "$LATEST_NODE20" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||||
|
echo "::error title=Invalid Node 20 Format::Node 20 version '$LATEST_NODE20' does not match expected format (x.y.z)"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! echo "$LATEST_NODE24" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||||
|
echo "::error title=Invalid Node 24 Format::Node 24 version '$LATEST_NODE24' does not match expected format (x.y.z)"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "✅ Validated Node versions: 20=$LATEST_NODE20, 24=$LATEST_NODE24"
|
||||||
|
echo "latest_node20=$LATEST_NODE20" >> $GITHUB_OUTPUT
|
||||||
|
echo "latest_node24=$LATEST_NODE24" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
# Check current versions in externals.sh
|
||||||
|
CURRENT_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||||
|
CURRENT_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||||
|
|
||||||
|
echo "current_node20=$CURRENT_NODE20" >> $GITHUB_OUTPUT
|
||||||
|
echo "current_node24=$CURRENT_NODE24" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
# Determine if updates are needed
|
||||||
|
NEEDS_UPDATE20="false"
|
||||||
|
NEEDS_UPDATE24="false"
|
||||||
|
|
||||||
|
if [ "$CURRENT_NODE20" != "$LATEST_NODE20" ]; then
|
||||||
|
NEEDS_UPDATE20="true"
|
||||||
|
echo "::notice title=Node 20 Update Available::Current: $CURRENT_NODE20 → Latest: $LATEST_NODE20"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$CURRENT_NODE24" != "$LATEST_NODE24" ]; then
|
||||||
|
NEEDS_UPDATE24="true"
|
||||||
|
echo "::notice title=Node 24 Update Available::Current: $CURRENT_NODE24 → Latest: $LATEST_NODE24"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$NEEDS_UPDATE20" == "false" ] && [ "$NEEDS_UPDATE24" == "false" ]; then
|
||||||
|
echo "::notice title=No Updates Needed::All Node.js versions are up to date"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "needs_update20=$NEEDS_UPDATE20" >> $GITHUB_OUTPUT
|
||||||
|
echo "needs_update24=$NEEDS_UPDATE24" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Update externals.sh and create PR
|
||||||
|
if: steps.node-versions.outputs.needs_update20 == 'true' || steps.node-versions.outputs.needs_update24 == 'true'
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: |
|
||||||
|
# Final validation before making changes
|
||||||
|
NODE20_VERSION="${{ steps.node-versions.outputs.latest_node20 }}"
|
||||||
|
NODE24_VERSION="${{ steps.node-versions.outputs.latest_node24 }}"
|
||||||
|
|
||||||
|
echo "Final validation of versions before PR creation:"
|
||||||
|
echo "Node 20: '$NODE20_VERSION'"
|
||||||
|
echo "Node 24: '$NODE24_VERSION'"
|
||||||
|
|
||||||
|
# Validate versions are not empty and match expected format
|
||||||
|
if [ -z "$NODE20_VERSION" ] || ! echo "$NODE20_VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||||
|
echo "::error title=Invalid Node 20 Version::Refusing to create PR with invalid Node 20 version: '$NODE20_VERSION'"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -z "$NODE24_VERSION" ] || ! echo "$NODE24_VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||||
|
echo "::error title=Invalid Node 24 Version::Refusing to create PR with invalid Node 24 version: '$NODE24_VERSION'"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "✅ All versions validated successfully"
|
||||||
|
|
||||||
|
# Update the files
|
||||||
|
if [ "${{ steps.node-versions.outputs.needs_update20 }}" == "true" ]; then
|
||||||
|
sed -i 's/NODE20_VERSION="[^"]*"/NODE20_VERSION="'"$NODE20_VERSION"'"/' src/Misc/externals.sh
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${{ steps.node-versions.outputs.needs_update24 }}" == "true" ]; then
|
||||||
|
sed -i 's/NODE24_VERSION="[^"]*"/NODE24_VERSION="'"$NODE24_VERSION"'"/' src/Misc/externals.sh
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Verify the changes were applied correctly
|
||||||
|
echo "Verifying changes in externals.sh:"
|
||||||
|
grep "NODE20_VERSION=" src/Misc/externals.sh
|
||||||
|
grep "NODE24_VERSION=" src/Misc/externals.sh
|
||||||
|
|
||||||
|
# Ensure we actually have valid versions in the file
|
||||||
|
UPDATED_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||||
|
UPDATED_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||||
|
|
||||||
|
if [ -z "$UPDATED_NODE20" ] || [ -z "$UPDATED_NODE24" ]; then
|
||||||
|
echo "::error title=Update Failed::Failed to properly update externals.sh"
|
||||||
|
echo "Updated Node 20: '$UPDATED_NODE20'"
|
||||||
|
echo "Updated Node 24: '$UPDATED_NODE24'"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Configure git
|
||||||
|
git config --global user.name "github-actions[bot]"
|
||||||
|
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||||
|
|
||||||
|
# Create branch and commit changes
|
||||||
|
branch_name="chore/update-node"
|
||||||
|
git checkout -b "$branch_name"
|
||||||
|
git commit -a -m "chore: update Node versions (20: $NODE20_VERSION, 24: $NODE24_VERSION)"
|
||||||
|
git push --force origin "$branch_name"
|
||||||
|
|
||||||
|
# Create PR body using here-doc for proper formatting
|
||||||
|
cat > pr_body.txt << EOF
|
||||||
|
Automated Node.js version update:
|
||||||
|
|
||||||
|
- Node 20: ${{ steps.node-versions.outputs.current_node20 }} → $NODE20_VERSION
|
||||||
|
- Node 24: ${{ steps.node-versions.outputs.current_node24 }} → $NODE24_VERSION
|
||||||
|
|
||||||
|
This update ensures we're using the latest stable Node.js versions for security and performance improvements.
|
||||||
|
|
||||||
|
**Note**: When updating Node versions, remember to also create a new release of alpine_nodejs at the updated version following the instructions at: https://github.com/actions/alpine_nodejs
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Autogenerated by [Node Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/node-upgrade.yml)
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Create PR
|
||||||
|
gh pr create -B main -H "$branch_name" \
|
||||||
|
--title "chore: update Node versions" \
|
||||||
|
--label "dependencies" \
|
||||||
|
--label "dependencies-weekly-check" \
|
||||||
|
--label "dependencies-not-dependabot" \
|
||||||
|
--label "node" \
|
||||||
|
--label "javascript" \
|
||||||
|
--body-file pr_body.txt
|
||||||
|
|
||||||
|
echo "::notice title=PR Created::Successfully created Node.js version update PR on branch $branch_name"
|
||||||
235
.github/workflows/npm-audit-typescript.yml
vendored
Normal file
235
.github/workflows/npm-audit-typescript.yml
vendored
Normal file
@@ -0,0 +1,235 @@
|
|||||||
|
name: NPM Audit Fix with TypeScript Auto-Fix
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
npm-audit-with-ts-fix:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v6
|
||||||
|
with:
|
||||||
|
node-version: "20"
|
||||||
|
- name: NPM install and audit fix with TypeScript auto-repair
|
||||||
|
working-directory: src/Misc/expressionFunc/hashFiles
|
||||||
|
run: |
|
||||||
|
npm install
|
||||||
|
|
||||||
|
# Check for vulnerabilities first
|
||||||
|
echo "Checking for npm vulnerabilities..."
|
||||||
|
if npm audit --audit-level=moderate; then
|
||||||
|
echo "✅ No moderate or higher vulnerabilities found"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "⚠️ Vulnerabilities found, attempting npm audit fix..."
|
||||||
|
|
||||||
|
# Attempt audit fix and capture the result
|
||||||
|
if npm audit fix; then
|
||||||
|
echo "✅ npm audit fix completed successfully"
|
||||||
|
AUDIT_FIX_STATUS="success"
|
||||||
|
else
|
||||||
|
echo "⚠️ npm audit fix failed or had issues"
|
||||||
|
AUDIT_FIX_STATUS="failed"
|
||||||
|
|
||||||
|
# Try audit fix with --force as a last resort for critical/high vulns only
|
||||||
|
echo "Checking if critical/high vulnerabilities remain..."
|
||||||
|
if ! npm audit --audit-level=high; then
|
||||||
|
echo "🚨 Critical/high vulnerabilities remain, attempting --force fix..."
|
||||||
|
if npm audit fix --force; then
|
||||||
|
echo "⚠️ npm audit fix --force completed (may have breaking changes)"
|
||||||
|
AUDIT_FIX_STATUS="force-fixed"
|
||||||
|
else
|
||||||
|
echo "❌ npm audit fix --force also failed"
|
||||||
|
AUDIT_FIX_STATUS="force-failed"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo "✅ Only moderate/low vulnerabilities remain after failed fix"
|
||||||
|
AUDIT_FIX_STATUS="partial-success"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "AUDIT_FIX_STATUS=$AUDIT_FIX_STATUS" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
# Try to fix TypeScript issues automatically
|
||||||
|
echo "Attempting to fix TypeScript compatibility issues..."
|
||||||
|
|
||||||
|
# Check if build fails
|
||||||
|
if ! npm run build 2>/dev/null; then
|
||||||
|
echo "Build failed, attempting automated fixes..."
|
||||||
|
|
||||||
|
# Common fix 1: Update @types/node to latest compatible version
|
||||||
|
echo "Trying to update @types/node to latest version..."
|
||||||
|
npm update @types/node
|
||||||
|
|
||||||
|
# Common fix 2: If that doesn't work, try installing a specific known-good version
|
||||||
|
if ! npm run build 2>/dev/null; then
|
||||||
|
echo "Trying specific @types/node version..."
|
||||||
|
# Try Node 20 compatible version
|
||||||
|
npm install --save-dev @types/node@^20.0.0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Common fix 3: Clear node_modules and reinstall if still failing
|
||||||
|
if ! npm run build 2>/dev/null; then
|
||||||
|
echo "Clearing node_modules and reinstalling..."
|
||||||
|
rm -rf node_modules package-lock.json
|
||||||
|
npm install
|
||||||
|
|
||||||
|
# Re-run audit fix after clean install if it was successful before
|
||||||
|
if [[ "$AUDIT_FIX_STATUS" == "success" || "$AUDIT_FIX_STATUS" == "force-fixed" ]]; then
|
||||||
|
echo "Re-running npm audit fix after clean install..."
|
||||||
|
npm audit fix || echo "Audit fix failed on second attempt"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Common fix 4: Try updating TypeScript itself
|
||||||
|
if ! npm run build 2>/dev/null; then
|
||||||
|
echo "Trying to update TypeScript..."
|
||||||
|
npm update typescript
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Final check
|
||||||
|
if npm run build 2>/dev/null; then
|
||||||
|
echo "✅ Successfully fixed TypeScript issues automatically"
|
||||||
|
else
|
||||||
|
echo "⚠️ Could not automatically fix TypeScript issues"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo "✅ Build passes after audit fix"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Create PR if changes exist
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
HUSKY: 0 # Disable husky hooks for automated commits
|
||||||
|
run: |
|
||||||
|
# Check if there are any changes
|
||||||
|
if [ -n "$(git status --porcelain)" ]; then
|
||||||
|
# Configure git
|
||||||
|
git config --global user.name "github-actions[bot]"
|
||||||
|
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||||
|
|
||||||
|
# Create branch and commit changes
|
||||||
|
branch_name="chore/npm-audit-fix-with-ts-repair"
|
||||||
|
git checkout -b "$branch_name"
|
||||||
|
|
||||||
|
# Commit with --no-verify to skip husky hooks
|
||||||
|
git commit -a -m "chore: npm audit fix with automated TypeScript compatibility fixes" --no-verify
|
||||||
|
git push --force origin "$branch_name"
|
||||||
|
|
||||||
|
# Check final build status and gather info about what was changed
|
||||||
|
build_status="✅ Build passes"
|
||||||
|
fixes_applied=""
|
||||||
|
cd src/Misc/expressionFunc/hashFiles
|
||||||
|
|
||||||
|
# Check what packages were updated
|
||||||
|
if git diff HEAD~1 package.json | grep -q "@types/node"; then
|
||||||
|
fixes_applied+="\n- Updated @types/node version for TypeScript compatibility"
|
||||||
|
fi
|
||||||
|
if git diff HEAD~1 package.json | grep -q "typescript"; then
|
||||||
|
fixes_applied+="\n- Updated TypeScript version"
|
||||||
|
fi
|
||||||
|
if git diff HEAD~1 package-lock.json | grep -q "resolved"; then
|
||||||
|
fixes_applied+="\n- Updated package dependencies via npm audit fix"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! npm run build 2>/dev/null; then
|
||||||
|
build_status="⚠️ Build fails - manual review required"
|
||||||
|
fi
|
||||||
|
cd - > /dev/null
|
||||||
|
|
||||||
|
# Create enhanced PR body using here-doc for proper formatting
|
||||||
|
audit_status_msg=""
|
||||||
|
case "$AUDIT_FIX_STATUS" in
|
||||||
|
"success")
|
||||||
|
audit_status_msg="✅ **Audit Fix**: Completed successfully"
|
||||||
|
;;
|
||||||
|
"partial-success")
|
||||||
|
audit_status_msg="⚠️ **Audit Fix**: Partial success (only moderate/low vulnerabilities remain)"
|
||||||
|
;;
|
||||||
|
"force-fixed")
|
||||||
|
audit_status_msg="⚠️ **Audit Fix**: Completed with --force (may have breaking changes)"
|
||||||
|
;;
|
||||||
|
"failed"|"force-failed")
|
||||||
|
audit_status_msg="❌ **Audit Fix**: Failed to resolve vulnerabilities"
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
audit_status_msg="❓ **Audit Fix**: Status unknown"
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
if [[ "$build_status" == *"fails"* ]]; then
|
||||||
|
cat > pr_body.txt << EOF
|
||||||
|
Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.
|
||||||
|
|
||||||
|
**Build Status**: ⚠️ Build fails - manual review required
|
||||||
|
$audit_status_msg
|
||||||
|
|
||||||
|
This workflow attempts to automatically fix TypeScript compatibility issues that may arise from npm audit fixes.
|
||||||
|
|
||||||
|
⚠️ **Manual Review Required**: The build is currently failing after automated fixes were attempted.
|
||||||
|
|
||||||
|
Common issues and solutions:
|
||||||
|
- Check for TypeScript version compatibility with Node.js types
|
||||||
|
- Review breaking changes in updated dependencies
|
||||||
|
- Consider pinning problematic dependency versions temporarily
|
||||||
|
- Review tsconfig.json for compatibility settings
|
||||||
|
|
||||||
|
**Automated Fix Strategy**:
|
||||||
|
1. Run npm audit fix with proper error handling
|
||||||
|
2. Update @types/node to latest compatible version
|
||||||
|
3. Try Node 20 specific @types/node version if needed
|
||||||
|
4. Clean reinstall dependencies if conflicts persist
|
||||||
|
5. Update TypeScript compiler if necessary
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Autogenerated by [NPM Audit Fix with TypeScript Auto-Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit-ts-fix.yml)
|
||||||
|
EOF
|
||||||
|
else
|
||||||
|
cat > pr_body.txt << EOF
|
||||||
|
Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.
|
||||||
|
|
||||||
|
**Build Status**: ✅ Build passes
|
||||||
|
$audit_status_msg
|
||||||
|
|
||||||
|
This workflow attempts to automatically fix TypeScript compatibility issues that may arise from npm audit fixes.
|
||||||
|
|
||||||
|
✅ **Ready to Merge**: All automated fixes were successful and the build passes.
|
||||||
|
|
||||||
|
**Automated Fix Strategy**:
|
||||||
|
1. Run npm audit fix with proper error handling
|
||||||
|
2. Update @types/node to latest compatible version
|
||||||
|
3. Try Node 20 specific @types/node version if needed
|
||||||
|
4. Clean reinstall dependencies if conflicts persist
|
||||||
|
5. Update TypeScript compiler if necessary
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Autogenerated by [NPM Audit Fix with TypeScript Auto-Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit-ts-fix.yml)
|
||||||
|
EOF
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -n "$fixes_applied" ]; then
|
||||||
|
# Add the fixes applied section to the file
|
||||||
|
sed -i "/This workflow attempts/a\\
|
||||||
|
\\
|
||||||
|
**Automated Fixes Applied**:$fixes_applied" pr_body.txt
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create PR with appropriate labels
|
||||||
|
labels="dependencies,dependencies-not-dependabot,typescript,npm,security"
|
||||||
|
if [[ "$build_status" == *"fails"* ]]; then
|
||||||
|
labels="dependencies,dependencies-not-dependabot,typescript,npm,security,needs-manual-review"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create PR
|
||||||
|
gh pr create -B main -H "$branch_name" \
|
||||||
|
--title "chore: npm audit fix with TypeScript auto-repair" \
|
||||||
|
--label "$labels" \
|
||||||
|
--body-file pr_body.txt
|
||||||
|
else
|
||||||
|
echo "No changes to commit"
|
||||||
|
fi
|
||||||
137
.github/workflows/npm-audit.yml
vendored
Normal file
137
.github/workflows/npm-audit.yml
vendored
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
name: NPM Audit Fix
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "0 7 * * 1" # Weekly on Monday at 7 AM UTC
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
npm-audit:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Setup Node.js
|
||||||
|
uses: actions/setup-node@v6
|
||||||
|
with:
|
||||||
|
node-version: "20"
|
||||||
|
|
||||||
|
- name: NPM install and audit fix
|
||||||
|
working-directory: src/Misc/expressionFunc/hashFiles
|
||||||
|
run: |
|
||||||
|
npm install
|
||||||
|
|
||||||
|
# Check what vulnerabilities exist
|
||||||
|
echo "=== Checking current vulnerabilities ==="
|
||||||
|
npm audit || true
|
||||||
|
|
||||||
|
# Apply audit fix --force to get security updates
|
||||||
|
echo "=== Applying npm audit fix --force ==="
|
||||||
|
npm audit fix --force
|
||||||
|
|
||||||
|
# Test if build still works and set status
|
||||||
|
echo "=== Testing build compatibility ==="
|
||||||
|
if npm run all; then
|
||||||
|
echo "✅ Build successful after audit fix"
|
||||||
|
echo "AUDIT_FIX_STATUS=success" >> $GITHUB_ENV
|
||||||
|
else
|
||||||
|
echo "❌ Build failed after audit fix - will create PR with fix instructions"
|
||||||
|
echo "AUDIT_FIX_STATUS=build_failed" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Create PR if changes exist
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: |
|
||||||
|
# Check if there are any changes
|
||||||
|
if [ -n "$(git status --porcelain)" ]; then
|
||||||
|
# Configure git
|
||||||
|
git config --global user.name "github-actions[bot]"
|
||||||
|
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||||
|
|
||||||
|
# Create branch and commit changes
|
||||||
|
branch_name="chore/npm-audit-fix-$(date +%Y%m%d)"
|
||||||
|
git checkout -b "$branch_name"
|
||||||
|
git add .
|
||||||
|
git commit -m "chore: npm audit fix for hashFiles dependencies" --no-verify
|
||||||
|
git push origin "$branch_name"
|
||||||
|
|
||||||
|
# Create PR body based on what actually happened
|
||||||
|
if [ "$AUDIT_FIX_STATUS" = "success" ]; then
|
||||||
|
cat > pr_body.txt << 'EOF'
|
||||||
|
Automated npm audit fix for security vulnerabilities in hashFiles dependencies.
|
||||||
|
|
||||||
|
**✅ Full Fix Applied Successfully**
|
||||||
|
This update addresses npm security advisories and ensures dependencies are secure and up-to-date.
|
||||||
|
|
||||||
|
**Changes made:**
|
||||||
|
- Applied `npm audit fix --force` to resolve security vulnerabilities
|
||||||
|
- Updated package-lock.json with security patches
|
||||||
|
- Verified build compatibility with `npm run all`
|
||||||
|
|
||||||
|
**Next steps:**
|
||||||
|
- Review the dependency changes
|
||||||
|
- Verify the hashFiles functionality still works as expected
|
||||||
|
- Merge when ready
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
|
||||||
|
EOF
|
||||||
|
elif [ "$AUDIT_FIX_STATUS" = "build_failed" ]; then
|
||||||
|
cat > pr_body.txt << 'EOF'
|
||||||
|
Automated npm audit fix for security vulnerabilities in hashFiles dependencies.
|
||||||
|
|
||||||
|
**⚠️ Security Fixes Applied - Build Issues Need Manual Resolution**
|
||||||
|
This update applies important security patches but causes build failures that require manual fixes.
|
||||||
|
|
||||||
|
**Changes made:**
|
||||||
|
- Applied `npm audit fix --force` to resolve security vulnerabilities
|
||||||
|
- Updated package-lock.json with security patches
|
||||||
|
|
||||||
|
**⚠️ Build Issues Detected:**
|
||||||
|
The build fails after applying security fixes, likely due to TypeScript compatibility issues with updated `@types/node`.
|
||||||
|
|
||||||
|
**Required Manual Fixes:**
|
||||||
|
1. Review TypeScript compilation errors in the build output
|
||||||
|
2. Update TypeScript configuration if needed
|
||||||
|
3. Consider pinning `@types/node` to a compatible version
|
||||||
|
4. Run `npm run all` locally to verify fixes
|
||||||
|
|
||||||
|
**Next steps:**
|
||||||
|
- **DO NOT merge until build issues are resolved**
|
||||||
|
- Apply manual fixes for TypeScript compatibility
|
||||||
|
- Test the hashFiles functionality still works as expected
|
||||||
|
- Merge when build passes
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
|
||||||
|
EOF
|
||||||
|
else
|
||||||
|
# Fallback case
|
||||||
|
cat > pr_body.txt << 'EOF'
|
||||||
|
Automated npm audit attempted for security vulnerabilities in hashFiles dependencies.
|
||||||
|
|
||||||
|
**ℹ️ No Changes Applied**
|
||||||
|
No security vulnerabilities were found or no changes were needed.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
|
||||||
|
EOF
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Create PR
|
||||||
|
gh pr create -B main -H "$branch_name" \
|
||||||
|
--title "chore: npm audit fix for hashFiles dependencies" \
|
||||||
|
--label "dependencies" \
|
||||||
|
--label "dependencies-weekly-check" \
|
||||||
|
--label "dependencies-not-dependabot" \
|
||||||
|
--label "npm" \
|
||||||
|
--label "typescript" \
|
||||||
|
--label "security" \
|
||||||
|
--body-file pr_body.txt
|
||||||
|
else
|
||||||
|
echo "✅ No changes to commit - npm audit fix did not modify any files"
|
||||||
|
fi
|
||||||
66
.github/workflows/release.yml
vendored
66
.github/workflows/release.yml
vendored
@@ -11,16 +11,15 @@ jobs:
|
|||||||
if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main'
|
if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main'
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
# Make sure ./releaseVersion match ./src/runnerversion
|
# Make sure ./releaseVersion match ./src/runnerversion
|
||||||
# Query GitHub release ensure version is not used
|
# Query GitHub release ensure version is not used
|
||||||
- name: Check version
|
- name: Check version
|
||||||
uses: actions/github-script@0.3.0
|
uses: actions/github-script@v8
|
||||||
with:
|
with:
|
||||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||||
script: |
|
script: |
|
||||||
const core = require('@actions/core')
|
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||||
const releaseVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '')
|
const releaseVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '')
|
||||||
@@ -30,7 +29,7 @@ jobs:
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
const release = await github.repos.getReleaseByTag({
|
const release = await github.rest.repos.getReleaseByTag({
|
||||||
owner: '${{ github.event.repository.owner.name }}',
|
owner: '${{ github.event.repository.owner.name }}',
|
||||||
repo: '${{ github.event.repository.name }}',
|
repo: '${{ github.event.repository.name }}',
|
||||||
tag: 'v' + runnerVersion
|
tag: 'v' + runnerVersion
|
||||||
@@ -78,7 +77,7 @@ jobs:
|
|||||||
devScript: ./dev.sh
|
devScript: ./dev.sh
|
||||||
|
|
||||||
- runtime: win-x64
|
- runtime: win-x64
|
||||||
os: windows-2019
|
os: windows-latest
|
||||||
devScript: ./dev
|
devScript: ./dev
|
||||||
|
|
||||||
- runtime: win-arm64
|
- runtime: win-arm64
|
||||||
@@ -87,7 +86,7 @@ jobs:
|
|||||||
|
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
# Build runner layout
|
# Build runner layout
|
||||||
- name: Build & Layout Release
|
- name: Build & Layout Release
|
||||||
@@ -119,7 +118,7 @@ jobs:
|
|||||||
# Upload runner package tar.gz/zip as artifact.
|
# Upload runner package tar.gz/zip as artifact.
|
||||||
- name: Publish Artifact
|
- name: Publish Artifact
|
||||||
if: github.event_name != 'pull_request'
|
if: github.event_name != 'pull_request'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v6
|
||||||
with:
|
with:
|
||||||
name: runner-packages-${{ matrix.runtime }}
|
name: runner-packages-${{ matrix.runtime }}
|
||||||
path: |
|
path: |
|
||||||
@@ -130,41 +129,41 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
|
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
# Download runner package tar.gz/zip produced by 'build' job
|
# Download runner package tar.gz/zip produced by 'build' job
|
||||||
- name: Download Artifact (win-x64)
|
- name: Download Artifact (win-x64)
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v7
|
||||||
with:
|
with:
|
||||||
name: runner-packages-win-x64
|
name: runner-packages-win-x64
|
||||||
path: ./
|
path: ./
|
||||||
- name: Download Artifact (win-arm64)
|
- name: Download Artifact (win-arm64)
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v7
|
||||||
with:
|
with:
|
||||||
name: runner-packages-win-arm64
|
name: runner-packages-win-arm64
|
||||||
path: ./
|
path: ./
|
||||||
- name: Download Artifact (osx-x64)
|
- name: Download Artifact (osx-x64)
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v7
|
||||||
with:
|
with:
|
||||||
name: runner-packages-osx-x64
|
name: runner-packages-osx-x64
|
||||||
path: ./
|
path: ./
|
||||||
- name: Download Artifact (osx-arm64)
|
- name: Download Artifact (osx-arm64)
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v7
|
||||||
with:
|
with:
|
||||||
name: runner-packages-osx-arm64
|
name: runner-packages-osx-arm64
|
||||||
path: ./
|
path: ./
|
||||||
- name: Download Artifact (linux-x64)
|
- name: Download Artifact (linux-x64)
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v7
|
||||||
with:
|
with:
|
||||||
name: runner-packages-linux-x64
|
name: runner-packages-linux-x64
|
||||||
path: ./
|
path: ./
|
||||||
- name: Download Artifact (linux-arm)
|
- name: Download Artifact (linux-arm)
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v7
|
||||||
with:
|
with:
|
||||||
name: runner-packages-linux-arm
|
name: runner-packages-linux-arm
|
||||||
path: ./
|
path: ./
|
||||||
- name: Download Artifact (linux-arm64)
|
- name: Download Artifact (linux-arm64)
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v7
|
||||||
with:
|
with:
|
||||||
name: runner-packages-linux-arm64
|
name: runner-packages-linux-arm64
|
||||||
path: ./
|
path: ./
|
||||||
@@ -172,11 +171,10 @@ jobs:
|
|||||||
# Create ReleaseNote file
|
# Create ReleaseNote file
|
||||||
- name: Create ReleaseNote
|
- name: Create ReleaseNote
|
||||||
id: releaseNote
|
id: releaseNote
|
||||||
uses: actions/github-script@0.3.0
|
uses: actions/github-script@v8
|
||||||
with:
|
with:
|
||||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||||
script: |
|
script: |
|
||||||
const core = require('@actions/core')
|
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||||
var releaseNote = fs.readFileSync('${{ github.workspace }}/releaseNote.md', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion)
|
var releaseNote = fs.readFileSync('${{ github.workspace }}/releaseNote.md', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion)
|
||||||
@@ -216,7 +214,7 @@ jobs:
|
|||||||
|
|
||||||
# Upload release assets (full runner packages)
|
# Upload release assets (full runner packages)
|
||||||
- name: Upload Release Asset (win-x64)
|
- name: Upload Release Asset (win-x64)
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
uses: actions/upload-release-asset@v1.0.2
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
with:
|
||||||
@@ -226,7 +224,7 @@ jobs:
|
|||||||
asset_content_type: application/octet-stream
|
asset_content_type: application/octet-stream
|
||||||
|
|
||||||
- name: Upload Release Asset (win-arm64)
|
- name: Upload Release Asset (win-arm64)
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
uses: actions/upload-release-asset@v1.0.2
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
with:
|
||||||
@@ -236,7 +234,7 @@ jobs:
|
|||||||
asset_content_type: application/octet-stream
|
asset_content_type: application/octet-stream
|
||||||
|
|
||||||
- name: Upload Release Asset (linux-x64)
|
- name: Upload Release Asset (linux-x64)
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
uses: actions/upload-release-asset@v1.0.2
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
with:
|
||||||
@@ -246,7 +244,7 @@ jobs:
|
|||||||
asset_content_type: application/octet-stream
|
asset_content_type: application/octet-stream
|
||||||
|
|
||||||
- name: Upload Release Asset (osx-x64)
|
- name: Upload Release Asset (osx-x64)
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
uses: actions/upload-release-asset@v1.0.2
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
with:
|
||||||
@@ -256,7 +254,7 @@ jobs:
|
|||||||
asset_content_type: application/octet-stream
|
asset_content_type: application/octet-stream
|
||||||
|
|
||||||
- name: Upload Release Asset (osx-arm64)
|
- name: Upload Release Asset (osx-arm64)
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
uses: actions/upload-release-asset@v1.0.2
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
with:
|
||||||
@@ -266,7 +264,7 @@ jobs:
|
|||||||
asset_content_type: application/octet-stream
|
asset_content_type: application/octet-stream
|
||||||
|
|
||||||
- name: Upload Release Asset (linux-arm)
|
- name: Upload Release Asset (linux-arm)
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
uses: actions/upload-release-asset@v1.0.2
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
with:
|
||||||
@@ -276,7 +274,7 @@ jobs:
|
|||||||
asset_content_type: application/octet-stream
|
asset_content_type: application/octet-stream
|
||||||
|
|
||||||
- name: Upload Release Asset (linux-arm64)
|
- name: Upload Release Asset (linux-arm64)
|
||||||
uses: actions/upload-release-asset@v1.0.1
|
uses: actions/upload-release-asset@v1.0.2
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
with:
|
||||||
@@ -291,16 +289,18 @@ jobs:
|
|||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
packages: write
|
packages: write
|
||||||
|
id-token: write
|
||||||
|
attestations: write
|
||||||
env:
|
env:
|
||||||
REGISTRY: ghcr.io
|
REGISTRY: ghcr.io
|
||||||
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
|
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Compute image version
|
- name: Compute image version
|
||||||
id: image
|
id: image
|
||||||
uses: actions/github-script@v6
|
uses: actions/github-script@v8
|
||||||
with:
|
with:
|
||||||
script: |
|
script: |
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
@@ -312,7 +312,7 @@ jobs:
|
|||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: Log into registry ${{ env.REGISTRY }}
|
- name: Log into registry ${{ env.REGISTRY }}
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ${{ env.REGISTRY }}
|
registry: ${{ env.REGISTRY }}
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
@@ -320,7 +320,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Build and push Docker image
|
- name: Build and push Docker image
|
||||||
id: build-and-push
|
id: build-and-push
|
||||||
uses: docker/build-push-action@v3
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
context: ./images
|
context: ./images
|
||||||
platforms: |
|
platforms: |
|
||||||
@@ -334,5 +334,13 @@ jobs:
|
|||||||
push: true
|
push: true
|
||||||
labels: |
|
labels: |
|
||||||
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
|
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
|
||||||
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
|
|
||||||
org.opencontainers.image.licenses=MIT
|
org.opencontainers.image.licenses=MIT
|
||||||
|
annotations: |
|
||||||
|
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
|
||||||
|
|
||||||
|
- name: Generate attestation
|
||||||
|
uses: actions/attest-build-provenance@v3
|
||||||
|
with:
|
||||||
|
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||||
|
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||||
|
push-to-registry: true
|
||||||
|
|||||||
2
.github/workflows/stale-bot.yml
vendored
2
.github/workflows/stale-bot.yml
vendored
@@ -7,7 +7,7 @@ jobs:
|
|||||||
stale:
|
stale:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/stale@v8
|
- uses: actions/stale@v10
|
||||||
with:
|
with:
|
||||||
stale-issue-message: "This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days."
|
stale-issue-message: "This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days."
|
||||||
close-issue-message: "This issue was closed because it has been stalled for 15 days with no activity."
|
close-issue-message: "This issue was closed because it has been stalled for 15 days with no activity."
|
||||||
|
|||||||
@@ -1,6 +1 @@
|
|||||||
#!/usr/bin/env sh
|
cd src/Misc/expressionFunc/hashFiles && npx lint-staged
|
||||||
. "$(dirname -- "$0")/_/husky.sh"
|
|
||||||
|
|
||||||
cd src/Misc/expressionFunc/hashFiles
|
|
||||||
|
|
||||||
npx lint-staged
|
|
||||||
|
|||||||
1176
.opencode/plans/dap-browser-extension.md
Normal file
1176
.opencode/plans/dap-browser-extension.md
Normal file
File diff suppressed because it is too large
Load Diff
346
.opencode/plans/dap-cancellation-support.md
Normal file
346
.opencode/plans/dap-cancellation-support.md
Normal file
@@ -0,0 +1,346 @@
|
|||||||
|
# DAP Cancellation Support
|
||||||
|
|
||||||
|
**Status:** Implemented
|
||||||
|
**Author:** OpenCode
|
||||||
|
**Date:** January 2026
|
||||||
|
|
||||||
|
## Problem
|
||||||
|
|
||||||
|
When a cancellation signal for the current job comes in from the server, the DAP debugging session doesn't properly respond. If the runner is paused at a breakpoint waiting for debugger commands (or if a debugger never connects), the job gets stuck forever and requires manually deleting the runner.
|
||||||
|
|
||||||
|
### Root Cause
|
||||||
|
|
||||||
|
The `DapDebugSession.WaitForCommandAsync()` method uses a `TaskCompletionSource` that only completes when a DAP command arrives from the debugger. There's no mechanism to interrupt this wait when the job is cancelled externally.
|
||||||
|
|
||||||
|
Additionally, REPL shell commands use `CancellationToken.None`, so they also ignore job cancellation.
|
||||||
|
|
||||||
|
## Solution
|
||||||
|
|
||||||
|
Add proper cancellation token support throughout the DAP debugging flow:
|
||||||
|
|
||||||
|
1. Pass the job cancellation token to `OnStepStartingAsync` and `WaitForCommandAsync`
|
||||||
|
2. Register cancellation callbacks to release blocking waits
|
||||||
|
3. Add a `CancelSession()` method for external cancellation
|
||||||
|
4. Send DAP `terminated` and `exited` events to notify the debugger before cancelling
|
||||||
|
5. Use the cancellation token for REPL shell command execution
|
||||||
|
|
||||||
|
## Progress Checklist
|
||||||
|
|
||||||
|
- [x] **Phase 1:** Update IDapDebugSession interface
|
||||||
|
- [x] **Phase 2:** Update DapDebugSession implementation
|
||||||
|
- [x] **Phase 3:** Update StepsRunner to pass cancellation token
|
||||||
|
- [x] **Phase 4:** Update JobRunner to register cancellation handler
|
||||||
|
- [ ] **Phase 5:** Testing
|
||||||
|
|
||||||
|
## Files to Modify
|
||||||
|
|
||||||
|
| File | Changes |
|
||||||
|
|------|---------|
|
||||||
|
| `src/Runner.Worker/Dap/DapDebugSession.cs` | Add cancellation support to `OnStepStartingAsync`, `WaitForCommandAsync`, `ExecuteShellCommandAsync`, add `CancelSession` method |
|
||||||
|
| `src/Runner.Worker/StepsRunner.cs` | Pass `jobContext.CancellationToken` to `OnStepStartingAsync` |
|
||||||
|
| `src/Runner.Worker/JobRunner.cs` | Register cancellation callback to call `CancelSession` on the debug session |
|
||||||
|
|
||||||
|
## Detailed Implementation
|
||||||
|
|
||||||
|
### Phase 1: Update IDapDebugSession Interface
|
||||||
|
|
||||||
|
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs` (lines ~144-242)
|
||||||
|
|
||||||
|
Add new method to interface:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
/// <summary>
|
||||||
|
/// Cancels the debug session externally (e.g., job cancellation).
|
||||||
|
/// Sends terminated event to debugger and releases any blocking waits.
|
||||||
|
/// </summary>
|
||||||
|
void CancelSession();
|
||||||
|
```
|
||||||
|
|
||||||
|
Update existing method signature:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
// Change from:
|
||||||
|
Task OnStepStartingAsync(IStep step, IExecutionContext jobContext, bool isFirstStep);
|
||||||
|
|
||||||
|
// Change to:
|
||||||
|
Task OnStepStartingAsync(IStep step, IExecutionContext jobContext, bool isFirstStep, CancellationToken cancellationToken);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Phase 2: Update DapDebugSession Implementation
|
||||||
|
|
||||||
|
#### 2.1 Add cancellation token field
|
||||||
|
|
||||||
|
**Location:** Around line 260-300 (field declarations section)
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
// Add field to store the job cancellation token for use by REPL commands
|
||||||
|
private CancellationToken _jobCancellationToken;
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2.2 Update OnStepStartingAsync
|
||||||
|
|
||||||
|
**Location:** Line 1159
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
public async Task OnStepStartingAsync(IStep step, IExecutionContext jobContext, bool isFirstStep, CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
if (!IsActive)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
_currentStep = step;
|
||||||
|
_jobContext = jobContext;
|
||||||
|
_jobCancellationToken = cancellationToken; // Store for REPL commands
|
||||||
|
|
||||||
|
// ... rest of existing implementation ...
|
||||||
|
|
||||||
|
// Update the WaitForCommandAsync call at line 1212:
|
||||||
|
await WaitForCommandAsync(cancellationToken);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2.3 Update WaitForCommandAsync
|
||||||
|
|
||||||
|
**Location:** Line 1288
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
private async Task WaitForCommandAsync(CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
lock (_stateLock)
|
||||||
|
{
|
||||||
|
_state = DapSessionState.Paused;
|
||||||
|
_commandTcs = new TaskCompletionSource<DapCommand>(TaskCreationOptions.RunContinuationsAsynchronously);
|
||||||
|
}
|
||||||
|
|
||||||
|
Trace.Info("Waiting for debugger command...");
|
||||||
|
|
||||||
|
// Register cancellation to release the wait
|
||||||
|
using (cancellationToken.Register(() =>
|
||||||
|
{
|
||||||
|
Trace.Info("Job cancellation detected, releasing debugger wait");
|
||||||
|
_commandTcs?.TrySetResult(DapCommand.Disconnect);
|
||||||
|
}))
|
||||||
|
{
|
||||||
|
var command = await _commandTcs.Task;
|
||||||
|
|
||||||
|
Trace.Info($"Received command: {command}");
|
||||||
|
|
||||||
|
lock (_stateLock)
|
||||||
|
{
|
||||||
|
if (_state == DapSessionState.Paused)
|
||||||
|
{
|
||||||
|
_state = DapSessionState.Running;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send continued event (only for normal commands, not cancellation)
|
||||||
|
if (!cancellationToken.IsCancellationRequested &&
|
||||||
|
(command == DapCommand.Continue || command == DapCommand.Next))
|
||||||
|
{
|
||||||
|
_server?.SendEvent(new Event
|
||||||
|
{
|
||||||
|
EventType = "continued",
|
||||||
|
Body = new ContinuedEventBody
|
||||||
|
{
|
||||||
|
ThreadId = JobThreadId,
|
||||||
|
AllThreadsContinued = true
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2.4 Add CancelSession method
|
||||||
|
|
||||||
|
**Location:** After `OnJobCompleted()` method, around line 1286
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
/// <summary>
|
||||||
|
/// Cancels the debug session externally (e.g., job cancellation).
|
||||||
|
/// Sends terminated/exited events to debugger and releases any blocking waits.
|
||||||
|
/// </summary>
|
||||||
|
public void CancelSession()
|
||||||
|
{
|
||||||
|
Trace.Info("CancelSession called - terminating debug session");
|
||||||
|
|
||||||
|
lock (_stateLock)
|
||||||
|
{
|
||||||
|
if (_state == DapSessionState.Terminated)
|
||||||
|
{
|
||||||
|
Trace.Info("Session already terminated, ignoring CancelSession");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
_state = DapSessionState.Terminated;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send terminated event to debugger so it updates its UI
|
||||||
|
_server?.SendEvent(new Event
|
||||||
|
{
|
||||||
|
EventType = "terminated",
|
||||||
|
Body = new TerminatedEventBody()
|
||||||
|
});
|
||||||
|
|
||||||
|
// Send exited event with cancellation exit code (130 = SIGINT convention)
|
||||||
|
_server?.SendEvent(new Event
|
||||||
|
{
|
||||||
|
EventType = "exited",
|
||||||
|
Body = new ExitedEventBody { ExitCode = 130 }
|
||||||
|
});
|
||||||
|
|
||||||
|
// Release any pending command waits
|
||||||
|
_commandTcs?.TrySetResult(DapCommand.Disconnect);
|
||||||
|
|
||||||
|
Trace.Info("Debug session cancelled");
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 2.5 Update ExecuteShellCommandAsync
|
||||||
|
|
||||||
|
**Location:** Line 889-895
|
||||||
|
|
||||||
|
Change the `ExecuteAsync` call to use the stored cancellation token:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
int exitCode;
|
||||||
|
try
|
||||||
|
{
|
||||||
|
exitCode = await processInvoker.ExecuteAsync(
|
||||||
|
workingDirectory: workingDirectory,
|
||||||
|
fileName: shell,
|
||||||
|
arguments: string.Format(shellArgs, command),
|
||||||
|
environment: env,
|
||||||
|
requireExitCodeZero: false,
|
||||||
|
cancellationToken: _jobCancellationToken); // Changed from CancellationToken.None
|
||||||
|
}
|
||||||
|
catch (OperationCanceledException)
|
||||||
|
{
|
||||||
|
Trace.Info("Shell command cancelled due to job cancellation");
|
||||||
|
return new EvaluateResponseBody
|
||||||
|
{
|
||||||
|
Result = "(cancelled)",
|
||||||
|
Type = "error",
|
||||||
|
VariablesReference = 0
|
||||||
|
};
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Error($"Shell execution failed: {ex}");
|
||||||
|
return new EvaluateResponseBody
|
||||||
|
{
|
||||||
|
Result = $"Error: {ex.Message}",
|
||||||
|
Type = "error",
|
||||||
|
VariablesReference = 0
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Phase 3: Update StepsRunner
|
||||||
|
|
||||||
|
**File:** `src/Runner.Worker/StepsRunner.cs`
|
||||||
|
**Location:** Line 204
|
||||||
|
|
||||||
|
Change:
|
||||||
|
```csharp
|
||||||
|
await debugSession.OnStepStartingAsync(step, jobContext, isFirstStep);
|
||||||
|
```
|
||||||
|
|
||||||
|
To:
|
||||||
|
```csharp
|
||||||
|
await debugSession.OnStepStartingAsync(step, jobContext, isFirstStep, jobContext.CancellationToken);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Phase 4: Update JobRunner
|
||||||
|
|
||||||
|
**File:** `src/Runner.Worker/JobRunner.cs`
|
||||||
|
|
||||||
|
#### 4.1 Add cancellation registration
|
||||||
|
|
||||||
|
**Location:** After line 191 (after "Debugger connected" output), inside the debug mode block:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
// Register cancellation handler to properly terminate DAP session on job cancellation
|
||||||
|
CancellationTokenRegistration? dapCancellationRegistration = null;
|
||||||
|
try
|
||||||
|
{
|
||||||
|
dapCancellationRegistration = jobRequestCancellationToken.Register(() =>
|
||||||
|
{
|
||||||
|
Trace.Info("Job cancelled - terminating DAP session");
|
||||||
|
debugSession.CancelSession();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Warning($"Failed to register DAP cancellation handler: {ex.Message}");
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Note: The `dapCancellationRegistration` variable should be declared at a higher scope (around line 116 with other declarations) so it can be disposed in the finally block.
|
||||||
|
|
||||||
|
#### 4.2 Dispose the registration
|
||||||
|
|
||||||
|
**Location:** In the finally block (after line 316, alongside dapServer cleanup):
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
// Dispose DAP cancellation registration
|
||||||
|
dapCancellationRegistration?.Dispose();
|
||||||
|
```
|
||||||
|
|
||||||
|
## Behavior Summary
|
||||||
|
|
||||||
|
| Scenario | Before | After |
|
||||||
|
|----------|--------|-------|
|
||||||
|
| Paused at breakpoint, job cancelled | **Stuck forever** | DAP terminated event sent, wait released, job cancels normally |
|
||||||
|
| REPL command running, job cancelled | Command runs forever | Command cancelled, job cancels normally |
|
||||||
|
| Waiting for debugger connection, job cancelled | Already handled | No change (already works) |
|
||||||
|
| Debugger disconnects voluntarily | Works | No change |
|
||||||
|
| Normal step execution, job cancelled | Works | No change (existing cancellation logic handles this) |
|
||||||
|
|
||||||
|
## Exit Code Semantics
|
||||||
|
|
||||||
|
The `exited` event uses these exit codes:
|
||||||
|
- `0` = job succeeded
|
||||||
|
- `1` = job failed
|
||||||
|
- `130` = job cancelled (standard Unix convention for SIGINT/Ctrl+C)
|
||||||
|
|
||||||
|
## Testing Scenarios
|
||||||
|
|
||||||
|
1. **Basic cancellation while paused:**
|
||||||
|
- Start a debug job, let it pause at first step
|
||||||
|
- Cancel the job from GitHub UI
|
||||||
|
- Verify: DAP client receives terminated event, runner exits cleanly
|
||||||
|
|
||||||
|
2. **Cancellation during REPL command:**
|
||||||
|
- Pause at a step, run `!sleep 60` in REPL
|
||||||
|
- Cancel the job from GitHub UI
|
||||||
|
- Verify: Sleep command terminates, DAP client receives terminated event, runner exits cleanly
|
||||||
|
|
||||||
|
3. **Cancellation before debugger connects:**
|
||||||
|
- Start a debug job (it waits for connection)
|
||||||
|
- Cancel the job before connecting a debugger
|
||||||
|
- Verify: Runner exits cleanly (this already works, just verify no regression)
|
||||||
|
|
||||||
|
4. **Normal operation (no cancellation):**
|
||||||
|
- Run through a debug session normally with step/continue
|
||||||
|
- Verify: No change in behavior
|
||||||
|
|
||||||
|
5. **Debugger disconnect:**
|
||||||
|
- Connect debugger, then disconnect it manually
|
||||||
|
- Verify: Job continues to completion (existing behavior preserved)
|
||||||
|
|
||||||
|
## Estimated Effort
|
||||||
|
|
||||||
|
| Phase | Effort |
|
||||||
|
|-------|--------|
|
||||||
|
| Phase 1: Interface update | 15 min |
|
||||||
|
| Phase 2: DapDebugSession implementation | 45 min |
|
||||||
|
| Phase 3: StepsRunner update | 5 min |
|
||||||
|
| Phase 4: JobRunner update | 15 min |
|
||||||
|
| Phase 5: Testing | 30 min |
|
||||||
|
| **Total** | **~2 hours** |
|
||||||
|
|
||||||
|
## References
|
||||||
|
|
||||||
|
- DAP Specification: https://microsoft.github.io/debug-adapter-protocol/specification
|
||||||
|
- Related plan: `dap-debugging.md` (original DAP implementation)
|
||||||
511
.opencode/plans/dap-debug-logging.md
Normal file
511
.opencode/plans/dap-debug-logging.md
Normal file
@@ -0,0 +1,511 @@
|
|||||||
|
# DAP Debug Logging Feature
|
||||||
|
|
||||||
|
**Status:** Implemented
|
||||||
|
**Date:** January 2026
|
||||||
|
**Related:** [dap-debugging.md](./dap-debugging.md), [dap-step-backwards.md](./dap-step-backwards.md)
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Add comprehensive debug logging to the DAP debugging infrastructure that can be toggled from the DAP client. This helps diagnose issues like step conclusions not updating correctly after step-back operations.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
### 1. Debug Log Levels
|
||||||
|
|
||||||
|
| Level | Value | What Gets Logged |
|
||||||
|
|-------|-------|------------------|
|
||||||
|
| `Off` | 0 | Nothing |
|
||||||
|
| `Minimal` | 1 | Errors, critical state changes |
|
||||||
|
| `Normal` | 2 | Step lifecycle, checkpoint operations |
|
||||||
|
| `Verbose` | 3 | Everything including outputs, expressions |
|
||||||
|
|
||||||
|
### 2. Enabling Debug Logging
|
||||||
|
|
||||||
|
#### Via Attach Arguments (nvim-dap config)
|
||||||
|
|
||||||
|
```lua
|
||||||
|
{
|
||||||
|
type = "runner",
|
||||||
|
request = "attach",
|
||||||
|
debugLogging = true, -- Enable debug logging (defaults to "normal" level)
|
||||||
|
debugLogLevel = "verbose", -- Optional: "off", "minimal", "normal", "verbose"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Via REPL Commands (runtime toggle)
|
||||||
|
|
||||||
|
| Command | Description |
|
||||||
|
|---------|-------------|
|
||||||
|
| `!debug on` | Enable debug logging (level: normal) |
|
||||||
|
| `!debug off` | Disable debug logging |
|
||||||
|
| `!debug minimal` | Set level to minimal |
|
||||||
|
| `!debug normal` | Set level to normal |
|
||||||
|
| `!debug verbose` | Set level to verbose |
|
||||||
|
| `!debug status` | Show current debug settings |
|
||||||
|
|
||||||
|
### 3. Log Output Format
|
||||||
|
|
||||||
|
All debug logs are sent to the DAP console with the format:
|
||||||
|
|
||||||
|
```
|
||||||
|
[DEBUG] [Category] Message
|
||||||
|
```
|
||||||
|
|
||||||
|
Categories include:
|
||||||
|
- `[Step]` - Step lifecycle events
|
||||||
|
- `[Checkpoint]` - Checkpoint creation/restoration
|
||||||
|
- `[StepsContext]` - Steps context mutations (SetOutcome, SetConclusion, SetOutput, ClearScope)
|
||||||
|
|
||||||
|
### 4. Example Output
|
||||||
|
|
||||||
|
With `!debug verbose` enabled:
|
||||||
|
|
||||||
|
```
|
||||||
|
[DEBUG] [Step] Starting: 'cat doesnotexist' (index=2)
|
||||||
|
[DEBUG] [Step] Checkpoints available: 2
|
||||||
|
[DEBUG] [StepsContext] SetOutcome: step='thecat', outcome=failure
|
||||||
|
[DEBUG] [StepsContext] SetConclusion: step='thecat', conclusion=failure
|
||||||
|
[DEBUG] [Step] Completed: 'cat doesnotexist', result=Failed
|
||||||
|
[DEBUG] [Step] Context state: outcome=failure, conclusion=failure
|
||||||
|
|
||||||
|
# After step-back:
|
||||||
|
[DEBUG] [Checkpoint] Restoring checkpoint [1] for step 'cat doesnotexist'
|
||||||
|
[DEBUG] [StepsContext] ClearScope: scope='(root)'
|
||||||
|
[DEBUG] [StepsContext] Restoring: clearing scope '(root)', restoring 2 step(s)
|
||||||
|
[DEBUG] [StepsContext] Restored: step='thefoo', outcome=success, conclusion=success
|
||||||
|
|
||||||
|
# After re-running with file created:
|
||||||
|
[DEBUG] [Step] Starting: 'cat doesnotexist' (index=2)
|
||||||
|
[DEBUG] [StepsContext] SetOutcome: step='thecat', outcome=success
|
||||||
|
[DEBUG] [StepsContext] SetConclusion: step='thecat', conclusion=success
|
||||||
|
[DEBUG] [Step] Completed: 'cat doesnotexist', result=Succeeded
|
||||||
|
[DEBUG] [Step] Context state: outcome=success, conclusion=success
|
||||||
|
```
|
||||||
|
|
||||||
|
## Implementation
|
||||||
|
|
||||||
|
### Progress Checklist
|
||||||
|
|
||||||
|
- [x] **Phase 1:** Add debug logging infrastructure to DapDebugSession
|
||||||
|
- [x] **Phase 2:** Add REPL `!debug` command handling
|
||||||
|
- [x] **Phase 3:** Add OnDebugLog callback to StepsContext
|
||||||
|
- [x] **Phase 4:** Add debug logging calls throughout DapDebugSession
|
||||||
|
- [x] **Phase 5:** Hook up StepsContext logging to DapDebugSession
|
||||||
|
- [ ] **Phase 6:** Testing
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 1: Debug Logging Infrastructure
|
||||||
|
|
||||||
|
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs`
|
||||||
|
|
||||||
|
Add enum and helper method:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
// Add enum for debug log levels (near top of file with other enums)
|
||||||
|
public enum DebugLogLevel
|
||||||
|
{
|
||||||
|
Off = 0,
|
||||||
|
Minimal = 1, // Errors, critical state changes
|
||||||
|
Normal = 2, // Step lifecycle, checkpoints
|
||||||
|
Verbose = 3 // Everything including outputs, expressions
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add field (with other private fields)
|
||||||
|
private DebugLogLevel _debugLogLevel = DebugLogLevel.Off;
|
||||||
|
|
||||||
|
// Add helper method (in a #region Debug Logging)
|
||||||
|
private void DebugLog(string message, DebugLogLevel minLevel = DebugLogLevel.Normal)
|
||||||
|
{
|
||||||
|
if (_debugLogLevel >= minLevel)
|
||||||
|
{
|
||||||
|
_server?.SendEvent(new Event
|
||||||
|
{
|
||||||
|
EventType = "output",
|
||||||
|
Body = new OutputEventBody
|
||||||
|
{
|
||||||
|
Category = "console",
|
||||||
|
Output = $"[DEBUG] {message}\n"
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Update `HandleAttach` to parse debug logging arguments:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
private Response HandleAttach(Request request)
|
||||||
|
{
|
||||||
|
Trace.Info("Attach request handled");
|
||||||
|
|
||||||
|
// Parse debug logging from attach args
|
||||||
|
if (request.Arguments is JsonElement args)
|
||||||
|
{
|
||||||
|
if (args.TryGetProperty("debugLogging", out var debugLogging))
|
||||||
|
{
|
||||||
|
if (debugLogging.ValueKind == JsonValueKind.True)
|
||||||
|
{
|
||||||
|
_debugLogLevel = DebugLogLevel.Normal;
|
||||||
|
Trace.Info("Debug logging enabled via attach args (level: normal)");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (args.TryGetProperty("debugLogLevel", out var level) && level.ValueKind == JsonValueKind.String)
|
||||||
|
{
|
||||||
|
_debugLogLevel = level.GetString()?.ToLower() switch
|
||||||
|
{
|
||||||
|
"minimal" => DebugLogLevel.Minimal,
|
||||||
|
"normal" => DebugLogLevel.Normal,
|
||||||
|
"verbose" => DebugLogLevel.Verbose,
|
||||||
|
"off" => DebugLogLevel.Off,
|
||||||
|
_ => _debugLogLevel
|
||||||
|
};
|
||||||
|
Trace.Info($"Debug log level set via attach args: {_debugLogLevel}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return CreateSuccessResponse(null);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 2: REPL `!debug` Command
|
||||||
|
|
||||||
|
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs`
|
||||||
|
|
||||||
|
In `HandleEvaluateAsync`, add handling for the `!debug` command before any other shell command handling:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
// Near the start of HandleEvaluateAsync, after getting the expression:
|
||||||
|
|
||||||
|
// Check for debug command
|
||||||
|
if (expression.StartsWith("!debug", StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
return HandleDebugCommand(expression);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ... rest of existing HandleEvaluateAsync code
|
||||||
|
```
|
||||||
|
|
||||||
|
Add the handler method:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
private Response HandleDebugCommand(string command)
|
||||||
|
{
|
||||||
|
var parts = command.Split(' ', StringSplitOptions.RemoveEmptyEntries);
|
||||||
|
var arg = parts.Length > 1 ? parts[1].ToLower() : "status";
|
||||||
|
|
||||||
|
string result;
|
||||||
|
switch (arg)
|
||||||
|
{
|
||||||
|
case "on":
|
||||||
|
_debugLogLevel = DebugLogLevel.Normal;
|
||||||
|
result = "Debug logging enabled (level: normal)";
|
||||||
|
break;
|
||||||
|
case "off":
|
||||||
|
_debugLogLevel = DebugLogLevel.Off;
|
||||||
|
result = "Debug logging disabled";
|
||||||
|
break;
|
||||||
|
case "minimal":
|
||||||
|
_debugLogLevel = DebugLogLevel.Minimal;
|
||||||
|
result = "Debug logging set to minimal";
|
||||||
|
break;
|
||||||
|
case "normal":
|
||||||
|
_debugLogLevel = DebugLogLevel.Normal;
|
||||||
|
result = "Debug logging set to normal";
|
||||||
|
break;
|
||||||
|
case "verbose":
|
||||||
|
_debugLogLevel = DebugLogLevel.Verbose;
|
||||||
|
result = "Debug logging set to verbose";
|
||||||
|
break;
|
||||||
|
case "status":
|
||||||
|
default:
|
||||||
|
result = $"Debug logging: {_debugLogLevel}";
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
return CreateSuccessResponse(new EvaluateResponseBody
|
||||||
|
{
|
||||||
|
Result = result,
|
||||||
|
VariablesReference = 0
|
||||||
|
});
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 3: StepsContext OnDebugLog Callback
|
||||||
|
|
||||||
|
**File:** `src/Runner.Worker/StepsContext.cs`
|
||||||
|
|
||||||
|
Add callback property and helper:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
public sealed class StepsContext
|
||||||
|
{
|
||||||
|
private static readonly Regex _propertyRegex = new("^[a-zA-Z_][a-zA-Z0-9_]*$", RegexOptions.Compiled);
|
||||||
|
private readonly DictionaryContextData _contextData = new();
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Optional callback for debug logging. When set, will be called with debug messages
|
||||||
|
/// for all StepsContext mutations.
|
||||||
|
/// </summary>
|
||||||
|
public Action<string> OnDebugLog { get; set; }
|
||||||
|
|
||||||
|
private void DebugLog(string message)
|
||||||
|
{
|
||||||
|
OnDebugLog?.Invoke(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ... rest of class
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Update `ClearScope`:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
public void ClearScope(string scopeName)
|
||||||
|
{
|
||||||
|
DebugLog($"[StepsContext] ClearScope: scope='{scopeName ?? "(root)"}'");
|
||||||
|
if (_contextData.TryGetValue(scopeName, out _))
|
||||||
|
{
|
||||||
|
_contextData[scopeName] = new DictionaryContextData();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Update `SetOutput`:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
public void SetOutput(
|
||||||
|
string scopeName,
|
||||||
|
string stepName,
|
||||||
|
string outputName,
|
||||||
|
string value,
|
||||||
|
out string reference)
|
||||||
|
{
|
||||||
|
var step = GetStep(scopeName, stepName);
|
||||||
|
var outputs = step["outputs"].AssertDictionary("outputs");
|
||||||
|
outputs[outputName] = new StringContextData(value);
|
||||||
|
if (_propertyRegex.IsMatch(outputName))
|
||||||
|
{
|
||||||
|
reference = $"steps.{stepName}.outputs.{outputName}";
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
reference = $"steps['{stepName}']['outputs']['{outputName}']";
|
||||||
|
}
|
||||||
|
DebugLog($"[StepsContext] SetOutput: step='{stepName}', output='{outputName}', value='{TruncateValue(value)}'");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static string TruncateValue(string value, int maxLength = 50)
|
||||||
|
{
|
||||||
|
if (string.IsNullOrEmpty(value)) return "(empty)";
|
||||||
|
if (value.Length <= maxLength) return value;
|
||||||
|
return value.Substring(0, maxLength) + "...";
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Update `SetConclusion`:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
public void SetConclusion(
|
||||||
|
string scopeName,
|
||||||
|
string stepName,
|
||||||
|
ActionResult conclusion)
|
||||||
|
{
|
||||||
|
var step = GetStep(scopeName, stepName);
|
||||||
|
var conclusionStr = conclusion.ToString().ToLowerInvariant();
|
||||||
|
step["conclusion"] = new StringContextData(conclusionStr);
|
||||||
|
DebugLog($"[StepsContext] SetConclusion: step='{stepName}', conclusion={conclusionStr}");
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Update `SetOutcome`:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
public void SetOutcome(
|
||||||
|
string scopeName,
|
||||||
|
string stepName,
|
||||||
|
ActionResult outcome)
|
||||||
|
{
|
||||||
|
var step = GetStep(scopeName, stepName);
|
||||||
|
var outcomeStr = outcome.ToString().ToLowerInvariant();
|
||||||
|
step["outcome"] = new StringContextData(outcomeStr);
|
||||||
|
DebugLog($"[StepsContext] SetOutcome: step='{stepName}', outcome={outcomeStr}");
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 4: DapDebugSession Logging Calls
|
||||||
|
|
||||||
|
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs`
|
||||||
|
|
||||||
|
#### In `OnStepStartingAsync` (after setting `_currentStep` and `_jobContext`):
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
DebugLog($"[Step] Starting: '{step.DisplayName}' (index={stepIndex})");
|
||||||
|
DebugLog($"[Step] Checkpoints available: {_checkpoints.Count}");
|
||||||
|
```
|
||||||
|
|
||||||
|
#### In `OnStepCompleted` (after logging to Trace):
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
DebugLog($"[Step] Completed: '{step.DisplayName}', result={result}");
|
||||||
|
|
||||||
|
// Log current steps context state for this step
|
||||||
|
if (_debugLogLevel >= DebugLogLevel.Normal)
|
||||||
|
{
|
||||||
|
var stepsScope = step.ExecutionContext?.Global?.StepsContext?.GetScope(step.ExecutionContext.ScopeName);
|
||||||
|
if (stepsScope != null && !string.IsNullOrEmpty(step.ExecutionContext?.ContextName))
|
||||||
|
{
|
||||||
|
if (stepsScope.TryGetValue(step.ExecutionContext.ContextName, out var stepData) && stepData is DictionaryContextData sd)
|
||||||
|
{
|
||||||
|
var outcome = sd.TryGetValue("outcome", out var o) && o is StringContextData os ? os.Value : "null";
|
||||||
|
var conclusion = sd.TryGetValue("conclusion", out var c) && c is StringContextData cs ? cs.Value : "null";
|
||||||
|
DebugLog($"[Step] Context state: outcome={outcome}, conclusion={conclusion}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### In `CreateCheckpointForPendingStep` (after creating checkpoint):
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
DebugLog($"[Checkpoint] Created [{_checkpoints.Count - 1}] for step '{_pendingStep.DisplayName}'");
|
||||||
|
if (_debugLogLevel >= DebugLogLevel.Verbose)
|
||||||
|
{
|
||||||
|
DebugLog($"[Checkpoint] Snapshot contains {checkpoint.StepsSnapshot.Count} step(s)", DebugLogLevel.Verbose);
|
||||||
|
foreach (var entry in checkpoint.StepsSnapshot)
|
||||||
|
{
|
||||||
|
DebugLog($"[Checkpoint] {entry.Key}: outcome={entry.Value.Outcome}, conclusion={entry.Value.Conclusion}", DebugLogLevel.Verbose);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### In `RestoreCheckpoint` (at start of method):
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
DebugLog($"[Checkpoint] Restoring [{checkpointIndex}] for step '{checkpoint.StepDisplayName}'");
|
||||||
|
if (_debugLogLevel >= DebugLogLevel.Verbose)
|
||||||
|
{
|
||||||
|
DebugLog($"[Checkpoint] Snapshot has {checkpoint.StepsSnapshot.Count} step(s)", DebugLogLevel.Verbose);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### In `RestoreStepsContext` (update existing method):
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
private void RestoreStepsContext(StepsContext stepsContext, Dictionary<string, StepStateSnapshot> snapshot, string scopeName)
|
||||||
|
{
|
||||||
|
scopeName = scopeName ?? string.Empty;
|
||||||
|
|
||||||
|
DebugLog($"[StepsContext] Restoring: clearing scope '{(string.IsNullOrEmpty(scopeName) ? "(root)" : scopeName)}', will restore {snapshot.Count} step(s)");
|
||||||
|
|
||||||
|
stepsContext.ClearScope(scopeName);
|
||||||
|
|
||||||
|
foreach (var entry in snapshot)
|
||||||
|
{
|
||||||
|
var key = entry.Key;
|
||||||
|
var slashIndex = key.IndexOf('/');
|
||||||
|
|
||||||
|
if (slashIndex >= 0)
|
||||||
|
{
|
||||||
|
var snapshotScopeName = slashIndex > 0 ? key.Substring(0, slashIndex) : string.Empty;
|
||||||
|
var stepName = key.Substring(slashIndex + 1);
|
||||||
|
|
||||||
|
if (snapshotScopeName == scopeName)
|
||||||
|
{
|
||||||
|
var state = entry.Value;
|
||||||
|
|
||||||
|
if (state.Outcome.HasValue)
|
||||||
|
{
|
||||||
|
stepsContext.SetOutcome(scopeName, stepName, state.Outcome.Value);
|
||||||
|
}
|
||||||
|
if (state.Conclusion.HasValue)
|
||||||
|
{
|
||||||
|
stepsContext.SetConclusion(scopeName, stepName, state.Conclusion.Value);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (state.Outputs != null)
|
||||||
|
{
|
||||||
|
foreach (var output in state.Outputs)
|
||||||
|
{
|
||||||
|
stepsContext.SetOutput(scopeName, stepName, output.Key, output.Value, out _);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
DebugLog($"[StepsContext] Restored: step='{stepName}', outcome={state.Outcome}, conclusion={state.Conclusion}", DebugLogLevel.Verbose);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Trace.Info($"Steps context restored: cleared scope '{scopeName}' and restored {snapshot.Count} step(s) from snapshot");
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 5: Hook Up StepsContext Logging
|
||||||
|
|
||||||
|
**File:** `src/Runner.Worker/Dap/DapDebugSession.cs`
|
||||||
|
|
||||||
|
In `OnStepStartingAsync`, after setting `_jobContext`, hook up the callback (only once):
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
// Hook up StepsContext debug logging (do this once when we first get jobContext)
|
||||||
|
if (jobContext.Global.StepsContext.OnDebugLog == null)
|
||||||
|
{
|
||||||
|
jobContext.Global.StepsContext.OnDebugLog = (msg) => DebugLog(msg, DebugLogLevel.Verbose);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note:** StepsContext logging is routed at `Verbose` level since `SetOutput` can be noisy. As a result, `SetConclusion` and `SetOutcome` messages only appear when the level is `Verbose`; however, the important state changes are still logged directly in `OnStepCompleted` at `Normal` level.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Phase 6: Testing
|
||||||
|
|
||||||
|
#### Manual Testing Checklist
|
||||||
|
|
||||||
|
- [ ] `!debug status` shows "Off" by default
|
||||||
|
- [ ] `!debug on` enables logging, shows step lifecycle
|
||||||
|
- [ ] `!debug verbose` shows StepsContext mutations
|
||||||
|
- [ ] `!debug off` disables logging
|
||||||
|
- [ ] Attach with `debugLogging: true` enables logging on connect
|
||||||
|
- [ ] Attach with `debugLogLevel: "verbose"` sets correct level
|
||||||
|
- [ ] Step-back scenario shows restoration logs
|
||||||
|
- [ ] Logs help identify why conclusion might not update
|
||||||
|
|
||||||
|
#### Test Workflow
|
||||||
|
|
||||||
|
Use the test workflow with the `thecat` step:
|
||||||
|
1. Run workflow, let `thecat` fail
|
||||||
|
2. Enable `!debug verbose`
|
||||||
|
3. Step back
|
||||||
|
4. Create the missing file
|
||||||
|
5. Step forward
|
||||||
|
6. Observe logs to see if `SetConclusion` is called with `success`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Files Summary
|
||||||
|
|
||||||
|
### Modified Files
|
||||||
|
|
||||||
|
| File | Changes |
|
||||||
|
|------|---------|
|
||||||
|
| `src/Runner.Worker/Dap/DapDebugSession.cs` | Add `DebugLogLevel` enum, `_debugLogLevel` field, `DebugLog()` helper, `HandleDebugCommand()`, update `HandleAttach`, add logging calls throughout, hook up StepsContext callback |
|
||||||
|
| `src/Runner.Worker/StepsContext.cs` | Add `OnDebugLog` callback, `DebugLog()` helper, `TruncateValue()` helper, add logging to `ClearScope`, `SetOutput`, `SetConclusion`, `SetOutcome` |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Future Enhancements (Out of Scope)
|
||||||
|
|
||||||
|
- Additional debug commands (`!debug checkpoints`, `!debug steps`, `!debug env`)
|
||||||
|
- Log to file option
|
||||||
|
- Structured logging with timestamps
|
||||||
|
- Category-based filtering (e.g., only show `[StepsContext]` logs)
|
||||||
|
- Integration with nvim-dap's virtual text for inline debug info
|
||||||
299
.opencode/plans/dap-debugging-fixes.md
Normal file
299
.opencode/plans/dap-debugging-fixes.md
Normal file
@@ -0,0 +1,299 @@
|
|||||||
|
# DAP Debugging - Bug Fixes and Enhancements
|
||||||
|
|
||||||
|
**Status:** Planned
|
||||||
|
**Date:** January 2026
|
||||||
|
**Related:** [dap-debugging.md](./dap-debugging.md)
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
This document tracks bug fixes and enhancements for the DAP debugging implementation after the initial phases were completed.
|
||||||
|
|
||||||
|
## Issues
|
||||||
|
|
||||||
|
### Bug 1: Double Output in REPL Shell Commands
|
||||||
|
|
||||||
|
**Symptom:** Running commands in the REPL shell produces double output - the first one unmasked, the second one with secrets masked.
|
||||||
|
|
||||||
|
**Root Cause:** In `DapDebugSession.ExecuteShellCommandAsync()` (lines 670-773), output is sent to the debugger twice:
|
||||||
|
|
||||||
|
1. **Real-time streaming (unmasked):** Lines 678-712 stream output via DAP `output` events as data arrives from the process - but this output is NOT masked
|
||||||
|
2. **Final result (masked):** Lines 765-769 return the combined output as `EvaluateResponseBody.Result` with secrets masked
|
||||||
|
|
||||||
|
The DAP client displays both the streamed events AND the evaluate response result, causing duplication.
|
||||||
|
|
||||||
|
**Fix:**
|
||||||
|
1. Mask secrets in the real-time streaming output (add `HostContext.SecretMasker.MaskSecrets()` to lines ~690 and ~708)
|
||||||
|
2. Change the final `Result` to only show exit code summary instead of full output
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Bug 2: Expressions Interpreted as Shell Commands
|
||||||
|
|
||||||
|
**Symptom:** Evaluating expressions like `${{github.event_name}} == 'push'` in the Watch/Expressions pane results in them being executed as shell commands instead of being evaluated as GitHub Actions expressions.
|
||||||
|
|
||||||
|
**Root Cause:** In `DapDebugSession.HandleEvaluateAsync()` (line 514), the condition to detect shell commands is too broad:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
if (evalContext == "repl" || expression.StartsWith("!") || expression.StartsWith("$"))
|
||||||
|
```
|
||||||
|
|
||||||
|
Since `${{github.event_name}}` starts with `$`, it gets routed to shell execution instead of expression evaluation.
|
||||||
|
|
||||||
|
**Fix:**
|
||||||
|
1. Check for `${{` prefix first - these are always GitHub Actions expressions
|
||||||
|
2. Remove the `expression.StartsWith("$")` condition entirely (ambiguous and unnecessary since REPL context handles shell commands)
|
||||||
|
3. Keep `expression.StartsWith("!")` for explicit shell override in non-REPL contexts
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Enhancement: Expression Interpolation in REPL Commands
|
||||||
|
|
||||||
|
**Request:** When running REPL commands like `echo ${{github.event_name}}`, the `${{ }}` expressions should be expanded before shell execution, similar to how `run:` steps work.
|
||||||
|
|
||||||
|
**Approach:** Add a helper method that uses the existing `PipelineTemplateEvaluator` infrastructure to expand expressions in the command string before passing it to the shell.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Implementation Details
|
||||||
|
|
||||||
|
### File: `src/Runner.Worker/Dap/DapDebugSession.cs`
|
||||||
|
|
||||||
|
#### Change 1: Mask Real-Time Streaming Output
|
||||||
|
|
||||||
|
**Location:** Lines ~678-712 (OutputDataReceived and ErrorDataReceived handlers)
|
||||||
|
|
||||||
|
**Before:**
|
||||||
|
```csharp
|
||||||
|
processInvoker.OutputDataReceived += (sender, args) =>
|
||||||
|
{
|
||||||
|
if (!string.IsNullOrEmpty(args.Data))
|
||||||
|
{
|
||||||
|
output.AppendLine(args.Data);
|
||||||
|
_server?.SendEvent(new Event
|
||||||
|
{
|
||||||
|
EventType = "output",
|
||||||
|
Body = new OutputEventBody
|
||||||
|
{
|
||||||
|
Category = "stdout",
|
||||||
|
Output = args.Data + "\n" // NOT MASKED
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
**After:**
|
||||||
|
```csharp
|
||||||
|
processInvoker.OutputDataReceived += (sender, args) =>
|
||||||
|
{
|
||||||
|
if (!string.IsNullOrEmpty(args.Data))
|
||||||
|
{
|
||||||
|
output.AppendLine(args.Data);
|
||||||
|
var maskedData = HostContext.SecretMasker.MaskSecrets(args.Data);
|
||||||
|
_server?.SendEvent(new Event
|
||||||
|
{
|
||||||
|
EventType = "output",
|
||||||
|
Body = new OutputEventBody
|
||||||
|
{
|
||||||
|
Category = "stdout",
|
||||||
|
Output = maskedData + "\n"
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
Apply the same change to the `ErrorDataReceived` handler (~lines 696–712).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Change 2: Return Only Exit Code in Result
|
||||||
|
|
||||||
|
**Location:** Lines ~767-772 (return statement in ExecuteShellCommandAsync)
|
||||||
|
|
||||||
|
**Before:**
|
||||||
|
```csharp
|
||||||
|
return new EvaluateResponseBody
|
||||||
|
{
|
||||||
|
Result = result.TrimEnd('\r', '\n'),
|
||||||
|
Type = exitCode == 0 ? "string" : "error",
|
||||||
|
VariablesReference = 0
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
**After:**
|
||||||
|
```csharp
|
||||||
|
return new EvaluateResponseBody
|
||||||
|
{
|
||||||
|
Result = $"(exit code: {exitCode})",
|
||||||
|
Type = exitCode == 0 ? "string" : "error",
|
||||||
|
VariablesReference = 0
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
Also remove the result combination logic (lines ~747-762) since we no longer need to build the full result string for the response.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Change 3: Fix Expression vs Shell Routing
|
||||||
|
|
||||||
|
**Location:** Lines ~511-536 (HandleEvaluateAsync method)
|
||||||
|
|
||||||
|
**Before:**
|
||||||
|
```csharp
|
||||||
|
try
|
||||||
|
{
|
||||||
|
// Check if this is a REPL/shell command (context: "repl") or starts with shell prefix
|
||||||
|
if (evalContext == "repl" || expression.StartsWith("!") || expression.StartsWith("$"))
|
||||||
|
{
|
||||||
|
// Shell execution mode
|
||||||
|
var command = expression.TrimStart('!', '$').Trim();
|
||||||
|
// ...
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// Expression evaluation mode
|
||||||
|
var result = EvaluateExpression(expression, executionContext);
|
||||||
|
return CreateSuccessResponse(result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**After:**
|
||||||
|
```csharp
|
||||||
|
try
|
||||||
|
{
|
||||||
|
// GitHub Actions expressions start with "${{" - always evaluate as expressions
|
||||||
|
if (expression.StartsWith("${{"))
|
||||||
|
{
|
||||||
|
var result = EvaluateExpression(expression, executionContext);
|
||||||
|
return CreateSuccessResponse(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this is a REPL/shell command:
|
||||||
|
// - context is "repl" (from Debug Console pane)
|
||||||
|
// - expression starts with "!" (explicit shell prefix for Watch pane)
|
||||||
|
if (evalContext == "repl" || expression.StartsWith("!"))
|
||||||
|
{
|
||||||
|
// Shell execution mode
|
||||||
|
var command = expression.TrimStart('!').Trim();
|
||||||
|
if (string.IsNullOrEmpty(command))
|
||||||
|
{
|
||||||
|
return CreateSuccessResponse(new EvaluateResponseBody
|
||||||
|
{
|
||||||
|
Result = "(empty command)",
|
||||||
|
Type = "string",
|
||||||
|
VariablesReference = 0
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
var result = await ExecuteShellCommandAsync(command, executionContext);
|
||||||
|
return CreateSuccessResponse(result);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// Expression evaluation mode (Watch pane, hover, etc.)
|
||||||
|
var result = EvaluateExpression(expression, executionContext);
|
||||||
|
return CreateSuccessResponse(result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Change 4: Add Expression Expansion Helper Method
|
||||||
|
|
||||||
|
**Location:** Add new method before `ExecuteShellCommandAsync` (~line 667)
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
/// <summary>
|
||||||
|
/// Expands ${{ }} expressions within a command string.
|
||||||
|
/// For example: "echo ${{github.event_name}}" -> "echo push"
|
||||||
|
/// </summary>
|
||||||
|
private string ExpandExpressionsInCommand(string command, IExecutionContext context)
|
||||||
|
{
|
||||||
|
if (string.IsNullOrEmpty(command) || !command.Contains("${{"))
|
||||||
|
{
|
||||||
|
return command;
|
||||||
|
}
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
// Create a StringToken with the command
|
||||||
|
var token = new StringToken(null, null, null, command);
|
||||||
|
|
||||||
|
// Use the template evaluator to expand expressions
|
||||||
|
var templateEvaluator = context.ToPipelineTemplateEvaluator();
|
||||||
|
var result = templateEvaluator.EvaluateStepDisplayName(
|
||||||
|
token,
|
||||||
|
context.ExpressionValues,
|
||||||
|
context.ExpressionFunctions);
|
||||||
|
|
||||||
|
// Mask secrets in the expanded command
|
||||||
|
result = HostContext.SecretMasker.MaskSecrets(result ?? command);
|
||||||
|
|
||||||
|
Trace.Info($"Expanded command: {result}");
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Info($"Expression expansion failed, using original command: {ex.Message}");
|
||||||
|
return command;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Required import:** Add `using GitHub.DistributedTask.ObjectTemplating.Tokens;` at the top of the file if not already present.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
#### Change 5: Use Expression Expansion in Shell Execution
|
||||||
|
|
||||||
|
**Location:** Beginning of `ExecuteShellCommandAsync` method (~line 670)
|
||||||
|
|
||||||
|
**Before:**
|
||||||
|
```csharp
|
||||||
|
private async Task<EvaluateResponseBody> ExecuteShellCommandAsync(string command, IExecutionContext context)
|
||||||
|
{
|
||||||
|
Trace.Info($"Executing shell command: {command}");
|
||||||
|
// ...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**After:**
|
||||||
|
```csharp
|
||||||
|
private async Task<EvaluateResponseBody> ExecuteShellCommandAsync(string command, IExecutionContext context)
|
||||||
|
{
|
||||||
|
// Expand ${{ }} expressions in the command first
|
||||||
|
command = ExpandExpressionsInCommand(command, context);
|
||||||
|
|
||||||
|
Trace.Info($"Executing shell command: {command}");
|
||||||
|
// ...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## DAP Context Reference
|
||||||
|
|
||||||
|
For future reference, these are the DAP evaluate context values:
|
||||||
|
|
||||||
|
| DAP Context | Source UI | Behavior |
|
||||||
|
|-------------|-----------|----------|
|
||||||
|
| `"repl"` | Debug Console / REPL pane | Shell execution (with expression expansion) |
|
||||||
|
| `"watch"` | Watch / Expressions pane | Expression evaluation |
|
||||||
|
| `"hover"` | Editor hover (default) | Expression evaluation |
|
||||||
|
| `"variables"` | Variables pane | Expression evaluation |
|
||||||
|
| `"clipboard"` | Copy to clipboard | Expression evaluation |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Testing Checklist
|
||||||
|
|
||||||
|
- [ ] REPL command output is masked and appears only once
|
||||||
|
- [ ] REPL command shows exit code in result field
|
||||||
|
- [ ] Expression `${{github.event_name}}` evaluates correctly in Watch pane
|
||||||
|
- [ ] Expression `${{github.event_name}} == 'push'` evaluates correctly
|
||||||
|
- [ ] REPL command `echo ${{github.event_name}}` expands and executes correctly
|
||||||
|
- [ ] REPL command `!ls -la` from Watch pane works (explicit shell prefix)
|
||||||
|
- [ ] Secrets are masked in all outputs (streaming and expanded commands)
|
||||||
536
.opencode/plans/dap-debugging.md
Normal file
536
.opencode/plans/dap-debugging.md
Normal file
@@ -0,0 +1,536 @@
|
|||||||
|
# DAP-Based Debugging for GitHub Actions Runner
|
||||||
|
|
||||||
|
**Status:** Draft
|
||||||
|
**Author:** GitHub Actions Team
|
||||||
|
**Date:** January 2026
|
||||||
|
|
||||||
|
## Progress Checklist
|
||||||
|
|
||||||
|
- [x] **Phase 1:** DAP Protocol Infrastructure (DapMessages.cs, DapServer.cs, basic DapDebugSession.cs)
|
||||||
|
- [x] **Phase 2:** Debug Session Logic (DapVariableProvider.cs, variable inspection, step history tracking)
|
||||||
|
- [x] **Phase 3:** StepsRunner Integration (pause hooks before/after step execution)
|
||||||
|
- [x] **Phase 4:** Expression Evaluation & Shell (REPL)
|
||||||
|
- [x] **Phase 5:** Startup Integration (JobRunner.cs modifications)
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
This document describes the implementation of Debug Adapter Protocol (DAP) support in the GitHub Actions runner, enabling rich debugging of workflow jobs from any DAP-compatible editor (nvim-dap, VS Code, etc.).
|
||||||
|
|
||||||
|
## Goals
|
||||||
|
|
||||||
|
- **Primary:** Create a working demo to demonstrate the feasibility of DAP-based workflow debugging
|
||||||
|
- **Non-goal:** Production-ready, polished implementation (this is proof-of-concept)
|
||||||
|
|
||||||
|
## User Experience
|
||||||
|
|
||||||
|
1. User re-runs a failed job with "Enable debug logging" checked in GitHub UI
|
||||||
|
2. Runner (running locally) detects debug mode and starts DAP server on port 4711
|
||||||
|
3. Runner prints "Waiting for debugger on port 4711..." and pauses
|
||||||
|
4. User opens editor (nvim with nvim-dap), connects to debugger
|
||||||
|
5. Job execution begins, pausing before the first step
|
||||||
|
6. User can:
|
||||||
|
- **Inspect variables:** View `github`, `env`, `inputs`, `steps`, `secrets` (redacted), `runner`, `job` contexts
|
||||||
|
- **Evaluate expressions:** `${{ github.event.pull_request.title }}`
|
||||||
|
- **Execute shell commands:** Run arbitrary commands in the job's environment (REPL)
|
||||||
|
- **Step through job:** `next` moves to next step, `continue` runs to end
|
||||||
|
- **Pause after steps:** Inspect step outputs before continuing
|
||||||
|
|
||||||
|
## Activation
|
||||||
|
|
||||||
|
DAP debugging activates automatically when the job is in debug mode:
|
||||||
|
|
||||||
|
- User enables "Enable debug logging" when re-running a job in GitHub UI
|
||||||
|
- Server sends `ACTIONS_STEP_DEBUG=true` in job variables
|
||||||
|
- Runner sets `Global.WriteDebug = true` and `runner.debug = "1"`
|
||||||
|
- DAP server starts on port 4711
|
||||||
|
|
||||||
|
**No additional configuration required.**
|
||||||
|
|
||||||
|
### Optional Configuration
|
||||||
|
|
||||||
|
| Environment Variable | Default | Description |
|
||||||
|
|---------------------|---------|-------------|
|
||||||
|
| `ACTIONS_DAP_PORT` | `4711` | TCP port for DAP server (optional override) |
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────────┐ ┌─────────────────────────────────────────┐
|
||||||
|
│ nvim-dap │ │ Runner.Worker │
|
||||||
|
│ (DAP Client) │◄───TCP:4711───────►│ ┌─────────────────────────────────┐ │
|
||||||
|
│ │ │ │ DapServer │ │
|
||||||
|
└─────────────────────┘ │ │ - TCP listener │ │
|
||||||
|
│ │ - DAP JSON protocol │ │
|
||||||
|
│ └──────────────┬──────────────────┘ │
|
||||||
|
│ │ │
|
||||||
|
│ ┌──────────────▼──────────────────┐ │
|
||||||
|
│ │ DapDebugSession │ │
|
||||||
|
│ │ - Debug state management │ │
|
||||||
|
│ │ - Step coordination │ │
|
||||||
|
│ │ - Variable exposure │ │
|
||||||
|
│ │ - Expression evaluation │ │
|
||||||
|
│ │ - Shell execution (REPL) │ │
|
||||||
|
│ └──────────────┬──────────────────┘ │
|
||||||
|
│ │ │
|
||||||
|
│ ┌──────────────▼──────────────────┐ │
|
||||||
|
│ │ StepsRunner (modified) │ │
|
||||||
|
│ │ - Pause before/after steps │ │
|
||||||
|
│ │ - Notify debug session │ │
|
||||||
|
│ └─────────────────────────────────┘ │
|
||||||
|
└─────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## DAP Concept Mapping
|
||||||
|
|
||||||
|
| DAP Concept | Actions Runner Equivalent |
|
||||||
|
|-------------|---------------------------|
|
||||||
|
| Thread | Single job execution |
|
||||||
|
| Stack Frame | Current step + completed steps (step history) |
|
||||||
|
| Scope | Context category: `github`, `env`, `inputs`, `steps`, `secrets`, `runner`, `job` |
|
||||||
|
| Variable | Individual context values |
|
||||||
|
| Breakpoint | Pause before specific step (future enhancement) |
|
||||||
|
| Step Over (Next) | Execute current step, pause before next |
|
||||||
|
| Continue | Run until job end |
|
||||||
|
| Evaluate | Evaluate `${{ }}` expressions OR execute shell commands (REPL) |
|
||||||
|
|
||||||
|
## File Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
src/Runner.Worker/
|
||||||
|
├── Dap/
|
||||||
|
│ ├── DapServer.cs # TCP listener, JSON protocol handling
|
||||||
|
│ ├── DapDebugSession.cs # Debug state, step coordination
|
||||||
|
│ ├── DapMessages.cs # DAP protocol message types
|
||||||
|
│ └── DapVariableProvider.cs # Converts ExecutionContext to DAP variables
|
||||||
|
```
|
||||||
|
|
||||||
|
## Implementation Phases
|
||||||
|
|
||||||
|
### Phase 1: DAP Protocol Infrastructure
|
||||||
|
|
||||||
|
#### 1.1 Protocol Messages (`Dap/DapMessages.cs`)
|
||||||
|
|
||||||
|
Base message types following DAP spec:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
public abstract class ProtocolMessage
|
||||||
|
{
|
||||||
|
public int seq { get; set; }
|
||||||
|
public string type { get; set; } // "request", "response", "event"
|
||||||
|
}
|
||||||
|
|
||||||
|
public class Request : ProtocolMessage
|
||||||
|
{
|
||||||
|
public string command { get; set; }
|
||||||
|
public object arguments { get; set; }
|
||||||
|
}
|
||||||
|
|
||||||
|
public class Response : ProtocolMessage
|
||||||
|
{
|
||||||
|
public int request_seq { get; set; }
|
||||||
|
public bool success { get; set; }
|
||||||
|
public string command { get; set; }
|
||||||
|
public string message { get; set; }
|
||||||
|
public object body { get; set; }
|
||||||
|
}
|
||||||
|
|
||||||
|
public class Event : ProtocolMessage
|
||||||
|
{
|
||||||
|
public string @event { get; set; }
|
||||||
|
public object body { get; set; }
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Message framing: `Content-Length: N\r\n\r\n{json}`
|
||||||
|
|
||||||
|
#### 1.2 DAP Server (`Dap/DapServer.cs`)
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
[ServiceLocator(Default = typeof(DapServer))]
|
||||||
|
public interface IDapServer : IRunnerService
|
||||||
|
{
|
||||||
|
Task StartAsync(int port);
|
||||||
|
Task WaitForConnectionAsync();
|
||||||
|
Task StopAsync();
|
||||||
|
void SendEvent(Event evt);
|
||||||
|
}
|
||||||
|
|
||||||
|
public sealed class DapServer : RunnerService, IDapServer
|
||||||
|
{
|
||||||
|
private TcpListener _listener;
|
||||||
|
private TcpClient _client;
|
||||||
|
private IDapDebugSession _session;
|
||||||
|
|
||||||
|
// TCP listener on configurable port
|
||||||
|
// Single-client connection
|
||||||
|
// Async read/write loop
|
||||||
|
// Dispatch requests to DapDebugSession
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Phase 2: Debug Session Logic
|
||||||
|
|
||||||
|
#### 2.1 Debug Session (`Dap/DapDebugSession.cs`)
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
public enum DapCommand { Continue, Next, Pause, Disconnect }
|
||||||
|
public enum PauseReason { Entry, Step, Breakpoint, Pause }
|
||||||
|
|
||||||
|
[ServiceLocator(Default = typeof(DapDebugSession))]
|
||||||
|
public interface IDapDebugSession : IRunnerService
|
||||||
|
{
|
||||||
|
bool IsActive { get; }
|
||||||
|
|
||||||
|
// Called by DapServer
|
||||||
|
void Initialize(InitializeRequestArguments args);
|
||||||
|
void Attach(AttachRequestArguments args);
|
||||||
|
void ConfigurationDone();
|
||||||
|
Task<DapCommand> WaitForCommandAsync();
|
||||||
|
|
||||||
|
// Called by StepsRunner
|
||||||
|
Task OnStepStartingAsync(IStep step, IExecutionContext jobContext);
|
||||||
|
void OnStepCompleted(IStep step);
|
||||||
|
|
||||||
|
// DAP requests
|
||||||
|
ThreadsResponse GetThreads();
|
||||||
|
StackTraceResponse GetStackTrace(int threadId);
|
||||||
|
ScopesResponse GetScopes(int frameId);
|
||||||
|
VariablesResponse GetVariables(int variablesReference);
|
||||||
|
EvaluateResponse Evaluate(string expression, string context);
|
||||||
|
}
|
||||||
|
|
||||||
|
public sealed class DapDebugSession : RunnerService, IDapDebugSession
|
||||||
|
{
|
||||||
|
private IExecutionContext _jobContext;
|
||||||
|
private IStep _currentStep;
|
||||||
|
private readonly List<IStep> _completedSteps = new();
|
||||||
|
private TaskCompletionSource<DapCommand> _commandTcs;
|
||||||
|
private bool _pauseAfterStep = false;
|
||||||
|
|
||||||
|
// Object reference management for nested variables
|
||||||
|
private int _nextVariableReference = 1;
|
||||||
|
private readonly Dictionary<int, object> _variableReferences = new();
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Core state machine:
|
||||||
|
1. **Waiting for client:** Server started, no client connected
|
||||||
|
2. **Initializing:** Client connected, exchanging capabilities
|
||||||
|
3. **Ready:** `configurationDone` received, waiting to start
|
||||||
|
4. **Paused (before step):** Stopped before step execution, waiting for command
|
||||||
|
5. **Running:** Executing a step
|
||||||
|
6. **Paused (after step):** Stopped after step execution, waiting for command
|
||||||
|
|
||||||
|
#### 2.2 Variable Provider (`Dap/DapVariableProvider.cs`)
|
||||||
|
|
||||||
|
Maps `ExecutionContext.ExpressionValues` to DAP scopes and variables:
|
||||||
|
|
||||||
|
| Scope | Source | Notes |
|
||||||
|
|-------|--------|-------|
|
||||||
|
| `github` | `ExpressionValues["github"]` | Full github context |
|
||||||
|
| `env` | `ExpressionValues["env"]` | Environment variables |
|
||||||
|
| `inputs` | `ExpressionValues["inputs"]` | Step inputs (when available) |
|
||||||
|
| `steps` | `Global.StepsContext.GetScope()` | Completed step outputs |
|
||||||
|
| `secrets` | `ExpressionValues["secrets"]` | Keys shown, values = `[REDACTED]` |
|
||||||
|
| `runner` | `ExpressionValues["runner"]` | Runner context |
|
||||||
|
| `job` | `ExpressionValues["job"]` | Job status |
|
||||||
|
|
||||||
|
Nested objects (e.g., `github.event.pull_request`) become expandable variables with child references.
|
||||||
|
|
||||||
|
### Phase 3: StepsRunner Integration
|
||||||
|
|
||||||
|
#### 3.1 Modify `StepsRunner.cs`
|
||||||
|
|
||||||
|
Add debug hooks at step boundaries:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
public async Task RunAsync(IExecutionContext jobContext)
|
||||||
|
{
|
||||||
|
// Get debug session if available
|
||||||
|
var debugSession = HostContext.TryGetService<IDapDebugSession>();
|
||||||
|
bool isFirstStep = true;
|
||||||
|
|
||||||
|
while (jobContext.JobSteps.Count > 0 || !checkPostJobActions)
|
||||||
|
{
|
||||||
|
// ... existing dequeue logic ...
|
||||||
|
|
||||||
|
var step = jobContext.JobSteps.Dequeue();
|
||||||
|
|
||||||
|
// Pause BEFORE step execution
|
||||||
|
if (debugSession?.IsActive == true)
|
||||||
|
{
|
||||||
|
var reason = isFirstStep ? PauseReason.Entry : PauseReason.Step;
|
||||||
|
await debugSession.OnStepStartingAsync(step, jobContext, reason);
|
||||||
|
isFirstStep = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ... existing step execution (condition eval, RunStepAsync, etc.) ...
|
||||||
|
|
||||||
|
// Pause AFTER step execution (if requested)
|
||||||
|
if (debugSession?.IsActive == true)
|
||||||
|
{
|
||||||
|
debugSession.OnStepCompleted(step);
|
||||||
|
// Session may pause here to let user inspect outputs
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Phase 4: Expression Evaluation & Shell (REPL)
|
||||||
|
|
||||||
|
#### 4.1 Expression Evaluation
|
||||||
|
|
||||||
|
Reuse existing `PipelineTemplateEvaluator`:
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
private EvaluateResponseBody EvaluateExpression(string expression, IExecutionContext context)
|
||||||
|
{
|
||||||
|
// Strip ${{ }} wrapper if present
|
||||||
|
var expr = expression.Trim();
|
||||||
|
if (expr.StartsWith("${{") && expr.EndsWith("}}"))
|
||||||
|
{
|
||||||
|
expr = expr.Substring(3, expr.Length - 5).Trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
var expressionToken = new BasicExpressionToken(fileId: null, line: null, column: null, expression: expr);
|
||||||
|
var templateEvaluator = context.ToPipelineTemplateEvaluator();
|
||||||
|
|
||||||
|
var result = templateEvaluator.EvaluateStepDisplayName(
|
||||||
|
expressionToken,
|
||||||
|
context.ExpressionValues,
|
||||||
|
context.ExpressionFunctions
|
||||||
|
);
|
||||||
|
|
||||||
|
// Mask secrets and determine type
|
||||||
|
result = HostContext.SecretMasker.MaskSecrets(result ?? "null");
|
||||||
|
|
||||||
|
return new EvaluateResponseBody
|
||||||
|
{
|
||||||
|
Result = result,
|
||||||
|
Type = DetermineResultType(result),
|
||||||
|
VariablesReference = 0
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Supported expression formats:**
|
||||||
|
- Plain expression: `github.ref`, `steps.build.outputs.result`
|
||||||
|
- Wrapped expression: `${{ github.event.pull_request.title }}`
|
||||||
|
|
||||||
|
#### 4.2 Shell Execution (REPL)
|
||||||
|
|
||||||
|
Shell execution is triggered when:
|
||||||
|
1. The evaluate request has `context: "repl"`, OR
|
||||||
|
2. The expression starts with `!` (e.g., `!ls -la`), OR
|
||||||
|
3. The expression starts with `$` followed by a shell command (e.g., `$env`)
|
||||||
|
|
||||||
|
**Usage examples in debug console:**
|
||||||
|
```
|
||||||
|
!ls -la # List files in workspace
|
||||||
|
!env | grep GITHUB # Show GitHub environment variables
|
||||||
|
!cat $GITHUB_EVENT_PATH # View the event payload
|
||||||
|
!echo ${{ github.ref }} # Mix shell and expression (evaluated first)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Implementation:**
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
private async Task<EvaluateResponseBody> ExecuteShellCommandAsync(string command, IExecutionContext context)
|
||||||
|
{
|
||||||
|
var processInvoker = HostContext.CreateService<IProcessInvoker>();
|
||||||
|
var output = new StringBuilder();
|
||||||
|
|
||||||
|
processInvoker.OutputDataReceived += (sender, args) =>
|
||||||
|
{
|
||||||
|
output.AppendLine(args.Data);
|
||||||
|
// Stream to client in real-time via DAP output event
|
||||||
|
_server?.SendEvent(new Event
|
||||||
|
{
|
||||||
|
EventType = "output",
|
||||||
|
Body = new OutputEventBody { Category = "stdout", Output = args.Data + "\n" }
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
processInvoker.ErrorDataReceived += (sender, args) =>
|
||||||
|
{
|
||||||
|
_server?.SendEvent(new Event
|
||||||
|
{
|
||||||
|
EventType = "output",
|
||||||
|
Body = new OutputEventBody { Category = "stderr", Output = args.Data + "\n" }
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
// Build environment from job context (includes GITHUB_*, env context, prepend path)
|
||||||
|
var env = BuildShellEnvironment(context);
|
||||||
|
var workDir = GetWorkingDirectory(context); // Uses github.workspace
|
||||||
|
var (shell, shellArgs) = GetDefaultShell(); // Platform-specific detection
|
||||||
|
|
||||||
|
int exitCode = await processInvoker.ExecuteAsync(
|
||||||
|
workingDirectory: workDir,
|
||||||
|
fileName: shell,
|
||||||
|
arguments: string.Format(shellArgs, command),
|
||||||
|
environment: env,
|
||||||
|
requireExitCodeZero: false,
|
||||||
|
cancellationToken: CancellationToken.None
|
||||||
|
);
|
||||||
|
|
||||||
|
return new EvaluateResponseBody
|
||||||
|
{
|
||||||
|
Result = HostContext.SecretMasker.MaskSecrets(output.ToString()),
|
||||||
|
Type = exitCode == 0 ? "string" : "error",
|
||||||
|
VariablesReference = 0
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Shell detection by platform:**
|
||||||
|
|
||||||
|
| Platform | Priority | Shell | Arguments |
|
||||||
|
|----------|----------|-------|-----------|
|
||||||
|
| Windows | 1 | `pwsh` | `-NoProfile -NonInteractive -Command "{0}"` |
|
||||||
|
| Windows | 2 | `powershell` | `-NoProfile -NonInteractive -Command "{0}"` |
|
||||||
|
| Windows | 3 | `cmd.exe` | `/C "{0}"` |
|
||||||
|
| Unix | 1 | `bash` | `-c "{0}"` |
|
||||||
|
| Unix | 2 | `sh` | `-c "{0}"` |
|
||||||
|
|
||||||
|
**Environment built for shell commands:**
|
||||||
|
- Current system environment variables
|
||||||
|
- GitHub Actions context variables (from `IEnvironmentContextData.GetRuntimeEnvironmentVariables()`)
|
||||||
|
- Prepend path from job context added to `PATH`
|
||||||
|
|
||||||
|
### Phase 5: Startup Integration
|
||||||
|
|
||||||
|
#### 5.1 Modify `JobRunner.cs`
|
||||||
|
|
||||||
|
Add DAP server startup after debug mode is detected (around line 159):
|
||||||
|
|
||||||
|
```csharp
|
||||||
|
if (jobContext.Global.WriteDebug)
|
||||||
|
{
|
||||||
|
jobContext.SetRunnerContext("debug", "1");
|
||||||
|
|
||||||
|
// Start DAP server for interactive debugging
|
||||||
|
var dapServer = HostContext.GetService<IDapServer>();
|
||||||
|
var port = int.Parse(
|
||||||
|
Environment.GetEnvironmentVariable("ACTIONS_DAP_PORT") ?? "4711");
|
||||||
|
|
||||||
|
await dapServer.StartAsync(port);
|
||||||
|
Trace.Info($"DAP server listening on port {port}");
|
||||||
|
jobContext.Output($"DAP debugger waiting for connection on port {port}...");
|
||||||
|
|
||||||
|
// Block until debugger connects
|
||||||
|
await dapServer.WaitForConnectionAsync();
|
||||||
|
Trace.Info("DAP client connected, continuing job execution");
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## DAP Capabilities
|
||||||
|
|
||||||
|
Capabilities to advertise in `InitializeResponse`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"supportsConfigurationDoneRequest": true,
|
||||||
|
"supportsEvaluateForHovers": true,
|
||||||
|
"supportsTerminateDebuggee": true,
|
||||||
|
"supportsStepBack": false,
|
||||||
|
"supportsSetVariable": false,
|
||||||
|
"supportsRestartFrame": false,
|
||||||
|
"supportsGotoTargetsRequest": false,
|
||||||
|
"supportsStepInTargetsRequest": false,
|
||||||
|
"supportsCompletionsRequest": false,
|
||||||
|
"supportsModulesRequest": false,
|
||||||
|
"supportsExceptionOptions": false,
|
||||||
|
"supportsValueFormattingOptions": false,
|
||||||
|
"supportsExceptionInfoRequest": false,
|
||||||
|
"supportsDelayedStackTraceLoading": false,
|
||||||
|
"supportsLoadedSourcesRequest": false,
|
||||||
|
"supportsProgressReporting": false,
|
||||||
|
"supportsRunInTerminalRequest": false
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Client Configuration (nvim-dap)
|
||||||
|
|
||||||
|
Example configuration for nvim-dap:
|
||||||
|
|
||||||
|
```lua
|
||||||
|
local dap = require('dap')
|
||||||
|
|
||||||
|
dap.adapters.actions = {
|
||||||
|
type = 'server',
|
||||||
|
host = '127.0.0.1',
|
||||||
|
port = 4711,
|
||||||
|
}
|
||||||
|
|
||||||
|
dap.configurations.yaml = {
|
||||||
|
{
|
||||||
|
type = 'actions',
|
||||||
|
request = 'attach',
|
||||||
|
name = 'Attach to Actions Runner',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Demo Flow
|
||||||
|
|
||||||
|
1. Trigger job re-run with "Enable debug logging" checked in GitHub UI
|
||||||
|
2. Runner starts, detects debug mode (`Global.WriteDebug == true`)
|
||||||
|
3. DAP server starts, console shows: `DAP debugger waiting for connection on port 4711...`
|
||||||
|
4. In nvim: `:lua require('dap').continue()`
|
||||||
|
5. Connection established, capabilities exchanged
|
||||||
|
6. Job begins, pauses before first step
|
||||||
|
7. nvim shows "stopped" state, variables panel shows contexts
|
||||||
|
8. User explores variables, evaluates expressions, runs shell commands
|
||||||
|
9. User presses `n` (next) to advance to next step
|
||||||
|
10. After step completes, user can inspect outputs before continuing
|
||||||
|
11. Repeat until job completes
|
||||||
|
|
||||||
|
## Testing Strategy
|
||||||
|
|
||||||
|
1. **Unit tests:** DAP protocol serialization, variable provider mapping
|
||||||
|
2. **Integration tests:** Mock DAP client verifying request/response sequences
|
||||||
|
3. **Manual testing:** Real job with nvim-dap attached
|
||||||
|
|
||||||
|
## Future Enhancements (Out of Scope for Demo)
|
||||||
|
|
||||||
|
- Composite action step-in (expand into sub-steps)
|
||||||
|
- Breakpoints on specific step names
|
||||||
|
- Watch expressions
|
||||||
|
- Conditional breakpoints
|
||||||
|
- Remote debugging (runner not on localhost)
|
||||||
|
- VS Code extension
|
||||||
|
|
||||||
|
## Estimated Effort
|
||||||
|
|
||||||
|
| Phase | Effort |
|
||||||
|
|-------|--------|
|
||||||
|
| Phase 1: Protocol Infrastructure | 4-6 hours |
|
||||||
|
| Phase 2: Debug Session Logic | 4-6 hours |
|
||||||
|
| Phase 3: StepsRunner Integration | 2-3 hours |
|
||||||
|
| Phase 4: Expression & Shell | 3-4 hours |
|
||||||
|
| Phase 5: Startup & Polish | 2-3 hours |
|
||||||
|
| **Total** | **~2-3 days** |
|
||||||
|
|
||||||
|
## Key Files to Modify
|
||||||
|
|
||||||
|
| File | Changes |
|
||||||
|
|------|---------|
|
||||||
|
| `src/Runner.Worker/JobRunner.cs` | Start DAP server when debug mode enabled |
|
||||||
|
| `src/Runner.Worker/StepsRunner.cs` | Add pause hooks before/after step execution |
|
||||||
|
| `src/Runner.Worker/Runner.Worker.csproj` | Add new Dap/ folder files |
|
||||||
|
|
||||||
|
## Key Files to Create
|
||||||
|
|
||||||
|
| File | Purpose |
|
||||||
|
|------|---------|
|
||||||
|
| `src/Runner.Worker/Dap/DapServer.cs` | TCP server, protocol framing |
|
||||||
|
| `src/Runner.Worker/Dap/DapDebugSession.cs` | Debug state machine, command handling |
|
||||||
|
| `src/Runner.Worker/Dap/DapMessages.cs` | Protocol message types |
|
||||||
|
| `src/Runner.Worker/Dap/DapVariableProvider.cs` | Context → DAP variable conversion |
|
||||||
|
|
||||||
|
## Reference Links
|
||||||
|
|
||||||
|
- [DAP Overview](https://microsoft.github.io/debug-adapter-protocol/overview)
|
||||||
|
- [DAP Specification](https://microsoft.github.io/debug-adapter-protocol/specification)
|
||||||
|
- [Enable Debug Logging (GitHub Docs)](https://docs.github.com/en/actions/how-tos/monitor-workflows/enable-debug-logging)
|
||||||
1116
.opencode/plans/dap-step-backwards.md
Normal file
1116
.opencode/plans/dap-step-backwards.md
Normal file
File diff suppressed because it is too large
Load Diff
18
README.md
18
README.md
@@ -20,6 +20,20 @@ Runner releases:
|
|||||||
|
|
||||||
 [Pre-reqs](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases)
|
 [Pre-reqs](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases)
|
||||||
|
|
||||||
## Contribute
|
### Note
|
||||||
|
|
||||||
We accept contributions in the form of issues and pull requests. The runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page. [Read more about our guidelines here](docs/contribute.md) before contributing.
|
Thank you for your interest in this GitHub repo, however, right now we are not taking contributions.
|
||||||
|
|
||||||
|
We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we’re working on and what stage they’re in.
|
||||||
|
|
||||||
|
We are taking the following steps to better direct requests related to GitHub Actions, including:
|
||||||
|
|
||||||
|
1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions)
|
||||||
|
|
||||||
|
2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report.
|
||||||
|
|
||||||
|
3. Security Issues should be handled as per our [security.md](security.md)
|
||||||
|
|
||||||
|
We will still provide security updates for this project and fix major breaking changes during this time.
|
||||||
|
|
||||||
|
You are welcome to still raise bugs in this repo.
|
||||||
|
|||||||
176
browser-ext/README.md
Normal file
176
browser-ext/README.md
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
# Actions DAP Debugger - Browser Extension
|
||||||
|
|
||||||
|
A Chrome extension that enables interactive debugging of GitHub Actions workflows directly in the browser. Connects to the runner's DAP server via a WebSocket proxy.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- **Variable Inspection**: Browse workflow context variables (`github`, `env`, `steps`, etc.)
|
||||||
|
- **REPL Console**: Evaluate expressions and run shell commands
|
||||||
|
- **Step Control**: Step forward, step back, continue, and reverse continue
|
||||||
|
- **GitHub Integration**: Debugger pane injects directly into the job page
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
### 1. Start the WebSocket Proxy
|
||||||
|
|
||||||
|
The proxy bridges WebSocket connections from the browser to the DAP TCP server.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd browser-ext/proxy
|
||||||
|
npm install
|
||||||
|
node proxy.js
|
||||||
|
```
|
||||||
|
|
||||||
|
The proxy listens on `ws://localhost:4712` and connects to the DAP server at `tcp://localhost:4711`.
|
||||||
|
|
||||||
|
### 2. Load the Extension in Chrome
|
||||||
|
|
||||||
|
1. Open Chrome and navigate to `chrome://extensions/`
|
||||||
|
2. Enable "Developer mode" (toggle in top right)
|
||||||
|
3. Click "Load unpacked"
|
||||||
|
4. Select the `browser-ext` directory
|
||||||
|
|
||||||
|
### 3. Start a Debug Session
|
||||||
|
|
||||||
|
1. Go to your GitHub repository
|
||||||
|
2. Navigate to Actions and select a workflow run
|
||||||
|
3. Click "Re-run jobs" → check "Enable debug logging"
|
||||||
|
4. Wait for the runner to display "DAP debugger waiting for connection..."
|
||||||
|
|
||||||
|
### 4. Connect the Extension
|
||||||
|
|
||||||
|
1. Navigate to the job page (`github.com/.../actions/runs/.../job/...`)
|
||||||
|
2. Click the extension icon in Chrome toolbar
|
||||||
|
3. Click "Connect"
|
||||||
|
4. The debugger pane will appear above the first workflow step
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Variable Browser (Left Panel)
|
||||||
|
|
||||||
|
Click on scope names to expand and view variables:
|
||||||
|
- **Globals**: `github`, `env`, `runner` contexts
|
||||||
|
- **Job Outputs**: Outputs from previous jobs
|
||||||
|
- **Step Outputs**: Outputs from previous steps
|
||||||
|
|
||||||
|
### Console (Right Panel)
|
||||||
|
|
||||||
|
Enter expressions or commands:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Evaluate expressions
|
||||||
|
${{ github.ref }}
|
||||||
|
${{ github.event_name }}
|
||||||
|
${{ env.MY_VAR }}
|
||||||
|
|
||||||
|
# Run shell commands (prefix with !)
|
||||||
|
!ls -la
|
||||||
|
!cat package.json
|
||||||
|
!env | grep GITHUB
|
||||||
|
|
||||||
|
# Modify variables
|
||||||
|
!export MY_VAR=new_value
|
||||||
|
```
|
||||||
|
|
||||||
|
### Control Buttons
|
||||||
|
|
||||||
|
| Button | Action | Description |
|
||||||
|
|--------|--------|-------------|
|
||||||
|
| ⏮ | Reverse Continue | Go back to first checkpoint |
|
||||||
|
| ◀ | Step Back | Go to previous checkpoint |
|
||||||
|
| ▶ | Continue | Run until next breakpoint/end |
|
||||||
|
| ⏭ | Step (Next) | Step to next workflow step |
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
```
|
||||||
|
Browser Extension ──WebSocket──► Proxy ──TCP──► Runner DAP Server
|
||||||
|
(port 4712) (port 4711)
|
||||||
|
```
|
||||||
|
|
||||||
|
The WebSocket proxy handles DAP message framing (Content-Length headers) and provides a browser-compatible connection.
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
### Proxy Settings
|
||||||
|
|
||||||
|
| Environment Variable | Default | Description |
|
||||||
|
|---------------------|---------|-------------|
|
||||||
|
| `WS_PORT` | 4712 | WebSocket server port |
|
||||||
|
| `DAP_HOST` | 127.0.0.1 | DAP server host |
|
||||||
|
| `DAP_PORT` | 4711 | DAP server port |
|
||||||
|
|
||||||
|
Or use CLI arguments:
|
||||||
|
```bash
|
||||||
|
node proxy.js --ws-port 4712 --dap-host 127.0.0.1 --dap-port 4711
|
||||||
|
```
|
||||||
|
|
||||||
|
### Extension Settings
|
||||||
|
|
||||||
|
Click the extension popup to configure:
|
||||||
|
- **Proxy Host**: Default `localhost`
|
||||||
|
- **Proxy Port**: Default `4712`
|
||||||
|
|
||||||
|
## File Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
browser-ext/
|
||||||
|
├── manifest.json # Extension configuration
|
||||||
|
├── background/
|
||||||
|
│ └── background.js # Service worker - DAP client
|
||||||
|
├── content/
|
||||||
|
│ ├── content.js # UI injection and interaction
|
||||||
|
│ └── content.css # Debugger pane styling
|
||||||
|
├── popup/
|
||||||
|
│ ├── popup.html # Extension popup UI
|
||||||
|
│ ├── popup.js # Popup logic
|
||||||
|
│ └── popup.css # Popup styling
|
||||||
|
├── lib/
|
||||||
|
│ └── dap-protocol.js # DAP message helpers
|
||||||
|
├── proxy/
|
||||||
|
│ ├── proxy.js # WebSocket-to-TCP bridge
|
||||||
|
│ └── package.json # Proxy dependencies
|
||||||
|
└── icons/
|
||||||
|
├── icon16.png
|
||||||
|
├── icon48.png
|
||||||
|
└── icon128.png
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### "Failed to connect to DAP server"
|
||||||
|
|
||||||
|
1. Ensure the proxy is running: `node proxy.js`
|
||||||
|
2. Ensure the runner is waiting for a debugger connection
|
||||||
|
3. Check that debug logging is enabled for the job
|
||||||
|
|
||||||
|
### Debugger pane doesn't appear
|
||||||
|
|
||||||
|
1. Verify you're on a job page (`/actions/runs/*/job/*`)
|
||||||
|
2. Open DevTools and check for console errors
|
||||||
|
3. Reload the page after loading the extension
|
||||||
|
|
||||||
|
### Variables don't load
|
||||||
|
|
||||||
|
1. Wait for the "stopped" event (status shows PAUSED)
|
||||||
|
2. Click on a scope to expand it
|
||||||
|
3. Check the console for error messages
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
### Modifying the Extension
|
||||||
|
|
||||||
|
After making changes:
|
||||||
|
1. Go to `chrome://extensions/`
|
||||||
|
2. Click the refresh icon on the extension card
|
||||||
|
3. Reload the GitHub job page
|
||||||
|
|
||||||
|
### Debugging
|
||||||
|
|
||||||
|
- **Background script**: Inspect via `chrome://extensions/` → "Inspect views: service worker"
|
||||||
|
- **Content script**: Use DevTools on the GitHub page
|
||||||
|
- **Proxy**: Watch terminal output for message logs
|
||||||
|
|
||||||
|
## Security Note
|
||||||
|
|
||||||
|
The proxy and extension are designed for local development. The proxy only accepts connections from localhost. Do not expose the proxy to the network without additional security measures.
|
||||||
528
browser-ext/background/background.js
Normal file
528
browser-ext/background/background.js
Normal file
@@ -0,0 +1,528 @@
|
|||||||
|
/**
|
||||||
|
* Background Script - DAP Client
|
||||||
|
*
|
||||||
|
* Service worker that manages WebSocket connection to the proxy
|
||||||
|
* and handles DAP protocol communication.
|
||||||
|
*
|
||||||
|
* NOTE: Chrome MV3 service workers can be terminated after ~30s of inactivity.
|
||||||
|
* We handle this with:
|
||||||
|
* 1. Keepalive pings to keep the WebSocket active
|
||||||
|
* 2. Automatic reconnection when the service worker restarts
|
||||||
|
* 3. Storing connection state in chrome.storage.session
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Connection state
|
||||||
|
let ws = null;
|
||||||
|
let connectionStatus = 'disconnected'; // disconnected, connecting, connected, paused, running, error
|
||||||
|
let sequenceNumber = 1;
|
||||||
|
const pendingRequests = new Map(); // seq -> { resolve, reject, command, timeout }
|
||||||
|
|
||||||
|
// Reconnection state
|
||||||
|
let reconnectAttempts = 0;
|
||||||
|
const MAX_RECONNECT_ATTEMPTS = 10;
|
||||||
|
const RECONNECT_BASE_DELAY = 1000; // Start with 1s, exponential backoff
|
||||||
|
let reconnectTimer = null;
|
||||||
|
let lastConnectedUrl = null;
|
||||||
|
let wasConnectedBeforeIdle = false;
|
||||||
|
|
||||||
|
// Keepalive interval - send ping every 15s to keep service worker AND WebSocket alive
|
||||||
|
// Chrome MV3 service workers get suspended after ~30s of inactivity
|
||||||
|
// We need to send actual WebSocket messages to keep both alive
|
||||||
|
const KEEPALIVE_INTERVAL = 15000;
|
||||||
|
let keepaliveTimer = null;
|
||||||
|
|
||||||
|
// Default configuration
|
||||||
|
const DEFAULT_URL = 'ws://localhost:4712';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize on service worker startup - check if we should reconnect
|
||||||
|
*/
|
||||||
|
async function initializeOnStartup() {
|
||||||
|
console.log('[Background] Service worker starting up...');
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Restore state from session storage
|
||||||
|
const data = await chrome.storage.session.get(['connectionUrl', 'shouldBeConnected', 'lastStatus']);
|
||||||
|
|
||||||
|
if (data.shouldBeConnected && data.connectionUrl) {
|
||||||
|
console.log('[Background] Restoring connection after service worker restart');
|
||||||
|
lastConnectedUrl = data.connectionUrl;
|
||||||
|
wasConnectedBeforeIdle = true;
|
||||||
|
|
||||||
|
// Small delay to let things settle
|
||||||
|
setTimeout(() => {
|
||||||
|
connect(data.connectionUrl);
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
console.log('[Background] No session state to restore');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Save connection state to session storage (survives service worker restart)
|
||||||
|
*/
|
||||||
|
async function saveConnectionState() {
|
||||||
|
try {
|
||||||
|
await chrome.storage.session.set({
|
||||||
|
connectionUrl: lastConnectedUrl,
|
||||||
|
shouldBeConnected: connectionStatus !== 'disconnected' && connectionStatus !== 'error',
|
||||||
|
lastStatus: connectionStatus,
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
console.warn('[Background] Failed to save connection state:', e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear connection state from session storage
|
||||||
|
*/
|
||||||
|
async function clearConnectionState() {
|
||||||
|
try {
|
||||||
|
await chrome.storage.session.remove(['connectionUrl', 'shouldBeConnected', 'lastStatus']);
|
||||||
|
} catch (e) {
|
||||||
|
console.warn('[Background] Failed to clear connection state:', e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start keepalive ping to prevent service worker termination
|
||||||
|
* CRITICAL: We must send actual WebSocket messages to keep the connection alive.
|
||||||
|
* Just having a timer is not enough - Chrome will suspend the service worker
|
||||||
|
* and close the WebSocket with code 1001 after ~30s of inactivity.
|
||||||
|
*/
|
||||||
|
function startKeepalive() {
|
||||||
|
stopKeepalive();
|
||||||
|
|
||||||
|
keepaliveTimer = setInterval(() => {
|
||||||
|
if (ws && ws.readyState === WebSocket.OPEN) {
|
||||||
|
try {
|
||||||
|
// Send a lightweight keepalive message over WebSocket
|
||||||
|
// This does two things:
|
||||||
|
// 1. Keeps the WebSocket connection active (prevents proxy timeout)
|
||||||
|
// 2. Creates activity that keeps the Chrome service worker alive
|
||||||
|
const keepaliveMsg = JSON.stringify({ type: 'keepalive', timestamp: Date.now() });
|
||||||
|
ws.send(keepaliveMsg);
|
||||||
|
console.log('[Background] Keepalive sent');
|
||||||
|
} catch (e) {
|
||||||
|
console.error('[Background] Keepalive error:', e);
|
||||||
|
handleUnexpectedClose();
|
||||||
|
}
|
||||||
|
} else if (wasConnectedBeforeIdle || lastConnectedUrl) {
|
||||||
|
// Connection was lost, try to reconnect
|
||||||
|
console.log('[Background] Connection lost during keepalive check');
|
||||||
|
handleUnexpectedClose();
|
||||||
|
}
|
||||||
|
}, KEEPALIVE_INTERVAL);
|
||||||
|
|
||||||
|
console.log('[Background] Keepalive timer started (interval: ' + KEEPALIVE_INTERVAL + 'ms)');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop keepalive ping
|
||||||
|
*/
|
||||||
|
function stopKeepalive() {
|
||||||
|
if (keepaliveTimer) {
|
||||||
|
clearInterval(keepaliveTimer);
|
||||||
|
keepaliveTimer = null;
|
||||||
|
console.log('[Background] Keepalive timer stopped');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Handle an unexpected connection close by scheduling a reconnect with
 * exponential backoff (capped at 30 seconds). No-ops when a reconnect is
 * already pending or no URL is known; gives up and broadcasts an 'error'
 * status once the attempt budget is exhausted.
 */
function handleUnexpectedClose() {
  if (reconnectTimer) {
    return; // A reconnect is already scheduled.
  }

  if (!lastConnectedUrl) {
    console.log('[Background] No URL to reconnect to');
    return;
  }

  if (reconnectAttempts >= MAX_RECONNECT_ATTEMPTS) {
    console.error('[Background] Max reconnection attempts reached');
    connectionStatus = 'error';
    broadcastStatus();
    clearConnectionState();
    return;
  }

  // Exponential backoff: base * 2^attempts, never more than 30s.
  const delay = Math.min(RECONNECT_BASE_DELAY * 2 ** reconnectAttempts, 30000);
  reconnectAttempts++;

  console.log(`[Background] Scheduling reconnect attempt ${reconnectAttempts}/${MAX_RECONNECT_ATTEMPTS} in ${delay}ms`);
  connectionStatus = 'connecting';
  broadcastStatus();

  reconnectTimer = setTimeout(() => {
    reconnectTimer = null;
    // Only reconnect if a healthy session was not established meanwhile.
    const healthy = ['connected', 'paused', 'running'].includes(connectionStatus);
    if (!healthy) {
      connect(lastConnectedUrl);
    }
  }, delay);
}
|
||||||
|
|
||||||
|
/**
 * Connect to the WebSocket proxy.
 *
 * Tears down any existing socket (without triggering a reconnect) and any
 * pending reconnect timer, then opens a fresh WebSocket to `url` (or
 * DEFAULT_URL) and wires up the lifecycle handlers:
 *  - onopen: mark connected, start keepalive, initialize the DAP session
 *  - onmessage: parse JSON and dispatch to handleDapMessage
 *  - onclose: reject pending requests and decide whether to reconnect
 *  - onerror: log only (onclose follows and handles recovery)
 *
 * @param {string} url - WebSocket proxy URL; falls back to DEFAULT_URL.
 */
function connect(url) {
  // Clean up existing connection
  if (ws) {
    try {
      ws.onclose = null; // Prevent triggering reconnect
      ws.close(1000, 'Reconnecting');
    } catch (e) {
      // Ignore
    }
    ws = null;
  }

  // Clear any pending reconnect
  if (reconnectTimer) {
    clearTimeout(reconnectTimer);
    reconnectTimer = null;
  }

  connectionStatus = 'connecting';
  broadcastStatus();

  // Use provided URL or default
  const wsUrl = url || DEFAULT_URL;
  // Remember the URL so handleUnexpectedClose() can reconnect later.
  lastConnectedUrl = wsUrl;
  console.log(`[Background] Connecting to ${wsUrl}`);

  try {
    ws = new WebSocket(wsUrl);
  } catch (e) {
    // Constructor can throw synchronously (e.g. malformed URL).
    console.error('[Background] Failed to create WebSocket:', e);
    connectionStatus = 'error';
    broadcastStatus();
    handleUnexpectedClose();
    return;
  }

  ws.onopen = async () => {
    console.log('[Background] WebSocket connected');
    connectionStatus = 'connected';
    reconnectAttempts = 0; // Reset on successful connection
    wasConnectedBeforeIdle = true;
    broadcastStatus();
    saveConnectionState();
    startKeepalive();

    // Initialize DAP session
    try {
      await initializeDapSession();
    } catch (error) {
      console.error('[Background] Failed to initialize DAP session:', error);
      // Don't set error status - the connection might still be usable
      // The DAP server might just need the job to progress
    }
  };

  ws.onmessage = (event) => {
    try {
      const message = JSON.parse(event.data);
      handleDapMessage(message);
    } catch (error) {
      // Non-JSON frames are logged and dropped.
      console.error('[Background] Failed to parse message:', error);
    }
  };

  ws.onclose = (event) => {
    console.log(`[Background] WebSocket closed: ${event.code} ${event.reason || '(no reason)'}`);
    ws = null;
    stopKeepalive();

    // Reject any pending requests
    for (const [seq, pending] of pendingRequests) {
      if (pending.timeout) clearTimeout(pending.timeout);
      pending.reject(new Error('Connection closed'));
    }
    pendingRequests.clear();

    // Determine if we should reconnect
    // Code 1000 = normal closure (user initiated)
    // Code 1001 = going away (service worker idle, browser closing, etc.)
    // Code 1006 = abnormal closure (connection lost)
    // Code 1011 = server error
    const shouldReconnect = event.code !== 1000;

    if (shouldReconnect && wasConnectedBeforeIdle) {
      console.log('[Background] Unexpected close, will attempt reconnect');
      connectionStatus = 'connecting';
      broadcastStatus();
      handleUnexpectedClose();
    } else {
      connectionStatus = 'disconnected';
      wasConnectedBeforeIdle = false;
      broadcastStatus();
      clearConnectionState();
    }
  };

  ws.onerror = (event) => {
    console.error('[Background] WebSocket error:', event);
    // onclose will be called after onerror, so we handle reconnection there
  };
}
|
||||||
|
|
||||||
|
/**
 * Disconnect from the WebSocket proxy (user initiated).
 *
 * Cancels any scheduled reconnect, stops the keepalive loop, sends a
 * best-effort DAP 'disconnect' request, closes the socket with a normal
 * (1000) close code, then broadcasts and persists the final state.
 */
function disconnect() {
  // Cancel any in-flight reconnection plan.
  if (reconnectTimer) {
    clearTimeout(reconnectTimer);
    reconnectTimer = null;
  }
  reconnectAttempts = 0;
  wasConnectedBeforeIdle = false;
  stopKeepalive();

  const socket = ws;
  if (socket) {
    // Best-effort: tell the DAP server we are leaving (while ws is still set).
    sendDapRequest('disconnect', {}).catch(() => {});

    // Detach state and handler so this close cannot trigger a reconnect.
    ws = null;
    socket.onclose = null;

    try {
      socket.close(1000, 'User disconnected');
    } catch (e) {
      // Ignore
    }
  }

  connectionStatus = 'disconnected';
  broadcastStatus();
  clearConnectionState();
}
|
||||||
|
|
||||||
|
/**
 * Initialize the DAP session: initialize -> attach -> configurationDone.
 * Rejects if any of the three requests fails or times out.
 */
async function initializeDapSession() {
  // 1. Announce client capabilities.
  const clientCapabilities = {
    clientID: 'browser-extension',
    clientName: 'Actions DAP Debugger',
    adapterID: 'github-actions-runner',
    pathFormat: 'path',
    linesStartAt1: true,
    columnsStartAt1: true,
    supportsVariableType: true,
    supportsVariablePaging: true,
    supportsRunInTerminalRequest: false,
    supportsProgressReporting: false,
    supportsInvalidatedEvent: true,
  };
  const initResponse = await sendDapRequest('initialize', clientCapabilities);
  console.log('[Background] Initialize response:', initResponse);

  // 2. Attach to the running session.
  const attachResponse = await sendDapRequest('attach', {});
  console.log('[Background] Attach response:', attachResponse);

  // 3. Signal that configuration is complete.
  const configResponse = await sendDapRequest('configurationDone', {});
  console.log('[Background] ConfigurationDone response:', configResponse);
}
|
||||||
|
|
||||||
|
/**
 * Send a DAP request over the WebSocket and resolve with the response body.
 * Rejects when not connected, on a send failure, on an error response, or
 * after a 30 second timeout.
 *
 * @param {string} command - DAP command name (e.g. 'initialize').
 * @param {object} args - DAP arguments object; defaults to {}.
 * @returns {Promise<object>} resolves with the response body.
 */
function sendDapRequest(command, args = {}) {
  return new Promise((resolve, reject) => {
    const open = ws && ws.readyState === WebSocket.OPEN;
    if (!open) {
      reject(new Error('Not connected'));
      return;
    }

    const seq = sequenceNumber++;
    console.log(`[Background] Sending DAP request: ${command} (seq: ${seq})`);

    // Fail the request if no matching response arrives within 30s.
    const timeout = setTimeout(() => {
      if (pendingRequests.has(seq)) {
        pendingRequests.delete(seq);
        reject(new Error(`Request timed out: ${command}`));
      }
    }, 30000);

    // Register before sending so a fast response always finds the entry.
    pendingRequests.set(seq, { resolve, reject, command, timeout });

    const payload = JSON.stringify({ seq, type: 'request', command, arguments: args });
    try {
      ws.send(payload);
    } catch (e) {
      pendingRequests.delete(seq);
      clearTimeout(timeout);
      reject(new Error(`Failed to send request: ${e.message}`));
    }
  });
}
|
||||||
|
|
||||||
|
/**
 * Dispatch an incoming DAP message by its type (response, event,
 * proxy-error, or keepalive-ack). Unknown types are silently ignored.
 */
function handleDapMessage(message) {
  switch (message.type) {
    case 'response':
      handleDapResponse(message);
      break;
    case 'event':
      handleDapEvent(message);
      break;
    case 'proxy-error':
      console.error('[Background] Proxy error:', message.message);
      // Don't immediately set error status - might be transient
      break;
    case 'keepalive-ack':
      // Keepalive acknowledged by proxy - connection is healthy.
      console.log('[Background] Keepalive acknowledged');
      break;
  }
}
|
||||||
|
|
||||||
|
/**
 * Resolve or reject the pending promise matching a DAP response by its
 * request_seq. Responses with no matching pending request are ignored.
 */
function handleDapResponse(response) {
  const seq = response.request_seq;
  const entry = pendingRequests.get(seq);
  if (!entry) {
    console.warn(`[Background] No pending request for seq ${seq}`);
    return;
  }

  // Claim the entry and cancel its timeout before settling.
  pendingRequests.delete(seq);
  if (entry.timeout) clearTimeout(entry.timeout);

  if (!response.success) {
    console.error(`[Background] DAP response error: ${response.command} - ${response.message}`);
    entry.reject(new Error(response.message || 'Unknown error'));
    return;
  }

  console.log(`[Background] DAP response success: ${response.command}`);
  entry.resolve(response.body || {});
}
|
||||||
|
|
||||||
|
/**
 * React to a DAP event: update connection status for the state-changing
 * events (stopped / continued / terminated), then forward every event to
 * the content scripts.
 */
function handleDapEvent(event) {
  console.log(`[Background] DAP event: ${event.event}`, event.body);

  const name = event.event;
  if (name === 'stopped') {
    connectionStatus = 'paused';
    broadcastStatus();
    saveConnectionState();
  } else if (name === 'continued') {
    connectionStatus = 'running';
    broadcastStatus();
    saveConnectionState();
  } else if (name === 'terminated') {
    connectionStatus = 'disconnected';
    wasConnectedBeforeIdle = false;
    broadcastStatus();
    clearConnectionState();
  }
  // 'initialized' (server ready) and 'output' need no state change here;
  // they are handled, like everything else, by the broadcast below.

  // Broadcast event to all content scripts.
  broadcastEvent(event);
}
|
||||||
|
|
||||||
|
/**
 * Broadcast the current connection status to the popup (via runtime
 * messaging) and to content scripts on GitHub Actions job pages.
 */
function broadcastStatus() {
  const payload = { type: 'status-changed', status: connectionStatus };

  // Popup and other extension contexts; ignore "no receiver" rejections.
  chrome.runtime.sendMessage(payload).catch(() => {});

  // Content scripts on job pages.
  const jobPageUrl = 'https://github.com/*/*/actions/runs/*/job/*';
  chrome.tabs.query({ url: jobPageUrl }, (tabs) => {
    if (chrome.runtime.lastError) return;
    for (const tab of tabs) {
      chrome.tabs.sendMessage(tab.id, payload).catch(() => {});
    }
  });
}
|
||||||
|
|
||||||
|
/**
 * Forward a DAP event to content scripts on GitHub Actions job pages.
 */
function broadcastEvent(event) {
  const jobPageUrl = 'https://github.com/*/*/actions/runs/*/job/*';
  chrome.tabs.query({ url: jobPageUrl }, (tabs) => {
    if (chrome.runtime.lastError) return;
    for (const tab of tabs) {
      chrome.tabs.sendMessage(tab.id, { type: 'dap-event', event }).catch(() => {});
    }
  });
}
|
||||||
|
|
||||||
|
/**
 * Message handler for requests from the popup and content scripts.
 * Synchronous replies return false; 'dap-request' returns true because it
 * responds asynchronously once the DAP round-trip completes.
 */
chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {
  console.log('[Background] Received message:', message.type);

  const kind = message.type;

  if (kind === 'get-status') {
    sendResponse({ status: connectionStatus, reconnecting: reconnectTimer !== null });
    return false;
  }

  if (kind === 'connect') {
    reconnectAttempts = 0; // Manual connect resets the retry budget.
    connect(message.url || DEFAULT_URL);
    sendResponse({ status: connectionStatus });
    return false;
  }

  if (kind === 'disconnect') {
    disconnect();
    sendResponse({ status: connectionStatus });
    return false;
  }

  if (kind === 'dap-request') {
    // Proxy a DAP request from a content script.
    sendDapRequest(message.command, message.args || {})
      .then((body) => {
        sendResponse({ success: true, body });
      })
      .catch((error) => {
        sendResponse({ success: false, error: error.message });
      });
    return true; // Will respond asynchronously
  }

  console.warn('[Background] Unknown message type:', message.type);
  return false;
});
|
||||||
|
|
||||||
|
// Initialize on startup (initializeOnStartup is defined earlier in this
// file; presumably it restores persisted connection state — confirm there).
initializeOnStartup();

// Log startup so the service worker's (re)activation is visible in devtools.
console.log('[Background] Actions DAP Debugger background script loaded');
|
||||||
337
browser-ext/content/content.css
Normal file
337
browser-ext/content/content.css
Normal file
@@ -0,0 +1,337 @@
|
|||||||
|
/**
 * Content Script Styles
 *
 * Matches GitHub's Primer design system for seamless integration.
 * Uses CSS custom properties for light/dark mode support; the literal
 * fallback colors are GitHub's dark-theme palette, overridden for light
 * mode at the bottom of this file.
 */

/* Debugger Pane Container */
.dap-debugger-pane {
  background-color: var(--bgColor-default, #0d1117);
  border-color: var(--borderColor-default, #30363d) !important;
  font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif;
  font-size: 14px;
}

/* Header */
.dap-header {
  background-color: var(--bgColor-muted, #161b22);
}

.dap-header .octicon {
  color: var(--fgColor-muted, #8b949e);
}

/* Current-step label in the header; truncates with an ellipsis. */
.dap-step-info {
  flex: 1;
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

/* Status Labels */
.dap-status-label {
  flex-shrink: 0;
}

.Label--attention {
  background-color: #9e6a03 !important;
  color: #ffffff !important;
  border: none !important;
}

.Label--success {
  background-color: #238636 !important;
  color: #ffffff !important;
  border: none !important;
}

.Label--danger {
  background-color: #da3633 !important;
  color: #ffffff !important;
  border: none !important;
}

.Label--secondary {
  background-color: #30363d !important;
  color: #8b949e !important;
  border: none !important;
}

/* Content Area */
.dap-content {
  min-height: 200px;
  max-height: 400px;
}

/* Scopes Panel */
.dap-scopes {
  border-color: var(--borderColor-default, #30363d) !important;
  min-width: 150px;
}

.dap-scope-header {
  background-color: var(--bgColor-muted, #161b22);
  font-size: 12px;
}

.dap-scope-tree {
  font-size: 12px;
  line-height: 1.6;
}

/* Tree Nodes (variable tree in the scopes panel) */
.dap-tree-node {
  padding: 1px 0;
}

.dap-tree-content {
  display: flex;
  align-items: flex-start;
  padding: 2px 4px;
  border-radius: 3px;
}

.dap-tree-content:hover {
  background-color: var(--bgColor-muted, #161b22);
}

/* Indented children with a guide line along the left edge. */
.dap-tree-children {
  margin-left: 16px;
  border-left: 1px solid var(--borderColor-muted, #21262d);
  padding-left: 8px;
}

.dap-expand-icon {
  display: inline-block;
  width: 16px;
  text-align: center;
  color: var(--fgColor-muted, #8b949e);
  font-size: 10px;
  flex-shrink: 0;
  user-select: none;
}

.dap-tree-node .text-bold {
  color: var(--fgColor-default, #e6edf3);
  font-weight: 600;
  word-break: break-word;
}

.dap-tree-node .color-fg-muted {
  color: var(--fgColor-muted, #8b949e);
  word-break: break-word;
}

/* REPL Console */
.dap-repl {
  display: flex;
  flex-direction: column;
}

.dap-repl-header {
  background-color: var(--bgColor-muted, #161b22);
  font-size: 12px;
  flex-shrink: 0;
}

.dap-repl-output {
  background-color: var(--bgColor-inset, #010409);
  font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, "Liberation Mono", monospace;
  font-size: 12px;
  line-height: 1.5;
  padding: 8px;
  flex: 1;
  overflow-y: auto;
  min-height: 100px;
}

/* Per-line coloring of REPL output by kind. */
.dap-output-input {
  color: var(--fgColor-muted, #8b949e);
}

.dap-output-result {
  color: var(--fgColor-default, #e6edf3);
}

.dap-output-stdout {
  color: var(--fgColor-default, #e6edf3);
}

.dap-output-error {
  color: var(--fgColor-danger, #f85149);
}

/* REPL Input */
.dap-repl-input {
  flex-shrink: 0;
}

.dap-repl-input input {
  font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, "Liberation Mono", monospace;
  font-size: 12px;
  background-color: var(--bgColor-inset, #010409) !important;
  border-color: var(--borderColor-default, #30363d) !important;
  color: var(--fgColor-default, #e6edf3) !important;
  width: 100%;
}

.dap-repl-input input:focus {
  border-color: var(--focus-outlineColor, #1f6feb) !important;
  outline: none;
  box-shadow: 0 0 0 3px rgba(31, 111, 235, 0.3);
}

.dap-repl-input input:disabled {
  opacity: 0.5;
  cursor: not-allowed;
}

.dap-repl-input input::placeholder {
  color: var(--fgColor-muted, #8b949e);
}

/* Control Buttons */
.dap-controls {
  background-color: var(--bgColor-muted, #161b22);
}

.dap-controls button {
  min-width: 32px;
  height: 28px;
  display: inline-flex;
  align-items: center;
  justify-content: center;
  padding: 0 8px;
}

.dap-controls button svg {
  width: 14px;
  height: 14px;
}

.dap-controls button:disabled {
  opacity: 0.4;
  cursor: not-allowed;
}

.dap-controls button:not(:disabled):hover {
  background-color: var(--bgColor-accent-muted, #388bfd26);
}

.dap-step-counter {
  flex-shrink: 0;
}

/* Utility Classes (in case GitHub's aren't loaded) */
.d-flex { display: flex; }
.flex-column { flex-direction: column; }
.flex-items-center { align-items: center; }
.flex-auto { flex: 1 1 auto; }

.p-2 { padding: 8px; }
.px-2 { padding-left: 8px; padding-right: 8px; }
.mx-2 { margin-left: 8px; margin-right: 8px; }
.mb-2 { margin-bottom: 8px; }
.ml-2 { margin-left: 8px; }
.ml-3 { margin-left: 16px; }
.mr-2 { margin-right: 8px; }
.ml-auto { margin-left: auto; }

.border { border: 1px solid var(--borderColor-default, #30363d); }
.border-bottom { border-bottom: 1px solid var(--borderColor-default, #30363d); }
.border-top { border-top: 1px solid var(--borderColor-default, #30363d); }
.border-right { border-right: 1px solid var(--borderColor-default, #30363d); }
.rounded-2 { border-radius: 6px; }

.overflow-auto { overflow: auto; }
.text-bold { font-weight: 600; }
.text-mono { font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, monospace; }
.text-small { font-size: 12px; }

.color-fg-muted { color: var(--fgColor-muted, #8b949e); }
.color-fg-danger { color: var(--fgColor-danger, #f85149); }
.color-fg-default { color: var(--fgColor-default, #e6edf3); }

/* Light mode overrides (OS preference) */
@media (prefers-color-scheme: light) {
  .dap-debugger-pane {
    background-color: var(--bgColor-default, #ffffff);
    border-color: var(--borderColor-default, #d0d7de) !important;
  }

  .dap-header,
  .dap-scope-header,
  .dap-repl-header,
  .dap-controls {
    background-color: var(--bgColor-muted, #f6f8fa);
  }

  .dap-repl-output,
  .dap-repl-input input {
    background-color: var(--bgColor-inset, #f6f8fa) !important;
  }

  .dap-tree-node .text-bold {
    color: var(--fgColor-default, #1f2328);
  }

  .color-fg-muted {
    color: var(--fgColor-muted, #656d76);
  }
}

/* Respect GitHub's color mode data attribute (explicit site setting,
   which can differ from the OS preference above) */
[data-color-mode="light"] .dap-debugger-pane,
html[data-color-mode="light"] .dap-debugger-pane {
  background-color: #ffffff;
  border-color: #d0d7de !important;
}

[data-color-mode="light"] .dap-header,
[data-color-mode="light"] .dap-scope-header,
[data-color-mode="light"] .dap-repl-header,
[data-color-mode="light"] .dap-controls,
html[data-color-mode="light"] .dap-header,
html[data-color-mode="light"] .dap-scope-header,
html[data-color-mode="light"] .dap-repl-header,
html[data-color-mode="light"] .dap-controls {
  background-color: #f6f8fa;
}

[data-color-mode="light"] .dap-repl-output,
[data-color-mode="light"] .dap-repl-input input,
html[data-color-mode="light"] .dap-repl-output,
html[data-color-mode="light"] .dap-repl-input input {
  background-color: #f6f8fa !important;
}

/* Debug Button in Header */
.dap-debug-btn-container {
  display: flex;
  align-items: center;
}

.dap-debug-btn {
  display: inline-flex;
  align-items: center;
  gap: 4px;
  font-size: 14px;
  font-weight: 500;
}

.dap-debug-btn.selected {
  background-color: var(--bgColor-accent-muted, #388bfd26);
  border-color: var(--borderColor-accent-emphasis, #388bfd);
}

.dap-debug-btn:hover:not(:disabled) {
  background-color: var(--bgColor-neutral-muted, #6e768166);
}

/* Light mode for debug button */
[data-color-mode="light"] .dap-debug-btn.selected,
html[data-color-mode="light"] .dap-debug-btn.selected {
  background-color: #ddf4ff;
  border-color: #54aeff;
}
|
||||||
767
browser-ext/content/content.js
Normal file
767
browser-ext/content/content.js
Normal file
@@ -0,0 +1,767 @@
|
|||||||
|
/**
 * Content Script - Debugger UI
 *
 * Injects the debugger pane into GitHub Actions job pages and handles
 * all UI interactions.
 */

// State
let debuggerPane = null;     // Root element of the injected pane (null until injected)
let currentFrameId = 0;      // presumably the active DAP stack-frame id — confirm against usage below
let isConnected = false;     // NOTE(review): looks like a mirror of the background's connection status — confirm
let replHistory = [];        // Previously entered REPL commands
let replHistoryIndex = -1;   // Cursor into replHistory; -1 presumably means "not navigating history"
|
||||||
|
|
||||||
|
/**
 * Escape text for safe insertion into HTML by round-tripping it through a
 * detached element's textContent/innerHTML.
 */
function escapeHtml(text) {
  const scratch = document.createElement('div');
  scratch.textContent = text;
  return scratch.innerHTML;
}
|
||||||
|
|
||||||
|
/**
 * Strip a trailing result-indicator suffix from a step name.
 * e.g., "Run tests [running]" -> "Run tests"
 */
function stripResultIndicator(name) {
  const suffix = /\s*\[(running|success|failure|skipped|cancelled)\]$/i;
  return name.replace(suffix, '');
}
|
||||||
|
|
||||||
|
/**
 * Send a DAP request to the background script and resolve with the
 * response body; rejects on messaging errors or error responses.
 */
function sendDapRequest(command, args = {}) {
  return new Promise((resolve, reject) => {
    const onResponse = (response) => {
      if (chrome.runtime.lastError) {
        reject(new Error(chrome.runtime.lastError.message));
        return;
      }
      if (response && response.success) {
        resolve(response.body);
      } else {
        reject(new Error(response?.error || 'Unknown error'));
      }
    };
    chrome.runtime.sendMessage({ type: 'dap-request', command, args }, onResponse);
  });
}
|
||||||
|
|
||||||
|
/**
 * Build a map of the page's workflow steps from the DOM.
 *
 * @returns {Map<number, object>} DOM order index -> step descriptor with
 *   the step's element and its data-* attributes (number, name,
 *   conclusion, externalId).
 */
function buildStepMap() {
  const steps = document.querySelectorAll('check-step');
  const map = new Map();
  steps.forEach((el, idx) => {
    map.set(idx, {
      element: el,
      // Explicit radix: parseInt without one may misparse strings with a
      // leading zero or "0x" prefix; NaN still results from a missing attr.
      number: parseInt(el.dataset.number, 10),
      name: el.dataset.name,
      conclusion: el.dataset.conclusion,
      externalId: el.dataset.externalId,
    });
  });
  return map;
}
|
||||||
|
|
||||||
|
/**
 * Find a step element by its data-name attribute.
 * The name is CSS-escaped so arbitrary step names form a valid selector.
 */
function findStepByName(stepName) {
  const selector = `check-step[data-name="${CSS.escape(stepName)}"]`;
  return document.querySelector(selector);
}
|
||||||
|
|
||||||
|
/**
 * Find a step element by its data-number attribute.
 */
function findStepByNumber(stepNumber) {
  const selector = `check-step[data-number="${stepNumber}"]`;
  return document.querySelector(selector);
}
|
||||||
|
|
||||||
|
/**
 * Get all step elements (<check-step>) currently in the document.
 * @returns {NodeList} the matching elements, in DOM order.
 */
function getAllSteps() {
  return document.querySelectorAll('check-step');
}
|
||||||
|
|
||||||
|
/**
 * Create the debugger pane HTML: header (status), a two-column content area
 * (variables tree + REPL console), and a row of step-control buttons.
 * Returned markup is assigned via innerHTML by injectDebuggerPane().
 * Note the escaped \${{ ... }} below: it must reach the page literally.
 */
function createDebuggerPaneHTML() {
  return `
    <div class="dap-header d-flex flex-items-center p-2 border-bottom">
      <svg class="octicon mr-2" viewBox="0 0 16 16" width="16" height="16">
        <path fill="currentColor" d="M4.72.22a.75.75 0 0 1 1.06 0l1 1a.75.75 0 0 1-1.06 1.06l-.22-.22-.22.22a.75.75 0 0 1-1.06-1.06l1-1Z"/>
        <path fill="currentColor" d="M11.28.22a.75.75 0 0 0-1.06 0l-1 1a.75.75 0 0 0 1.06 1.06l.22-.22.22.22a.75.75 0 0 0 1.06-1.06l-1-1Z"/>
        <path fill="currentColor" d="M8 4a4 4 0 0 0-4 4v1h1v2.5a2.5 2.5 0 0 0 2.5 2.5h1a2.5 2.5 0 0 0 2.5-2.5V9h1V8a4 4 0 0 0-4-4Z"/>
        <path fill="currentColor" d="M5 9H3.5a.5.5 0 0 0-.5.5v2a.5.5 0 0 0 .5.5H5V9ZM11 9h1.5a.5.5 0 0 1 .5.5v2a.5.5 0 0 1-.5.5H11V9Z"/>
      </svg>
      <span class="text-bold">Debugger</span>
      <span class="dap-step-info color-fg-muted ml-2">Connecting...</span>
      <span class="Label dap-status-label ml-auto">CONNECTING</span>
    </div>

    <div class="dap-content d-flex" style="height: 300px;">
      <!-- Scopes Panel -->
      <div class="dap-scopes border-right overflow-auto" style="width: 33%;">
        <div class="dap-scope-header p-2 text-bold border-bottom">Variables</div>
        <div class="dap-scope-tree p-2">
          <div class="color-fg-muted">Connect to view variables</div>
        </div>
      </div>

      <!-- REPL Console -->
      <div class="dap-repl d-flex flex-column" style="width: 67%;">
        <div class="dap-repl-header p-2 text-bold border-bottom">Console</div>
        <div class="dap-repl-output overflow-auto flex-auto p-2 text-mono text-small">
          <div class="color-fg-muted">Welcome to Actions DAP Debugger</div>
          <div class="color-fg-muted">Enter expressions like: \${{ github.ref }}</div>
          <div class="color-fg-muted">Or shell commands: !ls -la</div>
        </div>
        <div class="dap-repl-input border-top p-2">
          <input type="text" class="form-control input-sm text-mono"
            placeholder="Enter expression or !command" disabled>
        </div>
      </div>
    </div>

    <!-- Control buttons -->
    <div class="dap-controls d-flex flex-items-center p-2 border-top">
      <button class="btn btn-sm mr-2" data-action="reverseContinue" title="Reverse Continue (go to first checkpoint)" disabled>
        <svg viewBox="0 0 16 16" width="16" height="16"><path fill="currentColor" d="M2 2v12h2V8.5l5 4V8.5l5 4V2.5l-5 4V2.5l-5 4V2z"/></svg>
      </button>
      <button class="btn btn-sm mr-2" data-action="stepBack" title="Step Back" disabled>
        <svg viewBox="0 0 16 16" width="16" height="16"><path fill="currentColor" d="M2 2v12h2V2H2zm3 6 7 5V3L5 8z"/></svg>
      </button>
      <button class="btn btn-sm btn-primary mr-2" data-action="continue" title="Continue" disabled>
        <svg viewBox="0 0 16 16" width="16" height="16"><path fill="currentColor" d="M4 2l10 6-10 6z"/></svg>
      </button>
      <button class="btn btn-sm mr-2" data-action="next" title="Step to Next" disabled>
        <svg viewBox="0 0 16 16" width="16" height="16"><path fill="currentColor" d="M2 3l7 5-7 5V3zm7 5l5 0V2h2v12h-2V8.5l-5 0z"/></svg>
      </button>
      <span class="dap-step-counter color-fg-muted ml-auto text-small">
        Not connected
      </span>
    </div>
  `;
}
|
||||||
|
|
||||||
|
/**
 * Inject the debugger pane into the job page, replacing any previous pane.
 * @returns {Element|null} the pane, or null if no check-steps container.
 */
function injectDebuggerPane() {
  // Only ever keep one pane on the page.
  document.querySelector('.dap-debugger-pane')?.remove();

  const container = document.querySelector('check-steps');
  if (!container) {
    console.warn('[Content] No check-steps container found');
    return null;
  }

  const pane = document.createElement('div');
  pane.className = 'dap-debugger-pane mx-2 mb-2 border rounded-2';
  pane.innerHTML = createDebuggerPaneHTML();

  // Insert before the first real workflow step ("Set up job" is index 0).
  const stepNodes = container.querySelectorAll('check-step');
  const anchor = stepNodes.length > 1 ? stepNodes[1] : container.firstChild;
  container.insertBefore(pane, anchor);

  setupPaneEventHandlers(pane);

  debuggerPane = pane;
  return pane;
}
|
||||||
|
|
||||||
|
/**
 * Move the debugger pane so it sits directly above the given step
 * element, and refresh its "Paused before: ..." label.
 *
 * @param {Element} stepElement - Step node the pane should precede.
 * @param {string} stepName - Human-readable step name for the label.
 */
function moveDebuggerPane(stepElement, stepName) {
  if (!debuggerPane || !stepElement) {
    return;
  }

  // insertBefore on an already-attached node re-parents it in place.
  stepElement.parentNode.insertBefore(debuggerPane, stepElement);

  const info = debuggerPane.querySelector('.dap-step-info');
  if (info) {
    info.textContent = `Paused before: ${stepName}`;
  }
}
|
||||||
|
|
||||||
|
/**
 * Wire up interaction handlers for the debugger pane: a click handler
 * per control button (reverseContinue / stepBack / continue / next)
 * and a keydown handler on the REPL input.
 *
 * @param {HTMLElement} pane - Injected debugger pane root element.
 */
function setupPaneEventHandlers(pane) {
  for (const btn of pane.querySelectorAll('[data-action]')) {
    btn.addEventListener('click', async () => {
      const action = btn.dataset.action;

      // Optimistically flip to RUNNING; a later stopped event (or the
      // catch below) restores an interactive state.
      enableControls(false);
      updateStatus('RUNNING');

      try {
        await sendDapRequest(action, { threadId: 1 });
      } catch (error) {
        console.error(`[Content] DAP ${action} failed:`, error);
        appendOutput(`Error: ${error.message}`, 'error');
        enableControls(true);
        updateStatus('ERROR');
      }
    });
  }

  const replInput = pane.querySelector('.dap-repl-input input');
  if (replInput) {
    replInput.addEventListener('keydown', handleReplKeydown);
  }
}
|
||||||
|
|
||||||
|
/**
 * Handle keydown on the REPL input.
 *
 * Enter: record the line in history, echo it, and send it to the DAP
 * 'evaluate' request — lines starting with '!' use the 'repl' context
 * (shell commands), everything else 'watch'.
 * ArrowUp / ArrowDown: navigate the command history; stepping past
 * the newest entry clears the input.
 *
 * @param {KeyboardEvent} e
 */
async function handleReplKeydown(e) {
  const input = e.target;

  if (e.key === 'Enter') {
    const command = input.value.trim();
    if (!command) return;

    // Record the command and reset the history cursor to "one past
    // newest" so ArrowUp recalls this command first.
    replHistory.push(command);
    replHistoryIndex = replHistory.length;
    input.value = '';

    // Show command
    appendOutput(`> ${command}`, 'input');

    // Send to DAP
    try {
      const response = await sendDapRequest('evaluate', {
        expression: command,
        frameId: currentFrameId,
        context: command.startsWith('!') ? 'repl' : 'watch',
      });
      // Only show result if it's NOT an exit code summary
      // (shell command output is already streamed via output events)
      if (response.result && !/^\(exit code: -?\d+\)$/.test(response.result)) {
        appendOutput(response.result, 'result');
      }
    } catch (error) {
      appendOutput(error.message, 'error');
    }
  } else if (e.key === 'ArrowUp') {
    // Walk backwards through history; stops at the oldest entry.
    if (replHistoryIndex > 0) {
      replHistoryIndex--;
      input.value = replHistory[replHistoryIndex];
    }
    // Prevent the default caret jump to the start of the input.
    e.preventDefault();
  } else if (e.key === 'ArrowDown') {
    // Walk forwards; past the newest entry, clear the input.
    if (replHistoryIndex < replHistory.length - 1) {
      replHistoryIndex++;
      input.value = replHistory[replHistoryIndex];
    } else {
      replHistoryIndex = replHistory.length;
      input.value = '';
    }
    e.preventDefault();
  }
}
|
||||||
|
|
||||||
|
/**
 * Append text to the REPL output console and scroll it to the bottom.
 *
 * Each line of a multi-line string becomes its own <div> carrying a
 * `dap-output-<type>` class; 'error' lines are additionally tinted
 * danger and 'input' lines muted.
 *
 * @param {string} text - Text to append (may contain newlines).
 * @param {string} type - Output category ('input', 'result', 'error',
 *   'stdout', ...).
 */
function appendOutput(text, type) {
  const output = document.querySelector('.dap-repl-output');
  if (!output) {
    return;
  }

  // One <div> per line so each renders on its own row.
  for (const line of text.split('\n')) {
    const row = document.createElement('div');
    row.className = `dap-output-${type}`;
    if (type === 'error') {
      row.classList.add('color-fg-danger');
    }
    if (type === 'input') {
      row.classList.add('color-fg-muted');
    }
    row.textContent = line;
    output.appendChild(row);
  }

  // Keep the newest line visible.
  output.scrollTop = output.scrollHeight;
}
|
||||||
|
|
||||||
|
/**
 * Enable or disable every debugger control: the step/continue buttons
 * and the REPL input field.
 *
 * @param {boolean} enabled - true to enable, false to disable.
 */
function enableControls(enabled) {
  if (!debuggerPane) {
    return;
  }

  const disabled = !enabled;

  for (const btn of debuggerPane.querySelectorAll('.dap-controls button')) {
    btn.disabled = disabled;
  }

  const replInput = debuggerPane.querySelector('.dap-repl-input input');
  if (replInput) {
    replInput.disabled = disabled;
  }
}
|
||||||
|
|
||||||
|
/**
 * Update the status badge in the pane header and, optionally, the
 * step counter text.
 *
 * @param {string} status - PAUSED | RUNNING | TERMINATED |
 *   DISCONNECTED | ERROR (anything else renders as secondary).
 * @param {string} [extra] - Optional text for the step counter.
 */
function updateStatus(status, extra) {
  if (!debuggerPane) {
    return;
  }

  const label = debuggerPane.querySelector('.dap-status-label');
  if (label) {
    label.textContent = status;
    // Reset to the base classes before applying the status modifier.
    label.className = 'Label dap-status-label ml-auto ';

    // Primer label modifier per status; unknown statuses fall back to
    // the muted secondary style.
    const modifiers = {
      PAUSED: 'Label--attention',
      RUNNING: 'Label--success',
      TERMINATED: 'Label--secondary',
      DISCONNECTED: 'Label--secondary',
      ERROR: 'Label--danger',
    };
    label.classList.add(modifiers[status] || 'Label--secondary');
  }

  // Update step counter if extra info provided.
  if (extra) {
    const counter = debuggerPane.querySelector('.dap-step-counter');
    if (counter) {
      counter.textContent = extra;
    }
  }
}
|
||||||
|
|
||||||
|
/**
 * Fetch the variable scopes for a stack frame via the DAP 'scopes'
 * request and render one tree node per scope into the pane's scope
 * tree. Shows a loading placeholder while the request is in flight
 * and an escaped error message on failure.
 *
 * @param {number} frameId - DAP stack frame id to query.
 */
async function loadScopes(frameId) {
  const scopesContainer = document.querySelector('.dap-scope-tree');
  if (!scopesContainer) return;

  scopesContainer.innerHTML = '<div class="color-fg-muted">Loading...</div>';

  try {
    console.log('[Content] Loading scopes for frame:', frameId);
    const response = await sendDapRequest('scopes', { frameId });
    console.log('[Content] Scopes response:', response);

    // Clear the loading placeholder before rendering.
    scopesContainer.innerHTML = '';

    if (!response.scopes || response.scopes.length === 0) {
      scopesContainer.innerHTML = '<div class="color-fg-muted">No scopes available</div>';
      return;
    }

    for (const scope of response.scopes) {
      console.log('[Content] Creating tree node for scope:', scope.name, 'variablesRef:', scope.variablesReference);
      // Only mark as expandable if variablesReference > 0
      const isExpandable = scope.variablesReference > 0;
      const node = createTreeNode(scope.name, scope.variablesReference, isExpandable);
      scopesContainer.appendChild(node);
    }
  } catch (error) {
    console.error('[Content] Failed to load scopes:', error);
    // Error text is escaped before interpolation into innerHTML.
    scopesContainer.innerHTML = `<div class="color-fg-danger">Error: ${escapeHtml(error.message)}</div>`;
  }
}
|
||||||
|
|
||||||
|
/**
 * Build one node for the scope/variable tree.
 *
 * @param {string} name - Scope or variable name.
 * @param {number} variablesReference - DAP reference used to fetch
 *   children later; 0 means the node is a leaf.
 * @param {boolean} isExpandable - Whether to render an expand arrow
 *   and attach a click handler.
 * @param {string} [value] - Optional value rendered after the name.
 * @returns {HTMLElement} The assembled tree node element.
 */
function createTreeNode(name, variablesReference, isExpandable, value) {
  const nodeEl = document.createElement('div');
  nodeEl.className = 'dap-tree-node';
  // Stored on the dataset so toggleTreeNode can fetch children later.
  nodeEl.dataset.variablesRef = variablesReference;

  const row = document.createElement('div');
  row.className = 'dap-tree-content';

  // Arrow for expandable nodes; a plain space keeps leaves aligned.
  const arrow = document.createElement('span');
  arrow.className = 'dap-expand-icon';
  arrow.textContent = isExpandable ? '\u25B6' : ' '; // ▶ or space
  row.appendChild(arrow);

  const label = document.createElement('span');
  label.className = 'text-bold';
  label.textContent = name;
  row.appendChild(label);

  if (value !== undefined) {
    const detail = document.createElement('span');
    detail.className = 'color-fg-muted';
    detail.textContent = `: ${value}`;
    row.appendChild(detail);
  }

  nodeEl.appendChild(row);

  if (isExpandable && variablesReference > 0) {
    row.style.cursor = 'pointer';
    row.addEventListener('click', () => toggleTreeNode(nodeEl));
  }

  return nodeEl;
}
|
||||||
|
|
||||||
|
/**
 * Expand or collapse a tree node.
 *
 * If children were fetched previously, just toggle their visibility.
 * Otherwise issue a DAP 'variables' request for the node's
 * variablesReference, render each child as a nested tree node, and
 * expand. On failure the arrow is restored so the user can retry.
 *
 * @param {HTMLElement} node - Node created by createTreeNode().
 */
async function toggleTreeNode(node) {
  const children = node.querySelector('.dap-tree-children');
  const expandIcon = node.querySelector('.dap-expand-icon');

  if (children) {
    // Toggle visibility
    children.hidden = !children.hidden;
    expandIcon.textContent = children.hidden ? '\u25B6' : '\u25BC'; // ▶ or ▼
    return;
  }

  // Fetch children. dataset values are strings — parse with an
  // explicit radix (parseInt without one is a lint/portability trap).
  const variablesRef = parseInt(node.dataset.variablesRef, 10);
  // Also covers NaN (falsy), i.e. a missing/garbled dataset value.
  if (!variablesRef) return;

  // Transient loading indicator while the request is in flight.
  expandIcon.textContent = '...';

  try {
    const response = await sendDapRequest('variables', { variablesReference: variablesRef });

    const childContainer = document.createElement('div');
    childContainer.className = 'dap-tree-children ml-3';

    for (const variable of response.variables) {
      const hasChildren = variable.variablesReference > 0;
      const childNode = createTreeNode(
        variable.name,
        variable.variablesReference,
        hasChildren,
        variable.value
      );
      childContainer.appendChild(childNode);
    }

    node.appendChild(childContainer);
    expandIcon.textContent = '\u25BC'; // ▼
  } catch (error) {
    console.error('[Content] Failed to load variables:', error);
    expandIcon.textContent = '\u25B6'; // ▶ — restored so the user can retry
  }
}
|
||||||
|
|
||||||
|
/**
 * Handle a DAP 'stopped' event: mark the session paused, fetch the
 * stack trace, move the debugger pane to the current step, update the
 * step counter, and load the variable scopes for the top frame.
 *
 * @param {object} body - DAP stopped event body ({ reason, ... }).
 */
async function handleStoppedEvent(body) {
  console.log('[Content] Stopped event:', body);

  isConnected = true;
  updateStatus('PAUSED', body.reason || 'paused');
  enableControls(true);

  // Get current location
  try {
    const stackTrace = await sendDapRequest('stackTrace', { threadId: 1 });

    if (stackTrace.stackFrames && stackTrace.stackFrames.length > 0) {
      // Top frame is the step the debugger is currently paused at.
      const currentFrame = stackTrace.stackFrames[0];
      currentFrameId = currentFrame.id;

      // Strip result indicator from step name for DOM lookup
      // e.g., "Run tests [running]" -> "Run tests"
      const rawStepName = stripResultIndicator(currentFrame.name);
      let stepElement = findStepByName(rawStepName);

      if (!stepElement) {
        // Fallback: use step index
        // Note: GitHub Actions UI shows "Set up job" at index 0, which is not a real workflow step
        // DAP uses 1-based frame IDs, so frame ID 1 maps to UI step index 1 (skipping "Set up job")
        const steps = getAllSteps();
        const adjustedIndex = currentFrame.id; // 1-based, happens to match after skipping "Set up job"
        if (adjustedIndex > 0 && adjustedIndex < steps.length) {
          stepElement = steps[adjustedIndex];
        }
      }

      if (stepElement) {
        moveDebuggerPane(stepElement, rawStepName);
      }

      // Update step counter
      const counter = debuggerPane?.querySelector('.dap-step-counter');
      if (counter) {
        counter.textContent = `Step ${currentFrame.id} of ${stackTrace.stackFrames.length}`;
      }

      // Load scopes
      await loadScopes(currentFrame.id);
    }
  } catch (error) {
    console.error('[Content] Failed to get stack trace:', error);
    appendOutput(`Error: ${error.message}`, 'error');
  }
}
|
||||||
|
|
||||||
|
/**
 * Handle a DAP 'output' event by appending its text to the REPL
 * console. stderr is rendered as 'error'; every other category as
 * 'stdout'. Trailing whitespace/newlines are trimmed since
 * appendOutput splits into per-line divs itself.
 *
 * @param {object} body - DAP output event body ({ output, category }).
 */
function handleOutputEvent(body) {
  if (!body.output) {
    return;
  }
  const kind = body.category === 'stderr' ? 'error' : 'stdout';
  appendOutput(body.output.trimEnd(), kind);
}
|
||||||
|
|
||||||
|
/**
 * Handle a DAP 'terminated' event: mark the session as over, disable
 * all controls, and note the end of the session in the pane.
 */
function handleTerminatedEvent() {
  isConnected = false;
  updateStatus('TERMINATED');
  enableControls(false);

  if (debuggerPane) {
    const info = debuggerPane.querySelector('.dap-step-info');
    if (info) {
      info.textContent = 'Session ended';
    }
  }
}
|
||||||
|
|
||||||
|
/**
 * Load the current debug state (used when the page loads while the
 * session is already paused).
 *
 * Mirrors handleStoppedEvent: fetches the stack trace, moves the
 * debugger pane to the current step, updates the step counter, and
 * loads the scopes for the top frame.
 */
async function loadCurrentDebugState() {
  if (!debuggerPane) return;

  try {
    const stackTrace = await sendDapRequest('stackTrace', { threadId: 1 });
    if (stackTrace.stackFrames && stackTrace.stackFrames.length > 0) {
      const currentFrame = stackTrace.stackFrames[0];
      currentFrameId = currentFrame.id;

      // Move pane to current step.
      // Strip result indicator from step name for DOM lookup.
      const rawStepName = stripResultIndicator(currentFrame.name);
      let stepElement = findStepByName(rawStepName);

      if (!stepElement) {
        // Fallback: use step index (skip "Set up job" at index 0)
        const steps = getAllSteps();
        const adjustedIndex = currentFrame.id; // 1-based, matches after skipping "Set up job"
        if (adjustedIndex > 0 && adjustedIndex < steps.length) {
          stepElement = steps[adjustedIndex];
        }
      }

      if (stepElement) {
        moveDebuggerPane(stepElement, rawStepName);
      }

      // Update step counter. Frame ids are already 1-based (see the
      // fallback above), so no "+1" — this previously displayed
      // "Step N+1", inconsistent with handleStoppedEvent.
      const counter = debuggerPane.querySelector('.dap-step-counter');
      if (counter) {
        counter.textContent = `Step ${currentFrame.id} of ${stackTrace.stackFrames.length}`;
      }

      // Load scopes
      await loadScopes(currentFrame.id);
    }
  } catch (error) {
    console.error('[Content] Failed to load current debug state:', error);
  }
}
|
||||||
|
|
||||||
|
/**
 * Handle a connection status change pushed from the background
 * script, syncing the badge, the controls, and (when paused) the
 * current debug state.
 *
 * @param {string} status - 'connected' | 'paused' | 'running' |
 *   'disconnected' | 'error'.
 */
function handleStatusChange(status) {
  console.log('[Content] Status changed:', status);

  switch (status) {
    case 'connected': {
      // Braces scope the `const` to this case — a lexical declaration
      // directly inside a switch case leaks into sibling cases
      // (no-case-declarations).
      isConnected = true;
      updateStatus('CONNECTED');
      const stepInfo = debuggerPane?.querySelector('.dap-step-info');
      if (stepInfo) {
        stepInfo.textContent = 'Waiting for debug event...';
      }
      break;
    }

    case 'paused':
      isConnected = true;
      updateStatus('PAUSED');
      enableControls(true);
      // Fire-and-forget: failures are logged inside the helper.
      loadCurrentDebugState();
      break;

    case 'running':
      isConnected = true;
      updateStatus('RUNNING');
      enableControls(false);
      break;

    case 'disconnected':
      isConnected = false;
      updateStatus('DISCONNECTED');
      enableControls(false);
      break;

    case 'error':
      isConnected = false;
      updateStatus('ERROR');
      enableControls(false);
      break;
  }
}
|
||||||
|
|
||||||
|
/**
 * Listen for messages from the background script.
 *
 * 'dap-event' messages fan out to the stopped/output/terminated
 * handlers; 'status-changed' messages update the connection UI.
 */
chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {
  console.log('[Content] Received message:', message.type);

  switch (message.type) {
    case 'dap-event': {
      // Braces keep the `event` declaration scoped to this case
      // (no-case-declarations).
      const event = message.event;
      switch (event.event) {
        case 'stopped':
          handleStoppedEvent(event.body);
          break;
        case 'output':
          handleOutputEvent(event.body);
          break;
        case 'terminated':
          handleTerminatedEvent();
          break;
      }
      break;
    }

    case 'status-changed':
      handleStatusChange(message.status);
      break;
  }
});
|
||||||
|
|
||||||
|
/**
 * Inject the "Debug" toggle button into the GitHub Actions job header
 * (the .js-check-run-search toolbar). Idempotent: does nothing when
 * the container is missing or the button is already present.
 * Clicking the button toggles the debugger pane, creating it on first
 * use and syncing its state from the background script.
 */
function injectDebugButton() {
  const container = document.querySelector('.js-check-run-search');
  if (!container || container.querySelector('.dap-debug-btn-container')) {
    return; // Already injected or container not found
  }

  const buttonContainer = document.createElement('div');
  buttonContainer.className = 'ml-2 dap-debug-btn-container';
  buttonContainer.innerHTML = `
    <button type="button" class="btn btn-sm dap-debug-btn" title="Toggle DAP Debugger">
      <svg viewBox="0 0 16 16" width="16" height="16" class="octicon mr-1" style="vertical-align: text-bottom;">
        <path fill="currentColor" d="M4.72.22a.75.75 0 0 1 1.06 0l1 1a.75.75 0 0 1-1.06 1.06l-.22-.22-.22.22a.75.75 0 0 1-1.06-1.06l1-1Z"/>
        <path fill="currentColor" d="M11.28.22a.75.75 0 0 0-1.06 0l-1 1a.75.75 0 0 0 1.06 1.06l.22-.22.22.22a.75.75 0 0 0 1.06-1.06l-1-1Z"/>
        <path fill="currentColor" d="M8 4a4 4 0 0 0-4 4v1h1v2.5a2.5 2.5 0 0 0 2.5 2.5h1a2.5 2.5 0 0 0 2.5-2.5V9h1V8a4 4 0 0 0-4-4Z"/>
        <path fill="currentColor" d="M5 9H3.5a.5.5 0 0 0-.5.5v2a.5.5 0 0 0 .5.5H5V9ZM11 9h1.5a.5.5 0 0 1 .5.5v2a.5.5 0 0 1-.5.5H11V9Z"/>
      </svg>
      Debug
    </button>
  `;

  const button = buttonContainer.querySelector('button');
  button.addEventListener('click', () => {
    let pane = document.querySelector('.dap-debugger-pane');
    if (pane) {
      // Toggle visibility
      pane.hidden = !pane.hidden;
      // "selected" styling mirrors pane visibility.
      button.classList.toggle('selected', !pane.hidden);
    } else {
      // Create and show pane
      pane = injectDebuggerPane();
      if (pane) {
        button.classList.add('selected');
        // Check connection status after creating pane
        chrome.runtime.sendMessage({ type: 'get-status' }, (response) => {
          if (response && response.status) {
            handleStatusChange(response.status);
          }
        });
      }
    }
  });

  // Insert at the beginning of the container
  container.insertBefore(buttonContainer, container.firstChild);
  console.log('[Content] Debug button injected');
}
|
||||||
|
|
||||||
|
/**
 * Initialize the content script.
 *
 * If step elements are not yet in the DOM, a MutationObserver waits
 * for them before injecting the debug button. Once injected, the
 * background script is asked for the current connection status; when
 * already connected/paused, the debugger pane is auto-shown and
 * (when paused) the debug state is loaded immediately.
 */
function init() {
  console.log('[Content] Actions DAP Debugger content script loaded');

  // Check if we're on a job page
  const steps = getAllSteps();
  if (steps.length === 0) {
    console.log('[Content] No steps found, waiting for DOM...');
    // Wait for steps to appear
    const observer = new MutationObserver((mutations) => {
      const steps = getAllSteps();
      if (steps.length > 0) {
        // Disconnect first so later DOM churn doesn't re-trigger.
        observer.disconnect();
        console.log('[Content] Steps found, injecting debug button');
        injectDebugButton();
      }
    });
    observer.observe(document.body, { childList: true, subtree: true });
    return;
  }

  // Inject debug button in header (user can click to show debugger pane)
  injectDebugButton();

  // Check current connection status
  chrome.runtime.sendMessage({ type: 'get-status' }, async (response) => {
    if (response && response.status) {
      handleStatusChange(response.status);

      // If already connected/paused, auto-show the debugger pane
      if (response.status === 'paused' || response.status === 'connected') {
        const pane = document.querySelector('.dap-debugger-pane');
        if (!pane) {
          injectDebuggerPane();
          const btn = document.querySelector('.dap-debug-btn');
          if (btn) btn.classList.add('selected');
        }

        // If already paused, load the current debug state
        if (response.status === 'paused') {
          await loadCurrentDebugState();
        }
      }
    }
  });
}
|
||||||
|
|
||||||
|
// Kick off initialization once the DOM is ready — immediately when
// the document has already finished parsing, otherwise on
// DOMContentLoaded.
if (document.readyState !== 'loading') {
  init();
} else {
  document.addEventListener('DOMContentLoaded', init);
}
|
||||||
135
browser-ext/icons/generate.js
Normal file
135
browser-ext/icons/generate.js
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
/**
|
||||||
|
* Create simple green circle PNG icons
|
||||||
|
* No dependencies required - uses pure JavaScript to create valid PNG files
|
||||||
|
*/
|
||||||
|
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const zlib = require('zlib');
|
||||||
|
|
||||||
|
/**
 * Build a complete PNG buffer for one icon: a solid green circle
 * (#238636) with a white "bug" silhouette (oval body + smaller head
 * circle) overlaid in the center, on a transparent background.
 *
 * The image data uses 8-bit RGBA with PNG filter type 0 on every row
 * (one filter byte per scanline), deflate-compressed into a single
 * IDAT chunk.
 *
 * @param {number} size - Width and height of the square icon in px.
 * @returns {Buffer} A valid PNG file image.
 */
function createPNG(size) {
  // PNG uses RGBA format, one pixel = 4 bytes
  const pixelData = [];

  const centerX = size / 2;
  const centerY = size / 2;
  const radius = size / 2 - 1;
  const innerRadius = radius * 0.4;

  // First pass: green disc on transparent background.
  for (let y = 0; y < size; y++) {
    pixelData.push(0); // Filter byte for each row
    for (let x = 0; x < size; x++) {
      const dx = x - centerX;
      const dy = y - centerY;
      const dist = Math.sqrt(dx * dx + dy * dy);

      if (dist <= radius) {
        // Green circle (#238636)
        pixelData.push(35, 134, 54, 255);
      } else {
        // Transparent
        pixelData.push(0, 0, 0, 0);
      }
    }
  }

  // Second pass: add a white "bug" shape in the center.
  // (Fixed: the unused center-distance computation from the first
  // pass was duplicated here dead — removed.)
  for (let y = 0; y < size; y++) {
    for (let x = 0; x < size; x++) {
      const dx = x - centerX;
      const dy = y - centerY;

      // Bug body (oval): y squished by 1.3, shifted slightly down.
      const bodyDx = dx;
      const bodyDy = (dy - size * 0.05) / 1.3;
      const bodyDist = Math.sqrt(bodyDx * bodyDx + bodyDy * bodyDy);

      // Bug head (circle above body)
      const headDx = dx;
      const headDy = dy + size * 0.15;
      const headDist = Math.sqrt(headDx * headDx + headDy * headDy);

      if (bodyDist < innerRadius || headDist < innerRadius * 0.6) {
        // Row stride is 1 filter byte + size*4 pixel bytes; +1 skips
        // this row's filter byte.
        const idx = 1 + y * (1 + size * 4) + x * 4;
        pixelData[idx] = 255;
        pixelData[idx + 1] = 255;
        pixelData[idx + 2] = 255;
        pixelData[idx + 3] = 255;
      }
    }
  }

  const rawData = Buffer.from(pixelData);
  const compressed = zlib.deflateSync(rawData);

  // Build PNG file
  const chunks = [];

  // PNG signature
  chunks.push(Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]));

  // IHDR chunk
  const ihdr = Buffer.alloc(13);
  ihdr.writeUInt32BE(size, 0); // width
  ihdr.writeUInt32BE(size, 4); // height
  ihdr.writeUInt8(8, 8); // bit depth
  ihdr.writeUInt8(6, 9); // color type (RGBA)
  ihdr.writeUInt8(0, 10); // compression
  ihdr.writeUInt8(0, 11); // filter
  ihdr.writeUInt8(0, 12); // interlace
  chunks.push(createChunk('IHDR', ihdr));

  // IDAT chunk
  chunks.push(createChunk('IDAT', compressed));

  // IEND chunk
  chunks.push(createChunk('IEND', Buffer.alloc(0)));

  return Buffer.concat(chunks);
}
|
||||||
|
|
||||||
|
/**
 * Serialize one PNG chunk: 4-byte big-endian length, 4-byte type,
 * payload, and a CRC-32 computed over type + payload.
 *
 * @param {string} type - Four-character chunk type (e.g. 'IHDR').
 * @param {Buffer} data - Chunk payload (may be empty).
 * @returns {Buffer} The serialized chunk.
 */
function createChunk(type, data) {
  const typeBuffer = Buffer.from(type);

  const length = Buffer.alloc(4);
  length.writeUInt32BE(data.length, 0);

  // Per the PNG spec, the CRC covers the type and data fields only —
  // not the length field.
  const crc = Buffer.alloc(4);
  crc.writeUInt32BE(crc32(Buffer.concat([typeBuffer, data])), 0);

  return Buffer.concat([length, typeBuffer, data, crc]);
}
|
||||||
|
|
||||||
|
/**
 * Compute the standard CRC-32 (reflected, polynomial 0xEDB88320) of a
 * buffer using the precomputed lookup table.
 *
 * @param {Buffer} buf - Bytes to checksum.
 * @returns {number} Unsigned 32-bit checksum.
 */
function crc32(buf) {
  let crc = 0xffffffff;
  for (const byte of buf) {
    crc = crc32Table[(crc ^ byte) & 0xff] ^ (crc >>> 8);
  }
  // XOR-out and coerce to an unsigned 32-bit value.
  return (crc ^ 0xffffffff) >>> 0;
}
|
||||||
|
|
||||||
|
// Precomputed CRC-32 lookup table (reflected polynomial 0xEDB88320),
// one entry per possible byte value.
const crc32Table = new Uint32Array(256);
for (let n = 0; n < 256; n++) {
  let value = n;
  for (let bit = 0; bit < 8; bit++) {
    value = value & 1 ? 0xedb88320 ^ (value >>> 1) : value >>> 1;
  }
  crc32Table[n] = value;
}
|
||||||
|
|
||||||
|
// Generate one PNG icon per required size into this directory.
const iconsDir = path.join(__dirname);
const sizes = [16, 48, 128];

sizes.forEach((size) => {
  const png = createPNG(size);
  const filename = `icon${size}.png`;
  fs.writeFileSync(path.join(iconsDir, filename), png);
  // Fixed: this log previously printed the literal text "$(unknown)"
  // instead of interpolating the generated filename.
  console.log(`Created ${filename} (${size}x${size})`);
});

console.log('Done!');
|
||||||
BIN
browser-ext/icons/icon128.png
Normal file
BIN
browser-ext/icons/icon128.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 872 B |
BIN
browser-ext/icons/icon16.png
Normal file
BIN
browser-ext/icons/icon16.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 126 B |
BIN
browser-ext/icons/icon48.png
Normal file
BIN
browser-ext/icons/icon48.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 258 B |
32
browser-ext/manifest.json
Normal file
32
browser-ext/manifest.json
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
{
|
||||||
|
"manifest_version": 3,
|
||||||
|
"name": "Actions DAP Debugger",
|
||||||
|
"version": "0.1.0",
|
||||||
|
"description": "Debug GitHub Actions workflows with DAP - interactive debugging directly in the browser",
|
||||||
|
"permissions": ["activeTab", "storage"],
|
||||||
|
"host_permissions": ["https://github.com/*"],
|
||||||
|
"background": {
|
||||||
|
"service_worker": "background/background.js"
|
||||||
|
},
|
||||||
|
"content_scripts": [
|
||||||
|
{
|
||||||
|
"matches": ["https://github.com/*/*/actions/runs/*/job/*"],
|
||||||
|
"js": ["lib/dap-protocol.js", "content/content.js"],
|
||||||
|
"css": ["content/content.css"],
|
||||||
|
"run_at": "document_idle"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"action": {
|
||||||
|
"default_popup": "popup/popup.html",
|
||||||
|
"default_icon": {
|
||||||
|
"16": "icons/icon16.png",
|
||||||
|
"48": "icons/icon48.png",
|
||||||
|
"128": "icons/icon128.png"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"icons": {
|
||||||
|
"16": "icons/icon16.png",
|
||||||
|
"48": "icons/icon48.png",
|
||||||
|
"128": "icons/icon128.png"
|
||||||
|
}
|
||||||
|
}
|
||||||
228
browser-ext/popup/popup.css
Normal file
228
browser-ext/popup/popup.css
Normal file
@@ -0,0 +1,228 @@
|
|||||||
|
/**
|
||||||
|
* Popup Styles
|
||||||
|
*
|
||||||
|
* GitHub-inspired dark theme for the extension popup.
|
||||||
|
*/
|
||||||
|
|
||||||
|
* {
|
||||||
|
box-sizing: border-box;
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
width: 320px;
|
||||||
|
padding: 16px;
|
||||||
|
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif;
|
||||||
|
font-size: 14px;
|
||||||
|
background-color: #0d1117;
|
||||||
|
color: #e6edf3;
|
||||||
|
}
|
||||||
|
|
||||||
|
h3 {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 8px;
|
||||||
|
margin: 0 0 16px 0;
|
||||||
|
font-size: 16px;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
h3 .icon {
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Status Section */
|
||||||
|
.status-section {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
margin-bottom: 16px;
|
||||||
|
padding: 12px;
|
||||||
|
background-color: #161b22;
|
||||||
|
border-radius: 6px;
|
||||||
|
border: 1px solid #30363d;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-indicator {
|
||||||
|
width: 10px;
|
||||||
|
height: 10px;
|
||||||
|
border-radius: 50%;
|
||||||
|
margin-right: 10px;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-disconnected {
|
||||||
|
background-color: #6e7681;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-connecting {
|
||||||
|
background-color: #9e6a03;
|
||||||
|
animation: pulse 1.5s ease-in-out infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-connected {
|
||||||
|
background-color: #238636;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-paused {
|
||||||
|
background-color: #9e6a03;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-running {
|
||||||
|
background-color: #238636;
|
||||||
|
animation: pulse 1.5s ease-in-out infinite;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-error {
|
||||||
|
background-color: #da3633;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes pulse {
|
||||||
|
0%, 100% {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
50% {
|
||||||
|
opacity: 0.5;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#status-text {
|
||||||
|
font-weight: 500;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Config Section */
|
||||||
|
.config-section {
|
||||||
|
margin-bottom: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-section label {
|
||||||
|
display: block;
|
||||||
|
margin-bottom: 12px;
|
||||||
|
font-size: 12px;
|
||||||
|
font-weight: 500;
|
||||||
|
color: #8b949e;
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-section input {
|
||||||
|
display: block;
|
||||||
|
width: 100%;
|
||||||
|
padding: 8px 12px;
|
||||||
|
margin-top: 6px;
|
||||||
|
background-color: #0d1117;
|
||||||
|
border: 1px solid #30363d;
|
||||||
|
border-radius: 6px;
|
||||||
|
color: #e6edf3;
|
||||||
|
font-size: 14px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-section input:focus {
|
||||||
|
border-color: #1f6feb;
|
||||||
|
outline: none;
|
||||||
|
box-shadow: 0 0 0 3px rgba(31, 111, 235, 0.3);
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-section input:disabled {
|
||||||
|
opacity: 0.5;
|
||||||
|
cursor: not-allowed;
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-hint {
|
||||||
|
font-size: 11px;
|
||||||
|
color: #6e7681;
|
||||||
|
margin-top: 4px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Actions Section */
|
||||||
|
.actions-section {
|
||||||
|
display: flex;
|
||||||
|
gap: 8px;
|
||||||
|
margin-bottom: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
button {
|
||||||
|
flex: 1;
|
||||||
|
padding: 10px 16px;
|
||||||
|
border: none;
|
||||||
|
border-radius: 6px;
|
||||||
|
font-size: 14px;
|
||||||
|
font-weight: 500;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: background-color 0.15s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
button:disabled {
|
||||||
|
opacity: 0.5;
|
||||||
|
cursor: not-allowed;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-primary {
|
||||||
|
background-color: #238636;
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-primary:hover:not(:disabled) {
|
||||||
|
background-color: #2ea043;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-secondary {
|
||||||
|
background-color: #21262d;
|
||||||
|
color: #e6edf3;
|
||||||
|
border: 1px solid #30363d;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-secondary:hover:not(:disabled) {
|
||||||
|
background-color: #30363d;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Help Section */
|
||||||
|
.help-section {
|
||||||
|
font-size: 12px;
|
||||||
|
color: #8b949e;
|
||||||
|
background-color: #161b22;
|
||||||
|
border: 1px solid #30363d;
|
||||||
|
border-radius: 6px;
|
||||||
|
padding: 12px;
|
||||||
|
margin-bottom: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.help-section p {
|
||||||
|
margin: 6px 0;
|
||||||
|
line-height: 1.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.help-section p:first-child {
|
||||||
|
margin-top: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.help-section strong {
|
||||||
|
color: #e6edf3;
|
||||||
|
}
|
||||||
|
|
||||||
|
.help-section code {
|
||||||
|
display: block;
|
||||||
|
background-color: #0d1117;
|
||||||
|
padding: 8px;
|
||||||
|
border-radius: 4px;
|
||||||
|
font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, monospace;
|
||||||
|
font-size: 11px;
|
||||||
|
overflow-x: auto;
|
||||||
|
margin: 8px 0;
|
||||||
|
white-space: nowrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Footer */
|
||||||
|
.footer {
|
||||||
|
text-align: center;
|
||||||
|
padding-top: 8px;
|
||||||
|
border-top: 1px solid #21262d;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footer a {
|
||||||
|
color: #58a6ff;
|
||||||
|
text-decoration: none;
|
||||||
|
font-size: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footer a:hover {
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
52
browser-ext/popup/popup.html
Normal file
52
browser-ext/popup/popup.html
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<link rel="stylesheet" href="popup.css">
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div class="popup-container">
|
||||||
|
<h3>
|
||||||
|
<svg class="icon" viewBox="0 0 16 16" width="16" height="16">
|
||||||
|
<path fill="currentColor" d="M4.72.22a.75.75 0 0 1 1.06 0l1 1a.75.75 0 0 1-1.06 1.06l-.22-.22-.22.22a.75.75 0 0 1-1.06-1.06l1-1Z"/>
|
||||||
|
<path fill="currentColor" d="M11.28.22a.75.75 0 0 0-1.06 0l-1 1a.75.75 0 0 0 1.06 1.06l.22-.22.22.22a.75.75 0 0 0 1.06-1.06l-1-1Z"/>
|
||||||
|
<path fill="currentColor" d="M8 4a4 4 0 0 0-4 4v1h1v2.5a2.5 2.5 0 0 0 2.5 2.5h1a2.5 2.5 0 0 0 2.5-2.5V9h1V8a4 4 0 0 0-4-4Z"/>
|
||||||
|
<path fill="currentColor" d="M5 9H3.5a.5.5 0 0 0-.5.5v2a.5.5 0 0 0 .5.5H5V9ZM11 9h1.5a.5.5 0 0 1 .5.5v2a.5.5 0 0 1-.5.5H11V9Z"/>
|
||||||
|
</svg>
|
||||||
|
Actions DAP Debugger
|
||||||
|
</h3>
|
||||||
|
|
||||||
|
<div class="status-section">
|
||||||
|
<div class="status-indicator" id="status-indicator"></div>
|
||||||
|
<span id="status-text">Disconnected</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="config-section">
|
||||||
|
<label>
|
||||||
|
Proxy URL
|
||||||
|
<input type="text" id="proxy-url" value="ws://localhost:4712"
|
||||||
|
placeholder="ws://localhost:4712 or wss://...">
|
||||||
|
</label>
|
||||||
|
<p class="config-hint">For codespaces, use the forwarded URL (wss://...)</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="actions-section">
|
||||||
|
<button id="connect-btn" class="btn-primary">Connect</button>
|
||||||
|
<button id="disconnect-btn" class="btn-secondary" disabled>Disconnect</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="help-section">
|
||||||
|
<p><strong>Quick Start:</strong></p>
|
||||||
|
<p>1. Start the proxy:</p>
|
||||||
|
<code>cd browser-ext/proxy && npm install && node proxy.js</code>
|
||||||
|
<p>2. Re-run your GitHub Actions job with "Enable debug logging"</p>
|
||||||
|
<p>3. Click Connect when the job is waiting for debugger</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="footer">
|
||||||
|
<a href="https://github.com/actions/runner" target="_blank">Documentation</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<script src="popup.js"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
95
browser-ext/popup/popup.js
Normal file
95
browser-ext/popup/popup.js
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
/**
|
||||||
|
* Popup Script
|
||||||
|
*
|
||||||
|
* Handles extension popup UI and connection management.
|
||||||
|
*/
|
||||||
|
|
||||||
|
document.addEventListener('DOMContentLoaded', () => {
|
||||||
|
const statusIndicator = document.getElementById('status-indicator');
|
||||||
|
const statusText = document.getElementById('status-text');
|
||||||
|
const connectBtn = document.getElementById('connect-btn');
|
||||||
|
const disconnectBtn = document.getElementById('disconnect-btn');
|
||||||
|
const urlInput = document.getElementById('proxy-url');
|
||||||
|
|
||||||
|
// Load saved config
|
||||||
|
chrome.storage.local.get(['proxyUrl'], (data) => {
|
||||||
|
if (data.proxyUrl) urlInput.value = data.proxyUrl;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get current status from background
|
||||||
|
chrome.runtime.sendMessage({ type: 'get-status' }, (response) => {
|
||||||
|
if (response) {
|
||||||
|
updateStatusUI(response.status, response.reconnecting);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Listen for status changes
|
||||||
|
chrome.runtime.onMessage.addListener((message) => {
|
||||||
|
if (message.type === 'status-changed') {
|
||||||
|
updateStatusUI(message.status, message.reconnecting);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Connect button
|
||||||
|
connectBtn.addEventListener('click', () => {
|
||||||
|
const url = urlInput.value.trim() || 'ws://localhost:4712';
|
||||||
|
|
||||||
|
// Save config
|
||||||
|
chrome.storage.local.set({ proxyUrl: url });
|
||||||
|
|
||||||
|
// Update UI immediately
|
||||||
|
updateStatusUI('connecting');
|
||||||
|
|
||||||
|
// Connect
|
||||||
|
chrome.runtime.sendMessage({ type: 'connect', url }, (response) => {
|
||||||
|
if (response && response.status) {
|
||||||
|
updateStatusUI(response.status);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Disconnect button
|
||||||
|
disconnectBtn.addEventListener('click', () => {
|
||||||
|
chrome.runtime.sendMessage({ type: 'disconnect' }, (response) => {
|
||||||
|
if (response && response.status) {
|
||||||
|
updateStatusUI(response.status);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
 * Refresh the popup UI (status text, indicator color, button and input
 * states) so it matches the given connection status.
 */
function updateStatusUI(status, reconnecting = false) {
  // Human-readable label for each known status value.
  const labels = {
    disconnected: 'Disconnected',
    connecting: reconnecting ? 'Reconnecting...' : 'Connecting...',
    connected: 'Connected',
    paused: 'Paused',
    running: 'Running',
    error: 'Connection Error',
  };
  // Unknown status values fall back to the raw status string.
  statusText.textContent = labels[status] || status;

  // The indicator dot is colored purely via its status-* CSS class.
  statusIndicator.className = 'status-indicator status-' + status;

  const active = status === 'connected' || status === 'paused' || status === 'running';
  const pending = status === 'connecting';

  connectBtn.disabled = active || pending;
  disconnectBtn.disabled = status === 'disconnected';

  // Reflect an in-flight connection attempt on the connect button itself.
  connectBtn.textContent = pending
    ? (reconnecting ? 'Reconnecting...' : 'Connecting...')
    : 'Connect';

  // Lock the URL field while a connection exists or is being established.
  urlInput.disabled = active || pending;
}
|
||||||
|
});
|
||||||
36
browser-ext/proxy/package-lock.json
generated
Normal file
36
browser-ext/proxy/package-lock.json
generated
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
{
|
||||||
|
"name": "dap-websocket-proxy",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"lockfileVersion": 3,
|
||||||
|
"requires": true,
|
||||||
|
"packages": {
|
||||||
|
"": {
|
||||||
|
"name": "dap-websocket-proxy",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"dependencies": {
|
||||||
|
"ws": "^8.16.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/ws": {
|
||||||
|
"version": "8.19.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz",
|
||||||
|
"integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10.0.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"bufferutil": "^4.0.1",
|
||||||
|
"utf-8-validate": ">=5.0.2"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"bufferutil": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"utf-8-validate": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
12
browser-ext/proxy/package.json
Normal file
12
browser-ext/proxy/package.json
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"name": "dap-websocket-proxy",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "WebSocket-to-TCP bridge for DAP debugging",
|
||||||
|
"main": "proxy.js",
|
||||||
|
"scripts": {
|
||||||
|
"start": "node proxy.js"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"ws": "^8.16.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
207
browser-ext/proxy/proxy.js
Normal file
207
browser-ext/proxy/proxy.js
Normal file
@@ -0,0 +1,207 @@
|
|||||||
|
/**
|
||||||
|
* DAP WebSocket-to-TCP Proxy
|
||||||
|
*
|
||||||
|
* Bridges WebSocket connections from browser extensions to the DAP TCP server.
|
||||||
|
* Handles DAP message framing (Content-Length headers).
|
||||||
|
*
|
||||||
|
* Usage: node proxy.js [--ws-port 4712] [--dap-host 127.0.0.1] [--dap-port 4711]
|
||||||
|
*/
|
||||||
|
|
||||||
|
const WebSocket = require('ws');
|
||||||
|
const net = require('net');
|
||||||
|
|
||||||
|
// Configuration (can be overridden via CLI args)
|
||||||
|
const config = {
|
||||||
|
wsPort: parseInt(process.env.WS_PORT) || 4712,
|
||||||
|
dapHost: process.env.DAP_HOST || '127.0.0.1',
|
||||||
|
dapPort: parseInt(process.env.DAP_PORT) || 4711,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Parse CLI arguments
|
||||||
|
for (let i = 2; i < process.argv.length; i++) {
|
||||||
|
switch (process.argv[i]) {
|
||||||
|
case '--ws-port':
|
||||||
|
config.wsPort = parseInt(process.argv[++i]);
|
||||||
|
break;
|
||||||
|
case '--dap-host':
|
||||||
|
config.dapHost = process.argv[++i];
|
||||||
|
break;
|
||||||
|
case '--dap-port':
|
||||||
|
config.dapPort = parseInt(process.argv[++i]);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`[Proxy] Starting WebSocket-to-TCP proxy`);
|
||||||
|
console.log(`[Proxy] WebSocket: ws://localhost:${config.wsPort}`);
|
||||||
|
console.log(`[Proxy] DAP Server: tcp://${config.dapHost}:${config.dapPort}`);
|
||||||
|
|
||||||
|
const wss = new WebSocket.Server({
|
||||||
|
port: config.wsPort,
|
||||||
|
// Enable ping/pong for connection health checks
|
||||||
|
clientTracking: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(`[Proxy] WebSocket server listening on port ${config.wsPort}`);
|
||||||
|
|
||||||
|
// Ping all clients every 25 seconds to detect dead connections
|
||||||
|
// This is shorter than Chrome's service worker timeout (~30s)
|
||||||
|
const PING_INTERVAL = 25000;
|
||||||
|
const pingInterval = setInterval(() => {
|
||||||
|
wss.clients.forEach((ws) => {
|
||||||
|
if (ws.isAlive === false) {
|
||||||
|
console.log(`[Proxy] Client failed to respond to ping, terminating`);
|
||||||
|
return ws.terminate();
|
||||||
|
}
|
||||||
|
ws.isAlive = false;
|
||||||
|
ws.ping();
|
||||||
|
});
|
||||||
|
}, PING_INTERVAL);
|
||||||
|
|
||||||
|
wss.on('connection', (ws, req) => {
|
||||||
|
const clientId = `${req.socket.remoteAddress}:${req.socket.remotePort}`;
|
||||||
|
console.log(`[Proxy] WebSocket client connected: ${clientId}`);
|
||||||
|
|
||||||
|
// Mark as alive for ping/pong tracking
|
||||||
|
ws.isAlive = true;
|
||||||
|
ws.on('pong', () => {
|
||||||
|
ws.isAlive = true;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Connect to DAP TCP server
|
||||||
|
const tcp = net.createConnection({
|
||||||
|
host: config.dapHost,
|
||||||
|
port: config.dapPort,
|
||||||
|
});
|
||||||
|
|
||||||
|
let tcpBuffer = '';
|
||||||
|
let tcpConnected = false;
|
||||||
|
|
||||||
|
tcp.on('connect', () => {
|
||||||
|
tcpConnected = true;
|
||||||
|
console.log(`[Proxy] Connected to DAP server at ${config.dapHost}:${config.dapPort}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
tcp.on('error', (err) => {
|
||||||
|
console.error(`[Proxy] TCP error: ${err.message}`);
|
||||||
|
if (ws.readyState === WebSocket.OPEN) {
|
||||||
|
ws.send(
|
||||||
|
JSON.stringify({
|
||||||
|
type: 'proxy-error',
|
||||||
|
message: `Failed to connect to DAP server: ${err.message}`,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
ws.close(1011, 'DAP server connection failed');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tcp.on('close', () => {
|
||||||
|
console.log(`[Proxy] TCP connection closed`);
|
||||||
|
if (ws.readyState === WebSocket.OPEN) {
|
||||||
|
ws.close(1000, 'DAP server disconnected');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// WebSocket → TCP: Add Content-Length framing
ws.on('message', (data) => {
  const text = data.toString();
  try {
    // Reject anything that is not well-formed JSON up front.
    const message = JSON.parse(text);

    // Keepalive messages are a browser-extension concern only:
    // acknowledge them here and never forward them to the DAP server.
    if (message.type === 'keepalive') {
      console.log(`[Proxy] Keepalive received from client`);
      if (ws.readyState === WebSocket.OPEN) {
        ws.send(JSON.stringify({ type: 'keepalive-ack', timestamp: Date.now() }));
      }
      return;
    }

    if (!tcpConnected) {
      console.warn(`[Proxy] TCP not connected, dropping message`);
      return;
    }

    console.log(`[Proxy] WS→TCP: ${message.command || message.event || 'message'}`);

    // Prefix the payload with a DAP Content-Length header (a byte count,
    // hence Buffer.byteLength rather than the string length).
    tcp.write(`Content-Length: ${Buffer.byteLength(text)}\r\n\r\n${text}`);
  } catch (err) {
    console.error(`[Proxy] Invalid JSON from WebSocket: ${err.message}`);
  }
});
|
||||||
|
|
||||||
|
// TCP → WebSocket: strip Content-Length framing and forward JSON payloads.
//
// DAP's Content-Length header counts BYTES, so the buffer is kept as a raw
// Buffer and every offset below is a byte offset. (A string-based buffer
// would miscount for non-ASCII payloads — string indices are UTF-16 code
// units — and per-chunk toString() could corrupt a multi-byte character
// split across TCP chunks.)
tcp.on('data', (chunk) => {
  // tcpBuffer starts life as '' — normalize it to a Buffer, then append.
  tcpBuffer = Buffer.concat([Buffer.from(tcpBuffer), chunk]);

  // Drain every complete DAP message currently in the buffer.
  while (true) {
    // Look for the end of the Content-Length header block.
    const headerEnd = tcpBuffer.indexOf('\r\n\r\n');
    if (headerEnd === -1) break; // header not complete yet

    const header = tcpBuffer.subarray(0, headerEnd).toString();
    const match = header.match(/Content-Length:\s*(\d+)/i);
    if (!match) {
      console.error(`[Proxy] Invalid DAP header: ${header}`);
      // Skip past the malformed header and keep scanning.
      tcpBuffer = tcpBuffer.subarray(headerEnd + 4);
      continue;
    }

    const contentLength = parseInt(match[1], 10);
    const messageStart = headerEnd + 4;
    const messageEnd = messageStart + contentLength;

    // Wait until the full body has arrived (byte comparison).
    if (tcpBuffer.length < messageEnd) break;

    // Extract the JSON message and drop it from the buffer.
    const json = tcpBuffer.subarray(messageStart, messageEnd).toString();
    tcpBuffer = tcpBuffer.subarray(messageEnd);

    // Forward to the WebSocket client.
    try {
      const parsed = JSON.parse(json);
      console.log(
        `[Proxy] TCP→WS: ${parsed.type} ${parsed.command || parsed.event || ''} ${parsed.request_seq ? `(req_seq: ${parsed.request_seq})` : ''}`
      );

      if (ws.readyState === WebSocket.OPEN) {
        ws.send(json);
      }
    } catch (err) {
      console.error(`[Proxy] Invalid JSON from TCP: ${err.message}`);
    }
  }
});
|
||||||
|
|
||||||
|
// Handle WebSocket close
|
||||||
|
ws.on('close', (code, reason) => {
|
||||||
|
console.log(`[Proxy] WebSocket closed: ${code} ${reason}`);
|
||||||
|
tcp.end();
|
||||||
|
});
|
||||||
|
|
||||||
|
ws.on('error', (err) => {
|
||||||
|
console.error(`[Proxy] WebSocket error: ${err.message}`);
|
||||||
|
tcp.end();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
wss.on('error', (err) => {
|
||||||
|
console.error(`[Proxy] WebSocket server error: ${err.message}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Graceful shutdown
|
||||||
|
process.on('SIGINT', () => {
|
||||||
|
console.log(`\n[Proxy] Shutting down...`);
|
||||||
|
clearInterval(pingInterval);
|
||||||
|
wss.clients.forEach((ws) => ws.close(1001, 'Server shutting down'));
|
||||||
|
wss.close(() => {
|
||||||
|
console.log(`[Proxy] Goodbye!`);
|
||||||
|
process.exit(0);
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -250,6 +250,42 @@ Two problem matchers can be used:
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Default from path
|
||||||
|
|
||||||
|
The problem matcher can specify a `fromPath` property at the top level, which applies when a specific pattern doesn't provide a value for `fromPath`. This is useful for tools that don't include project file information in their output.
|
||||||
|
|
||||||
|
For example, given the following compiler output that doesn't include project file information:
|
||||||
|
|
||||||
|
```
|
||||||
|
ClassLibrary.cs(16,24): warning CS0612: 'ClassLibrary.Helpers.MyHelper.Name' is obsolete
|
||||||
|
```
|
||||||
|
|
||||||
|
A problem matcher with a default from path can be used:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "csc-minimal",
|
||||||
|
"fromPath": "ClassLibrary/ClassLibrary.csproj",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+)\\((\\d+),(\\d+)\\): (error|warning) (.+): (.*)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"severity": 4,
|
||||||
|
"code": 5,
|
||||||
|
"message": 6
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This ensures that the file is rooted to the correct path when there's not enough information in the error messages to extract a `fromPath`.
|
||||||
|
|
||||||
#### Mitigate regular expression denial of service (ReDos)
|
#### Mitigate regular expression denial of service (ReDos)
|
||||||
|
|
||||||
If a matcher exceeds a 1 second timeout when processing a line, retry up to two more times (three attempts total).
|
If a matcher exceeds a 1 second timeout when processing a line, retry up to two more times (three attempts total).
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ This feature is mainly intended for self hosted runner administrators.
|
|||||||
- `ACTIONS_RUNNER_HOOK_JOB_STARTED`
|
- `ACTIONS_RUNNER_HOOK_JOB_STARTED`
|
||||||
- `ACTIONS_RUNNER_HOOK_JOB_COMPLETED`
|
- `ACTIONS_RUNNER_HOOK_JOB_COMPLETED`
|
||||||
|
|
||||||
You can set these variables to the **absolute** path of a `.sh` or `.ps1` file.
|
You can set these variables to the **absolute** path of a `.sh` or `.ps1` file.
|
||||||
|
|
||||||
We will execute `pwsh` (fallback to `powershell`) or `bash` (fallback to `sh`) as appropriate.
|
We will execute `pwsh` (fallback to `powershell`) or `bash` (fallback to `sh`) as appropriate.
|
||||||
- `.sh` files will execute with the args `-e {pathtofile}`
|
- `.sh` files will execute with the args `-e {pathtofile}`
|
||||||
|
|||||||
@@ -4,9 +4,9 @@
|
|||||||
|
|
||||||
Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.
|
Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.
|
||||||
|
|
||||||
The runner carries its own copy of node.js executable under `<runner_root>/externals/node20/`.
|
The runner carries its own copies of node.js executables under `<runner_root>/externals/node20/` and `<runner_root>/externals/node24/`.
|
||||||
|
|
||||||
All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node20/`.
|
All javascript base Actions will get executed by the built-in `node` at either `<runner_root>/externals/node20/` or `<runner_root>/externals/node24/` depending on the version specified in the action's metadata.
|
||||||
|
|
||||||
> Not the `node` from `$PATH`
|
> Not the `node` from `$PATH`
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Contributions
|
# Contributions
|
||||||
|
|
||||||
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors. Please note the runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page.
|
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors. Please note the runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page.
|
||||||
|
|
||||||
> IMPORTANT: Building your own runner is critical for the dev inner loop process when contributing changes. However, only runners built and distributed by GitHub (releases) are supported in production. Be aware that workflows and orchestrations run service side with the runner being a remote process to run steps. For that reason, the service can pull the runner forward so customizations can be lost.
|
> IMPORTANT: Building your own runner is critical for the dev inner loop process when contributing changes. However, only runners built and distributed by GitHub (releases) are supported in production. Be aware that workflows and orchestrations run service side with the runner being a remote process to run steps. For that reason, the service can pull the runner forward so customizations can be lost.
|
||||||
|
|
||||||
|
|||||||
217
docs/dependency-management.md
Normal file
217
docs/dependency-management.md
Normal file
@@ -0,0 +1,217 @@
|
|||||||
|
# Runner Dependency Management Process
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
This document outlines the automated dependency management process for the GitHub Actions Runner, designed to ensure we maintain up-to-date and secure dependencies while providing predictable release cycles.
|
||||||
|
|
||||||
|
## Release Schedule
|
||||||
|
|
||||||
|
- **Monthly Runner Releases**: New runner versions are released monthly
|
||||||
|
- **Weekly Dependency Checks**: Automated workflows check for dependency updates every Monday
|
||||||
|
- **Security Patches**: Critical security vulnerabilities are addressed immediately outside the regular schedule
|
||||||
|
|
||||||
|
## Automated Workflows
|
||||||
|
|
||||||
|
**Note**: These workflows are implemented across separate PRs for easier review and independent deployment. Each workflow includes comprehensive error handling and security-focused vulnerability detection.
|
||||||
|
|
||||||
|
### 1. Foundation Labels
|
||||||
|
|
||||||
|
- **Workflow**: `.github/workflows/setup-labels.yml` (PR #4024)
|
||||||
|
- **Purpose**: Creates consistent dependency labels for all automation workflows
|
||||||
|
- **Labels**: `dependencies`, `security`, `typescript`, `needs-manual-review`
|
||||||
|
- **Prerequisite**: Must be merged before other workflows for proper labeling
|
||||||
|
|
||||||
|
### 2. Node.js Version Updates
|
||||||
|
|
||||||
|
- **Workflow**: `.github/workflows/node-upgrade.yml`
|
||||||
|
- **Schedule**: Mondays at 6:00 AM UTC
|
||||||
|
- **Purpose**: Updates Node.js 20 and 24 versions in `src/Misc/externals.sh`
|
||||||
|
- **Source**: [nodejs.org](https://nodejs.org) and [actions/alpine_nodejs](https://github.com/actions/alpine_nodejs)
|
||||||
|
- **Priority**: First (NPM depends on current Node.js versions)
|
||||||
|
|
||||||
|
### 3. NPM Security Audit
|
||||||
|
|
||||||
|
- **Primary Workflow**: `.github/workflows/npm-audit.yml` ("NPM Audit Fix")
|
||||||
|
- **Schedule**: Mondays at 7:00 AM UTC
|
||||||
|
- **Purpose**: Automated security vulnerability detection and basic fixes
|
||||||
|
- **Location**: `src/Misc/expressionFunc/hashFiles/`
|
||||||
|
- **Features**: npm audit, security patch application, PR creation
|
||||||
|
- **Dependency**: Runs after Node.js updates for optimal compatibility
|
||||||
|
|
||||||
|
- **Fallback Workflow**: `.github/workflows/npm-audit-typescript.yml` ("NPM Audit Fix with TypeScript Auto-Fix")
|
||||||
|
- **Trigger**: Manual dispatch only
|
||||||
|
- **Purpose**: Manual security audit with TypeScript compatibility fixes
|
||||||
|
- **Use Case**: When scheduled workflow fails or needs custom intervention
|
||||||
|
- **Features**: Enhanced TypeScript auto-repair, graduated security response
|
||||||
|
- **How to Use**:
|
||||||
|
1. If the scheduled "NPM Audit Fix" workflow fails, go to Actions tab
|
||||||
|
2. Select "NPM Audit Fix with TypeScript Auto-Fix" workflow
|
||||||
|
3. Click "Run workflow" and optionally specify fix level (auto/manual)
|
||||||
|
4. Review the generated PR for TypeScript compatibility issues
|
||||||
|
|
||||||
|
### 4. .NET SDK Updates
|
||||||
|
|
||||||
|
- **Workflow**: `.github/workflows/dotnet-upgrade.yml`
|
||||||
|
- **Schedule**: Mondays at midnight UTC
|
||||||
|
- **Purpose**: Updates .NET SDK and package versions with build validation
|
||||||
|
- **Features**: Global.json updates, NuGet package management, compatibility checking
|
||||||
|
- **Independence**: Runs independently of Node.js/NPM updates
|
||||||
|
|
||||||
|
### 5. Docker/Buildx Updates
|
||||||
|
|
||||||
|
- **Workflow**: `.github/workflows/docker-buildx-upgrade.yml` ("Docker/Buildx Version Upgrade")
|
||||||
|
- **Schedule**: Mondays at midnight UTC
|
||||||
|
- **Purpose**: Updates Docker and Docker Buildx versions with multi-platform validation
|
||||||
|
- **Features**: Container security scanning, multi-architecture build testing
|
||||||
|
- **Independence**: Runs independently of other dependency updates
|
||||||
|
|
||||||
|
### 6. Dependency Monitoring
|
||||||
|
|
||||||
|
- **Workflow**: `.github/workflows/dependency-check.yml` ("Dependency Status Check")
|
||||||
|
- **Schedule**: Mondays at 11:00 AM UTC
|
||||||
|
- **Purpose**: Comprehensive status report of all dependencies with security audit
|
||||||
|
- **Features**: Multi-dependency checking, npm audit status, build validation, choice of specific component checks
|
||||||
|
- **Summary**: Runs last to capture results from all morning dependency updates
|
||||||
|
|
||||||
|
## Release Process Integration
|
||||||
|
|
||||||
|
### Pre-Release Checklist
|
||||||
|
|
||||||
|
Before each monthly runner release:
|
||||||
|
|
||||||
|
1. **Check Dependency PRs**:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# List all open dependency PRs
|
||||||
|
gh pr list --label "dependencies" --state open
|
||||||
|
|
||||||
|
# List only automated weekly dependency updates
|
||||||
|
gh pr list --label "dependencies-weekly-check" --state open
|
||||||
|
|
||||||
|
# List only custom dependency automation (not dependabot)
|
||||||
|
gh pr list --label "dependencies-not-dependabot" --state open
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Run Manual Dependency Check**:
|
||||||
|
- Go to Actions tab → "Dependency Status Check" → "Run workflow"
|
||||||
|
- Review the summary for any outdated dependencies
|
||||||
|
|
||||||
|
3. **Review and Merge Updates**:
|
||||||
|
- Prioritize security-related updates
|
||||||
|
- Test dependency updates in development environment
|
||||||
|
- Merge approved dependency PRs
|
||||||
|
|
||||||
|
### Vulnerability Response
|
||||||
|
|
||||||
|
#### Critical Security Vulnerabilities
|
||||||
|
|
||||||
|
- **Response Time**: Within 24 hours
|
||||||
|
- **Process**:
|
||||||
|
1. Assess impact on runner security
|
||||||
|
2. Create hotfix branch if runner data security is affected
|
||||||
|
3. Expedite patch release if necessary
|
||||||
|
4. Document in security advisory if applicable
|
||||||
|
|
||||||
|
#### Non-Critical Vulnerabilities
|
||||||
|
|
||||||
|
- **Response Time**: Next monthly release
|
||||||
|
- **Process**:
|
||||||
|
1. Evaluate if vulnerability affects runner functionality
|
||||||
|
2. Include fix in regular dependency update cycle
|
||||||
|
3. Document in release notes
|
||||||
|
|
||||||
|
## Monitoring and Alerts
|
||||||
|
|
||||||
|
### GitHub Actions Workflow Status
|
||||||
|
|
||||||
|
- All dependency workflows create PRs with the `dependencies` label
|
||||||
|
- Failed workflows should be investigated immediately
|
||||||
|
- Weekly dependency status reports are generated automatically
|
||||||
|
|
||||||
|
### Manual Checks
|
||||||
|
|
||||||
|
You can manually trigger dependency checks:
|
||||||
|
|
||||||
|
- **Full Status**: Run "Dependency Status Check" workflow
|
||||||
|
- **Specific Component**: Use the dropdown to check individual dependencies
|
||||||
|
|
||||||
|
## Dependency Labels
|
||||||
|
|
||||||
|
All automated dependency PRs are tagged with labels for easy filtering and management:
|
||||||
|
|
||||||
|
### Primary Labels
|
||||||
|
|
||||||
|
- **`dependencies`**: All automated dependency-related PRs
|
||||||
|
- **`dependencies-weekly-check`**: Automated weekly dependency updates from scheduled workflows
|
||||||
|
- **`dependencies-not-dependabot`**: Custom dependency automation (not created by dependabot)
|
||||||
|
- **`security`**: Security vulnerability fixes and patches
|
||||||
|
- **`typescript`**: TypeScript compatibility and type definition updates
|
||||||
|
- **`needs-manual-review`**: Complex updates requiring human verification
|
||||||
|
|
||||||
|
### Technology-Specific Labels
|
||||||
|
|
||||||
|
- **`node`**: Node.js version updates
|
||||||
|
- **`javascript`**: JavaScript runtime and tooling updates
|
||||||
|
- **`npm`**: NPM package and security updates
|
||||||
|
- **`dotnet`**: .NET SDK and NuGet package updates
|
||||||
|
- **`docker`**: Docker and container tooling updates
|
||||||
|
|
||||||
|
### Workflow-Specific Branches
|
||||||
|
|
||||||
|
- **Node.js updates**: `chore/update-node` branch
|
||||||
|
- **NPM security fixes**: `chore/npm-audit-fix-YYYYMMDD` and `chore/npm-audit-fix-with-ts-repair` branches
|
||||||
|
- **NuGet/.NET updates**: `feature/dotnetsdk-upgrade/{version}` branches
|
||||||
|
- **Docker updates**: `feature/docker-buildx-upgrade` branch
|
||||||
|
|
||||||
|
## Special Considerations
|
||||||
|
|
||||||
|
### Node.js Updates
|
||||||
|
|
||||||
|
When updating Node.js versions, remember to:
|
||||||
|
|
||||||
|
1. Create a corresponding release in [actions/alpine_nodejs](https://github.com/actions/alpine_nodejs)
|
||||||
|
2. Follow the alpine_nodejs getting started guide
|
||||||
|
3. Test container builds with new Node versions
|
||||||
|
|
||||||
|
### .NET SDK Updates
|
||||||
|
|
||||||
|
- Only patch versions are auto-updated within the same major.minor version
|
||||||
|
- Major/minor version updates require manual review and testing
|
||||||
|
|
||||||
|
### Docker Updates
|
||||||
|
|
||||||
|
- Updates include both Docker Engine and Docker Buildx
|
||||||
|
- Verify compatibility with runner container workflows
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Common Issues
|
||||||
|
|
||||||
|
1. **NPM Audit Workflow Fails**:
|
||||||
|
- Check if `package.json` exists in `src/Misc/expressionFunc/hashFiles/`
|
||||||
|
- Verify Node.js setup step succeeded
|
||||||
|
|
||||||
|
2. **Version Detection Fails**:
|
||||||
|
- Check if upstream APIs are available
|
||||||
|
- Verify parsing logic for version extraction
|
||||||
|
|
||||||
|
3. **PR Creation Fails**:
|
||||||
|
- Ensure `GITHUB_TOKEN` has sufficient permissions
|
||||||
|
- Check if branch already exists
|
||||||
|
|
||||||
|
### Contact
|
||||||
|
|
||||||
|
For questions about the dependency management process:
|
||||||
|
|
||||||
|
- Create an issue with the `dependencies` label
|
||||||
|
- Review existing dependency management workflows
|
||||||
|
- Consult the runner team for security-related concerns
|
||||||
|
|
||||||
|
## Metrics and KPIs
|
||||||
|
|
||||||
|
Track these metrics to measure dependency management effectiveness:
|
||||||
|
|
||||||
|
- Number of open dependency PRs at release time
|
||||||
|
- Time to merge dependency updates
|
||||||
|
- Number of security vulnerabilities by severity
|
||||||
|
- Release cycle adherence (monthly target)
|
||||||
@@ -4,7 +4,7 @@
|
|||||||
|
|
||||||
## Supported Distributions and Versions
|
## Supported Distributions and Versions
|
||||||
|
|
||||||
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#linux)."
|
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#linux)."
|
||||||
|
|
||||||
## Install .Net Core 3.x Linux Dependencies
|
## Install .Net Core 3.x Linux Dependencies
|
||||||
|
|
||||||
|
|||||||
@@ -4,6 +4,6 @@
|
|||||||
|
|
||||||
## Supported Versions
|
## Supported Versions
|
||||||
|
|
||||||
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#macos)."
|
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#macos)."
|
||||||
|
|
||||||
## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore30)
|
## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore30)
|
||||||
|
|||||||
@@ -2,6 +2,6 @@
|
|||||||
|
|
||||||
## Supported Versions
|
## Supported Versions
|
||||||
|
|
||||||
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#windows)."
|
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#windows)."
|
||||||
|
|
||||||
## [More .NET Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30)
|
## [More .NET Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30)
|
||||||
|
|||||||
@@ -1,12 +1,12 @@
|
|||||||
# Source: https://github.com/dotnet/dotnet-docker
|
# Source: https://github.com/dotnet/dotnet-docker
|
||||||
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-jammy as build
|
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-noble AS build
|
||||||
|
|
||||||
ARG TARGETOS
|
ARG TARGETOS
|
||||||
ARG TARGETARCH
|
ARG TARGETARCH
|
||||||
ARG RUNNER_VERSION
|
ARG RUNNER_VERSION
|
||||||
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.6.1
|
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.7.0
|
||||||
ARG DOCKER_VERSION=27.4.1
|
ARG DOCKER_VERSION=29.0.2
|
||||||
ARG BUILDX_VERSION=0.19.3
|
ARG BUILDX_VERSION=0.30.1
|
||||||
|
|
||||||
RUN apt update -y && apt install curl unzip -y
|
RUN apt update -y && apt install curl unzip -y
|
||||||
|
|
||||||
@@ -21,6 +21,10 @@ RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-c
|
|||||||
&& unzip ./runner-container-hooks.zip -d ./k8s \
|
&& unzip ./runner-container-hooks.zip -d ./k8s \
|
||||||
&& rm runner-container-hooks.zip
|
&& rm runner-container-hooks.zip
|
||||||
|
|
||||||
|
RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-container-hooks/releases/download/v0.8.0/actions-runner-hooks-k8s-0.8.0.zip \
|
||||||
|
&& unzip ./runner-container-hooks.zip -d ./k8s-novolume \
|
||||||
|
&& rm runner-container-hooks.zip
|
||||||
|
|
||||||
RUN export RUNNER_ARCH=${TARGETARCH} \
|
RUN export RUNNER_ARCH=${TARGETARCH} \
|
||||||
&& if [ "$RUNNER_ARCH" = "amd64" ]; then export DOCKER_ARCH=x86_64 ; fi \
|
&& if [ "$RUNNER_ARCH" = "amd64" ]; then export DOCKER_ARCH=x86_64 ; fi \
|
||||||
&& if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
|
&& if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
|
||||||
@@ -29,15 +33,15 @@ RUN export RUNNER_ARCH=${TARGETARCH} \
|
|||||||
&& rm -rf docker.tgz \
|
&& rm -rf docker.tgz \
|
||||||
&& mkdir -p /usr/local/lib/docker/cli-plugins \
|
&& mkdir -p /usr/local/lib/docker/cli-plugins \
|
||||||
&& curl -fLo /usr/local/lib/docker/cli-plugins/docker-buildx \
|
&& curl -fLo /usr/local/lib/docker/cli-plugins/docker-buildx \
|
||||||
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
|
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
|
||||||
&& chmod +x /usr/local/lib/docker/cli-plugins/docker-buildx
|
&& chmod +x /usr/local/lib/docker/cli-plugins/docker-buildx
|
||||||
|
|
||||||
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-jammy
|
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-noble
|
||||||
|
|
||||||
ENV DEBIAN_FRONTEND=noninteractive
|
ENV DEBIAN_FRONTEND=noninteractive
|
||||||
ENV RUNNER_MANUALLY_TRAP_SIG=1
|
ENV RUNNER_MANUALLY_TRAP_SIG=1
|
||||||
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
|
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
|
||||||
ENV ImageOS=ubuntu22
|
ENV ImageOS=ubuntu24
|
||||||
|
|
||||||
# 'gpg-agent' and 'software-properties-common' are needed for the 'add-apt-repository' command that follows
|
# 'gpg-agent' and 'software-properties-common' are needed for the 'add-apt-repository' command that follows
|
||||||
RUN apt update -y \
|
RUN apt update -y \
|
||||||
@@ -55,7 +59,8 @@ RUN adduser --disabled-password --gecos "" --uid 1001 runner \
|
|||||||
&& usermod -aG sudo runner \
|
&& usermod -aG sudo runner \
|
||||||
&& usermod -aG docker runner \
|
&& usermod -aG docker runner \
|
||||||
&& echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
|
&& echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
|
||||||
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers
|
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers \
|
||||||
|
&& chmod 777 /home/runner
|
||||||
|
|
||||||
WORKDIR /home/runner
|
WORKDIR /home/runner
|
||||||
|
|
||||||
|
|||||||
@@ -1,23 +1,27 @@
|
|||||||
## What's Changed
|
## What's Changed
|
||||||
* Fix name of generated of artifact builds from GitHub workflow for arm artifacts by @satmandu in https://github.com/actions/runner/pull/3568
|
* Fix owner of /home/runner directory by @nikola-jokic in https://github.com/actions/runner/pull/4132
|
||||||
* Ignore error when fail to report worker crash. by @TingluoHuang in https://github.com/actions/runner/pull/3588
|
* Update Docker to v29.0.2 and Buildx to v0.30.1 by @github-actions[bot] in https://github.com/actions/runner/pull/4135
|
||||||
* Fix null ref in 'OnEventWritten()' by @TingluoHuang in https://github.com/actions/runner/pull/3593
|
* Update workflow around runner docker image. by @TingluoHuang in https://github.com/actions/runner/pull/4133
|
||||||
* Send stepNumber for annotation to run-service by @TingluoHuang in https://github.com/actions/runner/pull/3614
|
* Fix regex for validating runner version format by @TingluoHuang in https://github.com/actions/runner/pull/4136
|
||||||
* Enable nuget audit. by @TingluoHuang in https://github.com/actions/runner/pull/3615
|
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4144
|
||||||
* Update dotnet install script. by @TingluoHuang in https://github.com/actions/runner/pull/3659
|
* Ensure safe_sleep tries alternative approaches by @TingluoHuang in https://github.com/actions/runner/pull/4146
|
||||||
* Print immutable action package details in set up job logs by @heavymachinery in https://github.com/actions/runner/pull/3645
|
* Bump actions/github-script from 7 to 8 by @dependabot[bot] in https://github.com/actions/runner/pull/4137
|
||||||
* Update dotnet sdk to latest version @8.0.405 by @github-actions in https://github.com/actions/runner/pull/3666
|
* Bump actions/checkout from 5 to 6 by @dependabot[bot] in https://github.com/actions/runner/pull/4130
|
||||||
* Upgrade `buildx` from `0.18.0` to `0.19.3` (critical CVE) by @MPV in https://github.com/actions/runner/pull/3647
|
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4149
|
||||||
* Upgrade `docker` from `27.3.1` to `27.4.1` by @MPV in https://github.com/actions/runner/pull/3648
|
* Bump docker image to use ubuntu 24.04 by @TingluoHuang in https://github.com/actions/runner/pull/4018
|
||||||
* Bump Microsoft.NET.Test.Sdk from 17.8.0 to 17.12.0 in /src by @dependabot in https://github.com/actions/runner/pull/3584
|
* Add support for case function by @AllanGuigou in https://github.com/actions/runner/pull/4147
|
||||||
* Bump docker/setup-buildx-action from 2 to 3 by @dependabot in https://github.com/actions/runner/pull/3564
|
* Cleanup feature flag actions_container_action_runner_temp by @ericsciple in https://github.com/actions/runner/pull/4163
|
||||||
* Bump github/codeql-action from 2 to 3 by @dependabot in https://github.com/actions/runner/pull/3555
|
* Bump actions/download-artifact from 6 to 7 by @dependabot[bot] in https://github.com/actions/runner/pull/4155
|
||||||
* Bump Moq from 4.20.70 to 4.20.72 in /src by @dependabot in https://github.com/actions/runner/pull/3672
|
* Bump actions/upload-artifact from 5 to 6 by @dependabot[bot] in https://github.com/actions/runner/pull/4157
|
||||||
|
* Set ACTIONS_ORCHESTRATION_ID as env to actions. by @TingluoHuang in https://github.com/actions/runner/pull/4178
|
||||||
|
* Allow hosted VM report job telemetry via .setup_info file. by @TingluoHuang in https://github.com/actions/runner/pull/4186
|
||||||
|
* Bump typescript from 5.9.2 to 5.9.3 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/4184
|
||||||
|
* Bump Azure.Storage.Blobs from 12.26.0 to 12.27.0 by @dependabot[bot] in https://github.com/actions/runner/pull/4189
|
||||||
|
|
||||||
## New Contributors
|
## New Contributors
|
||||||
* @satmandu made their first contribution in https://github.com/actions/runner/pull/3568
|
* @AllanGuigou made their first contribution in https://github.com/actions/runner/pull/4147
|
||||||
|
|
||||||
**Full Changelog**: https://github.com/actions/runner/compare/v2.321.0...v2.322.0
|
**Full Changelog**: https://github.com/actions/runner/compare/v2.330.0...v2.331.0
|
||||||
|
|
||||||
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
|
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
|
||||||
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
|
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
|
||||||
|
|||||||
2
src/Misc/dotnet-install.sh
vendored
2
src/Misc/dotnet-install.sh
vendored
@@ -11,7 +11,7 @@ set -u
|
|||||||
# This is causing it to fail
|
# This is causing it to fail
|
||||||
set -o pipefail
|
set -o pipefail
|
||||||
|
|
||||||
# Use in the the functions: eval $invocation
|
# Use in the functions: eval $invocation
|
||||||
invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"'
|
invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"'
|
||||||
|
|
||||||
# standard output may be used as a return value in the functions
|
# standard output may be used as a return value in the functions
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"plugins": ["@typescript-eslint"],
|
"plugins": ["@typescript-eslint", "@stylistic"],
|
||||||
"extends": ["plugin:github/recommended"],
|
"extends": ["plugin:github/recommended"],
|
||||||
"parser": "@typescript-eslint/parser",
|
"parser": "@typescript-eslint/parser",
|
||||||
"parserOptions": {
|
"parserOptions": {
|
||||||
@@ -26,7 +26,7 @@
|
|||||||
],
|
],
|
||||||
"camelcase": "off",
|
"camelcase": "off",
|
||||||
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
|
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
|
||||||
"@typescript-eslint/func-call-spacing": ["error", "never"],
|
"@stylistic/func-call-spacing": ["error", "never"],
|
||||||
"@typescript-eslint/no-array-constructor": "error",
|
"@typescript-eslint/no-array-constructor": "error",
|
||||||
"@typescript-eslint/no-empty-interface": "error",
|
"@typescript-eslint/no-empty-interface": "error",
|
||||||
"@typescript-eslint/no-explicit-any": "error",
|
"@typescript-eslint/no-explicit-any": "error",
|
||||||
@@ -47,8 +47,8 @@
|
|||||||
"@typescript-eslint/promise-function-async": "error",
|
"@typescript-eslint/promise-function-async": "error",
|
||||||
"@typescript-eslint/require-array-sort-compare": "error",
|
"@typescript-eslint/require-array-sort-compare": "error",
|
||||||
"@typescript-eslint/restrict-plus-operands": "error",
|
"@typescript-eslint/restrict-plus-operands": "error",
|
||||||
"@typescript-eslint/semi": ["error", "never"],
|
"@stylistic/semi": ["error", "never"],
|
||||||
"@typescript-eslint/type-annotation-spacing": "error",
|
"@stylistic/type-annotation-spacing": "error",
|
||||||
"@typescript-eslint/unbound-method": "error",
|
"@typescript-eslint/unbound-method": "error",
|
||||||
"filenames/match-regex" : "off",
|
"filenames/match-regex" : "off",
|
||||||
"github/no-then" : 1, // warning
|
"github/no-then" : 1, // warning
|
||||||
|
|||||||
2187
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
2187
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -10,8 +10,7 @@
|
|||||||
"lint": "eslint src/**/*.ts",
|
"lint": "eslint src/**/*.ts",
|
||||||
"pack": "ncc build -o ../../layoutbin/hashFiles",
|
"pack": "ncc build -o ../../layoutbin/hashFiles",
|
||||||
"all": "npm run format && npm run lint && npm run build && npm run pack",
|
"all": "npm run format && npm run lint && npm run build && npm run pack",
|
||||||
"prepare": "cd ../../../../ && husky install"
|
"prepare": "cd ../../../../ && husky"
|
||||||
|
|
||||||
},
|
},
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
@@ -36,16 +35,17 @@
|
|||||||
"@actions/glob": "^0.4.0"
|
"@actions/glob": "^0.4.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/node": "^20.6.2",
|
"@stylistic/eslint-plugin": "^3.1.0",
|
||||||
"@typescript-eslint/eslint-plugin": "^6.7.2",
|
"@types/node": "^22.0.0",
|
||||||
"@typescript-eslint/parser": "^6.7.2",
|
"@typescript-eslint/eslint-plugin": "^8.0.0",
|
||||||
"@vercel/ncc": "^0.38.0",
|
"@typescript-eslint/parser": "^8.0.0",
|
||||||
|
"@vercel/ncc": "^0.38.3",
|
||||||
"eslint": "^8.47.0",
|
"eslint": "^8.47.0",
|
||||||
"eslint-plugin-github": "^4.10.0",
|
"eslint-plugin-github": "^4.10.2",
|
||||||
"eslint-plugin-prettier": "^5.0.0",
|
"eslint-plugin-prettier": "^5.0.0",
|
||||||
|
"husky": "^9.1.7",
|
||||||
|
"lint-staged": "^15.5.0",
|
||||||
"prettier": "^3.0.3",
|
"prettier": "^3.0.3",
|
||||||
"typescript": "^5.2.2",
|
"typescript": "^5.9.3"
|
||||||
"husky": "^8.0.3",
|
|
||||||
"lint-staged": "^14.0.0"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -6,7 +6,8 @@ NODE_URL=https://nodejs.org/dist
|
|||||||
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
|
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
|
||||||
# When you update Node versions you must also create a new release of alpine_nodejs at that updated version.
|
# When you update Node versions you must also create a new release of alpine_nodejs at that updated version.
|
||||||
# Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started
|
# Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started
|
||||||
NODE20_VERSION="20.18.0"
|
NODE20_VERSION="20.19.6"
|
||||||
|
NODE24_VERSION="24.12.0"
|
||||||
|
|
||||||
get_abs_path() {
|
get_abs_path() {
|
||||||
# exploits the fact that pwd will print abs path when no args
|
# exploits the fact that pwd will print abs path when no args
|
||||||
@@ -139,6 +140,8 @@ function acquireExternalTool() {
|
|||||||
if [[ "$PACKAGERUNTIME" == "win-x64" || "$PACKAGERUNTIME" == "win-x86" ]]; then
|
if [[ "$PACKAGERUNTIME" == "win-x64" || "$PACKAGERUNTIME" == "win-x86" ]]; then
|
||||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
|
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
|
||||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
|
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
|
||||||
|
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.exe" node24/bin
|
||||||
|
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.lib" node24/bin
|
||||||
if [[ "$PRECACHE" != "" ]]; then
|
if [[ "$PRECACHE" != "" ]]; then
|
||||||
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
|
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
|
||||||
fi
|
fi
|
||||||
@@ -149,6 +152,8 @@ if [[ "$PACKAGERUNTIME" == "win-arm64" ]]; then
|
|||||||
# todo: replace these with official release when available
|
# todo: replace these with official release when available
|
||||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
|
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
|
||||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
|
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
|
||||||
|
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.exe" node24/bin
|
||||||
|
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.lib" node24/bin
|
||||||
if [[ "$PRECACHE" != "" ]]; then
|
if [[ "$PRECACHE" != "" ]]; then
|
||||||
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
|
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
|
||||||
fi
|
fi
|
||||||
@@ -157,21 +162,26 @@ fi
|
|||||||
# Download the external tools only for OSX.
|
# Download the external tools only for OSX.
|
||||||
if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then
|
if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then
|
||||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-x64.tar.gz" node20 fix_nested_dir
|
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-x64.tar.gz" node20 fix_nested_dir
|
||||||
|
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-darwin-x64.tar.gz" node24 fix_nested_dir
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ "$PACKAGERUNTIME" == "osx-arm64" ]]; then
|
if [[ "$PACKAGERUNTIME" == "osx-arm64" ]]; then
|
||||||
# node.js v12 doesn't support macOS on arm64.
|
# node.js v12 doesn't support macOS on arm64.
|
||||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-arm64.tar.gz" node20 fix_nested_dir
|
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-arm64.tar.gz" node20 fix_nested_dir
|
||||||
|
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-darwin-arm64.tar.gz" node24 fix_nested_dir
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Download the external tools for Linux PACKAGERUNTIMEs.
|
# Download the external tools for Linux PACKAGERUNTIMEs.
|
||||||
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
|
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
|
||||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-x64.tar.gz" node20 fix_nested_dir
|
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-x64.tar.gz" node20 fix_nested_dir
|
||||||
acquireExternalTool "$NODE_ALPINE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine
|
acquireExternalTool "$NODE_ALPINE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine
|
||||||
|
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-linux-x64.tar.gz" node24 fix_nested_dir
|
||||||
|
acquireExternalTool "$NODE_ALPINE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-alpine-x64.tar.gz" node24_alpine
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then
|
if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then
|
||||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-arm64.tar.gz" node20 fix_nested_dir
|
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-arm64.tar.gz" node20 fix_nested_dir
|
||||||
|
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-linux-arm64.tar.gz" node24 fix_nested_dir
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then
|
if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[Unit]
|
[Unit]
|
||||||
Description={{Description}}
|
Description={{Description}}
|
||||||
After=network.target
|
After=network-online.target
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
ExecStart={{RunnerRoot}}/runsvc.sh
|
ExecStart={{RunnerRoot}}/runsvc.sh
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
/******/ (() => { // webpackBootstrap
|
/******/ (() => { // webpackBootstrap
|
||||||
/******/ var __webpack_modules__ = ({
|
/******/ var __webpack_modules__ = ({
|
||||||
|
|
||||||
/***/ 2627:
|
/***/ 4711:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -22,13 +22,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
|
|||||||
}) : function(o, v) {
|
}) : function(o, v) {
|
||||||
o["default"] = v;
|
o["default"] = v;
|
||||||
});
|
});
|
||||||
var __importStar = (this && this.__importStar) || function (mod) {
|
var __importStar = (this && this.__importStar) || (function () {
|
||||||
if (mod && mod.__esModule) return mod;
|
var ownKeys = function(o) {
|
||||||
var result = {};
|
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
var ar = [];
|
||||||
__setModuleDefault(result, mod);
|
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||||
return result;
|
return ar;
|
||||||
};
|
};
|
||||||
|
return ownKeys(o);
|
||||||
|
};
|
||||||
|
return function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
})();
|
||||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||||
return new (P || (P = Promise))(function (resolve, reject) {
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||||||
@@ -46,15 +56,15 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
|
|||||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
const crypto = __importStar(__nccwpck_require__(6113));
|
const crypto = __importStar(__nccwpck_require__(6982));
|
||||||
const fs = __importStar(__nccwpck_require__(7147));
|
const fs = __importStar(__nccwpck_require__(9896));
|
||||||
const glob = __importStar(__nccwpck_require__(8090));
|
const glob = __importStar(__nccwpck_require__(7206));
|
||||||
const path = __importStar(__nccwpck_require__(1017));
|
const path = __importStar(__nccwpck_require__(6928));
|
||||||
const stream = __importStar(__nccwpck_require__(2781));
|
const stream = __importStar(__nccwpck_require__(2203));
|
||||||
const util = __importStar(__nccwpck_require__(3837));
|
const util = __importStar(__nccwpck_require__(9023));
|
||||||
function run() {
|
function run() {
|
||||||
var _a, e_1, _b, _c;
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
var _a, e_1, _b, _c;
|
||||||
// arg0 -> node
|
// arg0 -> node
|
||||||
// arg1 -> hashFiles.js
|
// arg1 -> hashFiles.js
|
||||||
// env[followSymbolicLinks] = true/null
|
// env[followSymbolicLinks] = true/null
|
||||||
@@ -128,7 +138,7 @@ function run() {
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 7351:
|
/***/ 4914:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -154,8 +164,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.issue = exports.issueCommand = void 0;
|
exports.issue = exports.issueCommand = void 0;
|
||||||
const os = __importStar(__nccwpck_require__(2037));
|
const os = __importStar(__nccwpck_require__(857));
|
||||||
const utils_1 = __nccwpck_require__(5278);
|
const utils_1 = __nccwpck_require__(302);
|
||||||
/**
|
/**
|
||||||
* Commands
|
* Commands
|
||||||
*
|
*
|
||||||
@@ -227,7 +237,7 @@ function escapeProperty(s) {
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 2186:
|
/***/ 7484:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -262,12 +272,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
|
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
|
||||||
const command_1 = __nccwpck_require__(7351);
|
const command_1 = __nccwpck_require__(4914);
|
||||||
const file_command_1 = __nccwpck_require__(717);
|
const file_command_1 = __nccwpck_require__(4753);
|
||||||
const utils_1 = __nccwpck_require__(5278);
|
const utils_1 = __nccwpck_require__(302);
|
||||||
const os = __importStar(__nccwpck_require__(2037));
|
const os = __importStar(__nccwpck_require__(857));
|
||||||
const path = __importStar(__nccwpck_require__(1017));
|
const path = __importStar(__nccwpck_require__(6928));
|
||||||
const oidc_utils_1 = __nccwpck_require__(8041);
|
const oidc_utils_1 = __nccwpck_require__(5306);
|
||||||
/**
|
/**
|
||||||
* The code to exit an action
|
* The code to exit an action
|
||||||
*/
|
*/
|
||||||
@@ -552,17 +562,17 @@ exports.getIDToken = getIDToken;
|
|||||||
/**
|
/**
|
||||||
* Summary exports
|
* Summary exports
|
||||||
*/
|
*/
|
||||||
var summary_1 = __nccwpck_require__(1327);
|
var summary_1 = __nccwpck_require__(1847);
|
||||||
Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
|
Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
|
||||||
/**
|
/**
|
||||||
* @deprecated use core.summary
|
* @deprecated use core.summary
|
||||||
*/
|
*/
|
||||||
var summary_2 = __nccwpck_require__(1327);
|
var summary_2 = __nccwpck_require__(1847);
|
||||||
Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
|
Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
|
||||||
/**
|
/**
|
||||||
* Path exports
|
* Path exports
|
||||||
*/
|
*/
|
||||||
var path_utils_1 = __nccwpck_require__(2981);
|
var path_utils_1 = __nccwpck_require__(1976);
|
||||||
Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
|
Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
|
||||||
Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
|
Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
|
||||||
Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
|
Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
|
||||||
@@ -570,7 +580,7 @@ Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: funct
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 717:
|
/***/ 4753:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -599,10 +609,10 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||||||
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
|
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
|
||||||
// We use any as a valid input type
|
// We use any as a valid input type
|
||||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||||
const fs = __importStar(__nccwpck_require__(7147));
|
const fs = __importStar(__nccwpck_require__(9896));
|
||||||
const os = __importStar(__nccwpck_require__(2037));
|
const os = __importStar(__nccwpck_require__(857));
|
||||||
const uuid_1 = __nccwpck_require__(5840);
|
const uuid_1 = __nccwpck_require__(2048);
|
||||||
const utils_1 = __nccwpck_require__(5278);
|
const utils_1 = __nccwpck_require__(302);
|
||||||
function issueFileCommand(command, message) {
|
function issueFileCommand(command, message) {
|
||||||
const filePath = process.env[`GITHUB_${command}`];
|
const filePath = process.env[`GITHUB_${command}`];
|
||||||
if (!filePath) {
|
if (!filePath) {
|
||||||
@@ -635,7 +645,7 @@ exports.prepareKeyValueMessage = prepareKeyValueMessage;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 8041:
|
/***/ 5306:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -651,9 +661,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.OidcClient = void 0;
|
exports.OidcClient = void 0;
|
||||||
const http_client_1 = __nccwpck_require__(6255);
|
const http_client_1 = __nccwpck_require__(4844);
|
||||||
const auth_1 = __nccwpck_require__(5526);
|
const auth_1 = __nccwpck_require__(4552);
|
||||||
const core_1 = __nccwpck_require__(2186);
|
const core_1 = __nccwpck_require__(7484);
|
||||||
class OidcClient {
|
class OidcClient {
|
||||||
static createHttpClient(allowRetry = true, maxRetry = 10) {
|
static createHttpClient(allowRetry = true, maxRetry = 10) {
|
||||||
const requestOptions = {
|
const requestOptions = {
|
||||||
@@ -719,7 +729,7 @@ exports.OidcClient = OidcClient;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 2981:
|
/***/ 1976:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -745,7 +755,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
|
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
|
||||||
const path = __importStar(__nccwpck_require__(1017));
|
const path = __importStar(__nccwpck_require__(6928));
|
||||||
/**
|
/**
|
||||||
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
|
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
|
||||||
* replaced with /.
|
* replaced with /.
|
||||||
@@ -784,7 +794,7 @@ exports.toPlatformPath = toPlatformPath;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 1327:
|
/***/ 1847:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -800,8 +810,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
|
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
|
||||||
const os_1 = __nccwpck_require__(2037);
|
const os_1 = __nccwpck_require__(857);
|
||||||
const fs_1 = __nccwpck_require__(7147);
|
const fs_1 = __nccwpck_require__(9896);
|
||||||
const { access, appendFile, writeFile } = fs_1.promises;
|
const { access, appendFile, writeFile } = fs_1.promises;
|
||||||
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
|
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
|
||||||
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
|
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
|
||||||
@@ -1074,7 +1084,7 @@ exports.summary = _summary;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 5278:
|
/***/ 302:
|
||||||
/***/ ((__unused_webpack_module, exports) => {
|
/***/ ((__unused_webpack_module, exports) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -1121,7 +1131,7 @@ exports.toCommandProperties = toCommandProperties;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 8090:
|
/***/ 7206:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -1137,8 +1147,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.hashFiles = exports.create = void 0;
|
exports.hashFiles = exports.create = void 0;
|
||||||
const internal_globber_1 = __nccwpck_require__(8298);
|
const internal_globber_1 = __nccwpck_require__(103);
|
||||||
const internal_hash_files_1 = __nccwpck_require__(2448);
|
const internal_hash_files_1 = __nccwpck_require__(3608);
|
||||||
/**
|
/**
|
||||||
* Constructs a globber
|
* Constructs a globber
|
||||||
*
|
*
|
||||||
@@ -1174,7 +1184,7 @@ exports.hashFiles = hashFiles;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 1026:
|
/***/ 8164:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -1200,7 +1210,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.getOptions = void 0;
|
exports.getOptions = void 0;
|
||||||
const core = __importStar(__nccwpck_require__(2186));
|
const core = __importStar(__nccwpck_require__(7484));
|
||||||
/**
|
/**
|
||||||
* Returns a copy with defaults filled in.
|
* Returns a copy with defaults filled in.
|
||||||
*/
|
*/
|
||||||
@@ -1236,7 +1246,7 @@ exports.getOptions = getOptions;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 8298:
|
/***/ 103:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -1290,14 +1300,14 @@ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _ar
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.DefaultGlobber = void 0;
|
exports.DefaultGlobber = void 0;
|
||||||
const core = __importStar(__nccwpck_require__(2186));
|
const core = __importStar(__nccwpck_require__(7484));
|
||||||
const fs = __importStar(__nccwpck_require__(7147));
|
const fs = __importStar(__nccwpck_require__(9896));
|
||||||
const globOptionsHelper = __importStar(__nccwpck_require__(1026));
|
const globOptionsHelper = __importStar(__nccwpck_require__(8164));
|
||||||
const path = __importStar(__nccwpck_require__(1017));
|
const path = __importStar(__nccwpck_require__(6928));
|
||||||
const patternHelper = __importStar(__nccwpck_require__(9005));
|
const patternHelper = __importStar(__nccwpck_require__(8891));
|
||||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
const internal_match_kind_1 = __nccwpck_require__(2644);
|
||||||
const internal_pattern_1 = __nccwpck_require__(4536);
|
const internal_pattern_1 = __nccwpck_require__(5370);
|
||||||
const internal_search_state_1 = __nccwpck_require__(9117);
|
const internal_search_state_1 = __nccwpck_require__(9890);
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
class DefaultGlobber {
|
class DefaultGlobber {
|
||||||
constructor(options) {
|
constructor(options) {
|
||||||
@@ -1478,7 +1488,7 @@ exports.DefaultGlobber = DefaultGlobber;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 2448:
|
/***/ 3608:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -1520,12 +1530,12 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.hashFiles = void 0;
|
exports.hashFiles = void 0;
|
||||||
const crypto = __importStar(__nccwpck_require__(6113));
|
const crypto = __importStar(__nccwpck_require__(6982));
|
||||||
const core = __importStar(__nccwpck_require__(2186));
|
const core = __importStar(__nccwpck_require__(7484));
|
||||||
const fs = __importStar(__nccwpck_require__(7147));
|
const fs = __importStar(__nccwpck_require__(9896));
|
||||||
const stream = __importStar(__nccwpck_require__(2781));
|
const stream = __importStar(__nccwpck_require__(2203));
|
||||||
const util = __importStar(__nccwpck_require__(3837));
|
const util = __importStar(__nccwpck_require__(9023));
|
||||||
const path = __importStar(__nccwpck_require__(1017));
|
const path = __importStar(__nccwpck_require__(6928));
|
||||||
function hashFiles(globber, currentWorkspace, verbose = false) {
|
function hashFiles(globber, currentWorkspace, verbose = false) {
|
||||||
var e_1, _a;
|
var e_1, _a;
|
||||||
var _b;
|
var _b;
|
||||||
@@ -1582,7 +1592,7 @@ exports.hashFiles = hashFiles;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 1063:
|
/***/ 2644:
|
||||||
/***/ ((__unused_webpack_module, exports) => {
|
/***/ ((__unused_webpack_module, exports) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -1607,7 +1617,7 @@ var MatchKind;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 1849:
|
/***/ 4138:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -1636,8 +1646,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
|
exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
|
||||||
const path = __importStar(__nccwpck_require__(1017));
|
const path = __importStar(__nccwpck_require__(6928));
|
||||||
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
const assert_1 = __importDefault(__nccwpck_require__(2613));
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
/**
|
/**
|
||||||
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
|
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
|
||||||
@@ -1812,7 +1822,7 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 6836:
|
/***/ 6617:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -1841,9 +1851,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.Path = void 0;
|
exports.Path = void 0;
|
||||||
const path = __importStar(__nccwpck_require__(1017));
|
const path = __importStar(__nccwpck_require__(6928));
|
||||||
const pathHelper = __importStar(__nccwpck_require__(1849));
|
const pathHelper = __importStar(__nccwpck_require__(4138));
|
||||||
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
const assert_1 = __importDefault(__nccwpck_require__(2613));
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
/**
|
/**
|
||||||
* Helper class for parsing paths into segments
|
* Helper class for parsing paths into segments
|
||||||
@@ -1932,7 +1942,7 @@ exports.Path = Path;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 9005:
|
/***/ 8891:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -1958,8 +1968,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
|
exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
|
||||||
const pathHelper = __importStar(__nccwpck_require__(1849));
|
const pathHelper = __importStar(__nccwpck_require__(4138));
|
||||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
const internal_match_kind_1 = __nccwpck_require__(2644);
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
/**
|
/**
|
||||||
* Given an array of patterns, returns an array of paths to search.
|
* Given an array of patterns, returns an array of paths to search.
|
||||||
@@ -2033,7 +2043,7 @@ exports.partialMatch = partialMatch;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 4536:
|
/***/ 5370:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -2062,13 +2072,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.Pattern = void 0;
|
exports.Pattern = void 0;
|
||||||
const os = __importStar(__nccwpck_require__(2037));
|
const os = __importStar(__nccwpck_require__(857));
|
||||||
const path = __importStar(__nccwpck_require__(1017));
|
const path = __importStar(__nccwpck_require__(6928));
|
||||||
const pathHelper = __importStar(__nccwpck_require__(1849));
|
const pathHelper = __importStar(__nccwpck_require__(4138));
|
||||||
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
const assert_1 = __importDefault(__nccwpck_require__(2613));
|
||||||
const minimatch_1 = __nccwpck_require__(3973);
|
const minimatch_1 = __nccwpck_require__(3772);
|
||||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
const internal_match_kind_1 = __nccwpck_require__(2644);
|
||||||
const internal_path_1 = __nccwpck_require__(6836);
|
const internal_path_1 = __nccwpck_require__(6617);
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
class Pattern {
|
class Pattern {
|
||||||
constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
|
constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
|
||||||
@@ -2295,7 +2305,7 @@ exports.Pattern = Pattern;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 9117:
|
/***/ 9890:
|
||||||
/***/ ((__unused_webpack_module, exports) => {
|
/***/ ((__unused_webpack_module, exports) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -2313,7 +2323,7 @@ exports.SearchState = SearchState;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 5526:
|
/***/ 4552:
|
||||||
/***/ (function(__unused_webpack_module, exports) {
|
/***/ (function(__unused_webpack_module, exports) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -2401,7 +2411,7 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 6255:
|
/***/ 4844:
|
||||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -2437,10 +2447,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
|
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
|
||||||
const http = __importStar(__nccwpck_require__(3685));
|
const http = __importStar(__nccwpck_require__(8611));
|
||||||
const https = __importStar(__nccwpck_require__(5687));
|
const https = __importStar(__nccwpck_require__(5692));
|
||||||
const pm = __importStar(__nccwpck_require__(9835));
|
const pm = __importStar(__nccwpck_require__(4988));
|
||||||
const tunnel = __importStar(__nccwpck_require__(4294));
|
const tunnel = __importStar(__nccwpck_require__(770));
|
||||||
var HttpCodes;
|
var HttpCodes;
|
||||||
(function (HttpCodes) {
|
(function (HttpCodes) {
|
||||||
HttpCodes[HttpCodes["OK"] = 200] = "OK";
|
HttpCodes[HttpCodes["OK"] = 200] = "OK";
|
||||||
@@ -3026,7 +3036,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 9835:
|
/***/ 4988:
|
||||||
/***/ ((__unused_webpack_module, exports) => {
|
/***/ ((__unused_webpack_module, exports) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -3115,7 +3125,7 @@ function isLoopbackAddress(host) {
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 9417:
|
/***/ 9380:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -3185,11 +3195,11 @@ function range(a, b, str) {
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 3717:
|
/***/ 4691:
|
||||||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
var concatMap = __nccwpck_require__(6891);
|
var concatMap = __nccwpck_require__(7087);
|
||||||
var balanced = __nccwpck_require__(9417);
|
var balanced = __nccwpck_require__(9380);
|
||||||
|
|
||||||
module.exports = expandTop;
|
module.exports = expandTop;
|
||||||
|
|
||||||
@@ -3299,7 +3309,7 @@ function expand(str, isTop) {
|
|||||||
var isOptions = m.body.indexOf(',') >= 0;
|
var isOptions = m.body.indexOf(',') >= 0;
|
||||||
if (!isSequence && !isOptions) {
|
if (!isSequence && !isOptions) {
|
||||||
// {a},b}
|
// {a},b}
|
||||||
if (m.post.match(/,.*\}/)) {
|
if (m.post.match(/,(?!,).*\}/)) {
|
||||||
str = m.pre + '{' + m.body + escClose + m.post;
|
str = m.pre + '{' + m.body + escClose + m.post;
|
||||||
return expand(str);
|
return expand(str);
|
||||||
}
|
}
|
||||||
@@ -3393,7 +3403,7 @@ function expand(str, isTop) {
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 6891:
|
/***/ 7087:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
module.exports = function (xs, fn) {
|
module.exports = function (xs, fn) {
|
||||||
@@ -3413,19 +3423,19 @@ var isArray = Array.isArray || function (xs) {
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 3973:
|
/***/ 3772:
|
||||||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
module.exports = minimatch
|
module.exports = minimatch
|
||||||
minimatch.Minimatch = Minimatch
|
minimatch.Minimatch = Minimatch
|
||||||
|
|
||||||
var path = (function () { try { return __nccwpck_require__(1017) } catch (e) {}}()) || {
|
var path = (function () { try { return __nccwpck_require__(6928) } catch (e) {}}()) || {
|
||||||
sep: '/'
|
sep: '/'
|
||||||
}
|
}
|
||||||
minimatch.sep = path.sep
|
minimatch.sep = path.sep
|
||||||
|
|
||||||
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
|
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
|
||||||
var expand = __nccwpck_require__(3717)
|
var expand = __nccwpck_require__(4691)
|
||||||
|
|
||||||
var plTypes = {
|
var plTypes = {
|
||||||
'!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
|
'!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
|
||||||
@@ -4367,27 +4377,27 @@ function regExpEscape (s) {
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 4294:
|
/***/ 770:
|
||||||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
module.exports = __nccwpck_require__(4219);
|
module.exports = __nccwpck_require__(218);
|
||||||
|
|
||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 4219:
|
/***/ 218:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
|
|
||||||
var net = __nccwpck_require__(1808);
|
var net = __nccwpck_require__(9278);
|
||||||
var tls = __nccwpck_require__(4404);
|
var tls = __nccwpck_require__(4756);
|
||||||
var http = __nccwpck_require__(3685);
|
var http = __nccwpck_require__(8611);
|
||||||
var https = __nccwpck_require__(5687);
|
var https = __nccwpck_require__(5692);
|
||||||
var events = __nccwpck_require__(2361);
|
var events = __nccwpck_require__(4434);
|
||||||
var assert = __nccwpck_require__(9491);
|
var assert = __nccwpck_require__(2613);
|
||||||
var util = __nccwpck_require__(3837);
|
var util = __nccwpck_require__(9023);
|
||||||
|
|
||||||
|
|
||||||
exports.httpOverHttp = httpOverHttp;
|
exports.httpOverHttp = httpOverHttp;
|
||||||
@@ -4647,7 +4657,7 @@ exports.debug = debug; // for test
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 5840:
|
/***/ 2048:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -4711,29 +4721,29 @@ Object.defineProperty(exports, "parse", ({
|
|||||||
}
|
}
|
||||||
}));
|
}));
|
||||||
|
|
||||||
var _v = _interopRequireDefault(__nccwpck_require__(8628));
|
var _v = _interopRequireDefault(__nccwpck_require__(6415));
|
||||||
|
|
||||||
var _v2 = _interopRequireDefault(__nccwpck_require__(6409));
|
var _v2 = _interopRequireDefault(__nccwpck_require__(1697));
|
||||||
|
|
||||||
var _v3 = _interopRequireDefault(__nccwpck_require__(5122));
|
var _v3 = _interopRequireDefault(__nccwpck_require__(4676));
|
||||||
|
|
||||||
var _v4 = _interopRequireDefault(__nccwpck_require__(9120));
|
var _v4 = _interopRequireDefault(__nccwpck_require__(9771));
|
||||||
|
|
||||||
var _nil = _interopRequireDefault(__nccwpck_require__(5332));
|
var _nil = _interopRequireDefault(__nccwpck_require__(7723));
|
||||||
|
|
||||||
var _version = _interopRequireDefault(__nccwpck_require__(1595));
|
var _version = _interopRequireDefault(__nccwpck_require__(5868));
|
||||||
|
|
||||||
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
|
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
|
||||||
|
|
||||||
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
|
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
|
||||||
|
|
||||||
var _parse = _interopRequireDefault(__nccwpck_require__(2746));
|
var _parse = _interopRequireDefault(__nccwpck_require__(7267));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 4569:
|
/***/ 216:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -4744,7 +4754,7 @@ Object.defineProperty(exports, "__esModule", ({
|
|||||||
}));
|
}));
|
||||||
exports["default"] = void 0;
|
exports["default"] = void 0;
|
||||||
|
|
||||||
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
|
var _crypto = _interopRequireDefault(__nccwpck_require__(6982));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
@@ -4763,7 +4773,7 @@ exports["default"] = _default;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 5332:
|
/***/ 7723:
|
||||||
/***/ ((__unused_webpack_module, exports) => {
|
/***/ ((__unused_webpack_module, exports) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -4778,7 +4788,7 @@ exports["default"] = _default;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 2746:
|
/***/ 7267:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -4789,7 +4799,7 @@ Object.defineProperty(exports, "__esModule", ({
|
|||||||
}));
|
}));
|
||||||
exports["default"] = void 0;
|
exports["default"] = void 0;
|
||||||
|
|
||||||
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
|
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
@@ -4830,7 +4840,7 @@ exports["default"] = _default;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 814:
|
/***/ 7879:
|
||||||
/***/ ((__unused_webpack_module, exports) => {
|
/***/ ((__unused_webpack_module, exports) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -4845,7 +4855,7 @@ exports["default"] = _default;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 807:
|
/***/ 2973:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -4856,7 +4866,7 @@ Object.defineProperty(exports, "__esModule", ({
|
|||||||
}));
|
}));
|
||||||
exports["default"] = rng;
|
exports["default"] = rng;
|
||||||
|
|
||||||
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
|
var _crypto = _interopRequireDefault(__nccwpck_require__(6982));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
@@ -4876,7 +4886,7 @@ function rng() {
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 5274:
|
/***/ 507:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -4887,7 +4897,7 @@ Object.defineProperty(exports, "__esModule", ({
|
|||||||
}));
|
}));
|
||||||
exports["default"] = void 0;
|
exports["default"] = void 0;
|
||||||
|
|
||||||
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
|
var _crypto = _interopRequireDefault(__nccwpck_require__(6982));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
@@ -4906,7 +4916,7 @@ exports["default"] = _default;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 8950:
|
/***/ 7597:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -4917,7 +4927,7 @@ Object.defineProperty(exports, "__esModule", ({
|
|||||||
}));
|
}));
|
||||||
exports["default"] = void 0;
|
exports["default"] = void 0;
|
||||||
|
|
||||||
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
|
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
@@ -4952,7 +4962,7 @@ exports["default"] = _default;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 8628:
|
/***/ 6415:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -4963,9 +4973,9 @@ Object.defineProperty(exports, "__esModule", ({
|
|||||||
}));
|
}));
|
||||||
exports["default"] = void 0;
|
exports["default"] = void 0;
|
||||||
|
|
||||||
var _rng = _interopRequireDefault(__nccwpck_require__(807));
|
var _rng = _interopRequireDefault(__nccwpck_require__(2973));
|
||||||
|
|
||||||
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
|
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
@@ -5066,7 +5076,7 @@ exports["default"] = _default;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 6409:
|
/***/ 1697:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5077,9 +5087,9 @@ Object.defineProperty(exports, "__esModule", ({
|
|||||||
}));
|
}));
|
||||||
exports["default"] = void 0;
|
exports["default"] = void 0;
|
||||||
|
|
||||||
var _v = _interopRequireDefault(__nccwpck_require__(5998));
|
var _v = _interopRequireDefault(__nccwpck_require__(2930));
|
||||||
|
|
||||||
var _md = _interopRequireDefault(__nccwpck_require__(4569));
|
var _md = _interopRequireDefault(__nccwpck_require__(216));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
@@ -5089,7 +5099,7 @@ exports["default"] = _default;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 5998:
|
/***/ 2930:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5101,9 +5111,9 @@ Object.defineProperty(exports, "__esModule", ({
|
|||||||
exports["default"] = _default;
|
exports["default"] = _default;
|
||||||
exports.URL = exports.DNS = void 0;
|
exports.URL = exports.DNS = void 0;
|
||||||
|
|
||||||
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
|
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
|
||||||
|
|
||||||
var _parse = _interopRequireDefault(__nccwpck_require__(2746));
|
var _parse = _interopRequireDefault(__nccwpck_require__(7267));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
@@ -5174,7 +5184,7 @@ function _default(name, version, hashfunc) {
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 5122:
|
/***/ 4676:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5185,9 +5195,9 @@ Object.defineProperty(exports, "__esModule", ({
|
|||||||
}));
|
}));
|
||||||
exports["default"] = void 0;
|
exports["default"] = void 0;
|
||||||
|
|
||||||
var _rng = _interopRequireDefault(__nccwpck_require__(807));
|
var _rng = _interopRequireDefault(__nccwpck_require__(2973));
|
||||||
|
|
||||||
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
|
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
@@ -5218,7 +5228,7 @@ exports["default"] = _default;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 9120:
|
/***/ 9771:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5229,9 +5239,9 @@ Object.defineProperty(exports, "__esModule", ({
|
|||||||
}));
|
}));
|
||||||
exports["default"] = void 0;
|
exports["default"] = void 0;
|
||||||
|
|
||||||
var _v = _interopRequireDefault(__nccwpck_require__(5998));
|
var _v = _interopRequireDefault(__nccwpck_require__(2930));
|
||||||
|
|
||||||
var _sha = _interopRequireDefault(__nccwpck_require__(5274));
|
var _sha = _interopRequireDefault(__nccwpck_require__(507));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
@@ -5241,7 +5251,7 @@ exports["default"] = _default;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 6900:
|
/***/ 6200:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5252,7 +5262,7 @@ Object.defineProperty(exports, "__esModule", ({
|
|||||||
}));
|
}));
|
||||||
exports["default"] = void 0;
|
exports["default"] = void 0;
|
||||||
|
|
||||||
var _regex = _interopRequireDefault(__nccwpck_require__(814));
|
var _regex = _interopRequireDefault(__nccwpck_require__(7879));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
@@ -5265,7 +5275,7 @@ exports["default"] = _default;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 1595:
|
/***/ 5868:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5276,7 +5286,7 @@ Object.defineProperty(exports, "__esModule", ({
|
|||||||
}));
|
}));
|
||||||
exports["default"] = void 0;
|
exports["default"] = void 0;
|
||||||
|
|
||||||
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
|
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||||
|
|
||||||
@@ -5293,7 +5303,7 @@ exports["default"] = _default;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 9491:
|
/***/ 2613:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5301,7 +5311,7 @@ module.exports = require("assert");
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 6113:
|
/***/ 6982:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5309,7 +5319,7 @@ module.exports = require("crypto");
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 2361:
|
/***/ 4434:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5317,7 +5327,7 @@ module.exports = require("events");
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 7147:
|
/***/ 9896:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5325,7 +5335,7 @@ module.exports = require("fs");
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 3685:
|
/***/ 8611:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5333,7 +5343,7 @@ module.exports = require("http");
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 5687:
|
/***/ 5692:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5341,7 +5351,7 @@ module.exports = require("https");
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 1808:
|
/***/ 9278:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5349,7 +5359,7 @@ module.exports = require("net");
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 2037:
|
/***/ 857:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5357,7 +5367,7 @@ module.exports = require("os");
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 1017:
|
/***/ 6928:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5365,7 +5375,7 @@ module.exports = require("path");
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 2781:
|
/***/ 2203:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5373,7 +5383,7 @@ module.exports = require("stream");
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 4404:
|
/***/ 4756:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5381,7 +5391,7 @@ module.exports = require("tls");
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 3837:
|
/***/ 9023:
|
||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
@@ -5431,7 +5441,7 @@ module.exports = require("util");
|
|||||||
/******/ // startup
|
/******/ // startup
|
||||||
/******/ // Load entry module and return exports
|
/******/ // Load entry module and return exports
|
||||||
/******/ // This entry module is referenced by other modules so it can't be inlined
|
/******/ // This entry module is referenced by other modules so it can't be inlined
|
||||||
/******/ var __webpack_exports__ = __nccwpck_require__(2627);
|
/******/ var __webpack_exports__ = __nccwpck_require__(4711);
|
||||||
/******/ module.exports = __webpack_exports__;
|
/******/ module.exports = __webpack_exports__;
|
||||||
/******/
|
/******/
|
||||||
/******/ })()
|
/******/ })()
|
||||||
|
|||||||
@@ -110,7 +110,7 @@ then
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
apt_get_with_fallbacks libicu72 libicu71 libicu70 libicu69 libicu68 libicu67 libicu66 libicu65 libicu63 libicu60 libicu57 libicu55 libicu52
|
apt_get_with_fallbacks libicu76 libicu75 libicu74 libicu73 libicu72 libicu71 libicu70 libicu69 libicu68 libicu67 libicu66 libicu65 libicu63 libicu60 libicu57 libicu55 libicu52
|
||||||
if [ $? -ne 0 ]
|
if [ $? -ne 0 ]
|
||||||
then
|
then
|
||||||
echo "'$apt_get' failed with exit code '$?'"
|
echo "'$apt_get' failed with exit code '$?'"
|
||||||
|
|||||||
@@ -123,7 +123,7 @@ fi
|
|||||||
# fix upgrade issue with macOS when running as a service
|
# fix upgrade issue with macOS when running as a service
|
||||||
attemptedtargetedfix=0
|
attemptedtargetedfix=0
|
||||||
currentplatform=$(uname | awk '{print tolower($0)}')
|
currentplatform=$(uname | awk '{print tolower($0)}')
|
||||||
if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
|
if [[ "$currentplatform" == 'darwin' && $restartinteractiverunner -eq 0 ]]; then
|
||||||
# We needed a fix for https://github.com/actions/runner/issues/743
|
# We needed a fix for https://github.com/actions/runner/issues/743
|
||||||
# We will recreate the ./externals/nodeXY/bin/node of the past runner version that launched the runnerlistener service
|
# We will recreate the ./externals/nodeXY/bin/node of the past runner version that launched the runnerlistener service
|
||||||
# Otherwise mac gatekeeper kills the processes we spawn on creation as we are running a process with no backing file
|
# Otherwise mac gatekeeper kills the processes we spawn on creation as we are running a process with no backing file
|
||||||
@@ -135,16 +135,22 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
|
|||||||
then
|
then
|
||||||
# inspect the open file handles to find the node process
|
# inspect the open file handles to find the node process
|
||||||
# we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks
|
# we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks
|
||||||
nodever="node20"
|
# Try finding node24 first, then fallback to earlier versions if needed
|
||||||
|
nodever="node24"
|
||||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||||
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node16
|
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node20
|
||||||
then
|
then
|
||||||
nodever="node16"
|
nodever="node20"
|
||||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||||
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
|
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node16
|
||||||
then
|
then
|
||||||
nodever="node12"
|
nodever="node16"
|
||||||
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||||
|
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
|
||||||
|
then
|
||||||
|
nodever="node12"
|
||||||
|
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
if [[ $? -eq 0 && -n "$path" ]]
|
if [[ $? -eq 0 && -n "$path" ]]
|
||||||
|
|||||||
@@ -1,6 +1,37 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
|
# try to use sleep if available
|
||||||
|
if [ -x "$(command -v sleep)" ]; then
|
||||||
|
sleep "$1"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# try to use ping if available
|
||||||
|
if [ -x "$(command -v ping)" ]; then
|
||||||
|
ping -c $(( $1 + 1 )) 127.0.0.1 > /dev/null
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
# try to use read -t from stdin/stdout/stderr if we are in bash
|
||||||
|
if [ -n "$BASH_VERSION" ]; then
|
||||||
|
if command -v read >/dev/null 2>&1; then
|
||||||
|
if [ -t 0 ]; then
|
||||||
|
read -t "$1" -u 0 || :;
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
if [ -t 1 ]; then
|
||||||
|
read -t "$1" -u 1 || :;
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
if [ -t 2 ]; then
|
||||||
|
read -t "$1" -u 2 || :;
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# fallback to a busy wait
|
||||||
SECONDS=0
|
SECONDS=0
|
||||||
while [[ $SECONDS != $1 ]]; do
|
while [[ $SECONDS -lt $1 ]]; do
|
||||||
:
|
:
|
||||||
done
|
done
|
||||||
|
|||||||
13
src/Runner.Common/AuthMigration.cs
Normal file
13
src/Runner.Common/AuthMigration.cs
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
using System;
|
||||||
|
|
||||||
|
namespace GitHub.Runner.Common
|
||||||
|
{
|
||||||
|
public class AuthMigrationEventArgs : EventArgs
|
||||||
|
{
|
||||||
|
public AuthMigrationEventArgs(string trace)
|
||||||
|
{
|
||||||
|
Trace = trace;
|
||||||
|
}
|
||||||
|
public string Trace { get; private set; }
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -23,6 +23,8 @@ namespace GitHub.Runner.Common
|
|||||||
|
|
||||||
Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token);
|
Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token);
|
||||||
|
|
||||||
|
Task AcknowledgeRunnerRequestAsync(string runnerRequestId, Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, CancellationToken token);
|
||||||
|
|
||||||
Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials);
|
Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials);
|
||||||
|
|
||||||
Task ForceRefreshConnection(VssCredentials credentials);
|
Task ForceRefreshConnection(VssCredentials credentials);
|
||||||
@@ -37,6 +39,7 @@ namespace GitHub.Runner.Common
|
|||||||
|
|
||||||
public async Task ConnectAsync(Uri serverUri, VssCredentials credentials)
|
public async Task ConnectAsync(Uri serverUri, VssCredentials credentials)
|
||||||
{
|
{
|
||||||
|
Trace.Entering();
|
||||||
_brokerUri = serverUri;
|
_brokerUri = serverUri;
|
||||||
|
|
||||||
_connection = VssUtil.CreateRawConnection(serverUri, credentials);
|
_connection = VssUtil.CreateRawConnection(serverUri, credentials);
|
||||||
@@ -66,10 +69,17 @@ namespace GitHub.Runner.Common
|
|||||||
var brokerSession = RetryRequest<TaskAgentMessage>(
|
var brokerSession = RetryRequest<TaskAgentMessage>(
|
||||||
async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken, shouldRetry: ShouldRetryException);
|
async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken, shouldRetry: ShouldRetryException);
|
||||||
|
|
||||||
|
|
||||||
return brokerSession;
|
return brokerSession;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public async Task AcknowledgeRunnerRequestAsync(string runnerRequestId, Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
CheckConnection();
|
||||||
|
|
||||||
|
// No retries
|
||||||
|
await _brokerHttpClient.AcknowledgeRunnerRequestAsync(runnerRequestId, sessionId, version, status, os, architecture, cancellationToken);
|
||||||
|
}
|
||||||
|
|
||||||
public async Task DeleteSessionAsync(CancellationToken cancellationToken)
|
public async Task DeleteSessionAsync(CancellationToken cancellationToken)
|
||||||
{
|
{
|
||||||
CheckConnection();
|
CheckConnection();
|
||||||
@@ -88,12 +98,17 @@ namespace GitHub.Runner.Common
|
|||||||
|
|
||||||
public Task ForceRefreshConnection(VssCredentials credentials)
|
public Task ForceRefreshConnection(VssCredentials credentials)
|
||||||
{
|
{
|
||||||
return ConnectAsync(_brokerUri, credentials);
|
if (!string.IsNullOrEmpty(_brokerUri?.AbsoluteUri))
|
||||||
|
{
|
||||||
|
return ConnectAsync(_brokerUri, credentials);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Task.CompletedTask;
|
||||||
}
|
}
|
||||||
|
|
||||||
public bool ShouldRetryException(Exception ex)
|
public bool ShouldRetryException(Exception ex)
|
||||||
{
|
{
|
||||||
if (ex is AccessDeniedException || ex is RunnerNotFoundException)
|
if (ex is AccessDeniedException || ex is RunnerNotFoundException || ex is HostedRunnerDeprovisionedException)
|
||||||
{
|
{
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
using GitHub.Runner.Sdk;
|
using System;
|
||||||
using System;
|
|
||||||
using System.IO;
|
using System.IO;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Runtime.Serialization;
|
using System.Runtime.Serialization;
|
||||||
using System.Text;
|
using System.Text;
|
||||||
using System.Threading;
|
using System.Threading;
|
||||||
|
using GitHub.Runner.Sdk;
|
||||||
|
|
||||||
namespace GitHub.Runner.Common
|
namespace GitHub.Runner.Common
|
||||||
{
|
{
|
||||||
@@ -53,6 +53,9 @@ namespace GitHub.Runner.Common
|
|||||||
[DataMember(EmitDefaultValue = false)]
|
[DataMember(EmitDefaultValue = false)]
|
||||||
public bool UseV2Flow { get; set; }
|
public bool UseV2Flow { get; set; }
|
||||||
|
|
||||||
|
[DataMember(EmitDefaultValue = false)]
|
||||||
|
public bool UseRunnerAdminFlow { get; set; }
|
||||||
|
|
||||||
[DataMember(EmitDefaultValue = false)]
|
[DataMember(EmitDefaultValue = false)]
|
||||||
public string ServerUrlV2 { get; set; }
|
public string ServerUrlV2 { get; set; }
|
||||||
|
|
||||||
@@ -61,8 +64,20 @@ namespace GitHub.Runner.Common
|
|||||||
{
|
{
|
||||||
get
|
get
|
||||||
{
|
{
|
||||||
// Old runners do not have this property. Hosted runners likely don't have this property either.
|
// If the value has been explicitly set, return it.
|
||||||
return _isHostedServer ?? true;
|
if (_isHostedServer.HasValue)
|
||||||
|
{
|
||||||
|
return _isHostedServer.Value;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, try to infer it from the GitHubUrl.
|
||||||
|
if (!string.IsNullOrEmpty(GitHubUrl))
|
||||||
|
{
|
||||||
|
return UrlUtil.IsHostedServer(new UriBuilder(GitHubUrl));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to true since Hosted runners likely don't have this property set.
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
set
|
set
|
||||||
@@ -116,11 +131,15 @@ namespace GitHub.Runner.Common
|
|||||||
bool IsConfigured();
|
bool IsConfigured();
|
||||||
bool IsServiceConfigured();
|
bool IsServiceConfigured();
|
||||||
bool HasCredentials();
|
bool HasCredentials();
|
||||||
|
bool IsMigratedConfigured();
|
||||||
CredentialData GetCredentials();
|
CredentialData GetCredentials();
|
||||||
CredentialData GetMigratedCredentials();
|
CredentialData GetMigratedCredentials();
|
||||||
RunnerSettings GetSettings();
|
RunnerSettings GetSettings();
|
||||||
|
RunnerSettings GetMigratedSettings();
|
||||||
void SaveCredential(CredentialData credential);
|
void SaveCredential(CredentialData credential);
|
||||||
|
void SaveMigratedCredential(CredentialData credential);
|
||||||
void SaveSettings(RunnerSettings settings);
|
void SaveSettings(RunnerSettings settings);
|
||||||
|
void SaveMigratedSettings(RunnerSettings settings);
|
||||||
void DeleteCredential();
|
void DeleteCredential();
|
||||||
void DeleteMigratedCredential();
|
void DeleteMigratedCredential();
|
||||||
void DeleteSettings();
|
void DeleteSettings();
|
||||||
@@ -130,6 +149,7 @@ namespace GitHub.Runner.Common
|
|||||||
{
|
{
|
||||||
private string _binPath;
|
private string _binPath;
|
||||||
private string _configFilePath;
|
private string _configFilePath;
|
||||||
|
private string _migratedConfigFilePath;
|
||||||
private string _credFilePath;
|
private string _credFilePath;
|
||||||
private string _migratedCredFilePath;
|
private string _migratedCredFilePath;
|
||||||
private string _serviceConfigFilePath;
|
private string _serviceConfigFilePath;
|
||||||
@@ -137,6 +157,7 @@ namespace GitHub.Runner.Common
|
|||||||
private CredentialData _creds;
|
private CredentialData _creds;
|
||||||
private CredentialData _migratedCreds;
|
private CredentialData _migratedCreds;
|
||||||
private RunnerSettings _settings;
|
private RunnerSettings _settings;
|
||||||
|
private RunnerSettings _migratedSettings;
|
||||||
|
|
||||||
public override void Initialize(IHostContext hostContext)
|
public override void Initialize(IHostContext hostContext)
|
||||||
{
|
{
|
||||||
@@ -154,6 +175,9 @@ namespace GitHub.Runner.Common
|
|||||||
_configFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Runner);
|
_configFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Runner);
|
||||||
Trace.Info("ConfigFilePath: {0}", _configFilePath);
|
Trace.Info("ConfigFilePath: {0}", _configFilePath);
|
||||||
|
|
||||||
|
_migratedConfigFilePath = hostContext.GetConfigFile(WellKnownConfigFile.MigratedRunner);
|
||||||
|
Trace.Info("MigratedConfigFilePath: {0}", _migratedConfigFilePath);
|
||||||
|
|
||||||
_credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials);
|
_credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials);
|
||||||
Trace.Info("CredFilePath: {0}", _credFilePath);
|
Trace.Info("CredFilePath: {0}", _credFilePath);
|
||||||
|
|
||||||
@@ -169,7 +193,7 @@ namespace GitHub.Runner.Common
|
|||||||
public bool HasCredentials()
|
public bool HasCredentials()
|
||||||
{
|
{
|
||||||
Trace.Info("HasCredentials()");
|
Trace.Info("HasCredentials()");
|
||||||
bool credsStored = (new FileInfo(_credFilePath)).Exists || (new FileInfo(_migratedCredFilePath)).Exists;
|
bool credsStored = new FileInfo(_credFilePath).Exists || new FileInfo(_migratedCredFilePath).Exists;
|
||||||
Trace.Info("stored {0}", credsStored);
|
Trace.Info("stored {0}", credsStored);
|
||||||
return credsStored;
|
return credsStored;
|
||||||
}
|
}
|
||||||
@@ -177,7 +201,7 @@ namespace GitHub.Runner.Common
|
|||||||
public bool IsConfigured()
|
public bool IsConfigured()
|
||||||
{
|
{
|
||||||
Trace.Info("IsConfigured()");
|
Trace.Info("IsConfigured()");
|
||||||
bool configured = new FileInfo(_configFilePath).Exists;
|
bool configured = new FileInfo(_configFilePath).Exists || new FileInfo(_migratedConfigFilePath).Exists;
|
||||||
Trace.Info("IsConfigured: {0}", configured);
|
Trace.Info("IsConfigured: {0}", configured);
|
||||||
return configured;
|
return configured;
|
||||||
}
|
}
|
||||||
@@ -185,11 +209,19 @@ namespace GitHub.Runner.Common
|
|||||||
public bool IsServiceConfigured()
|
public bool IsServiceConfigured()
|
||||||
{
|
{
|
||||||
Trace.Info("IsServiceConfigured()");
|
Trace.Info("IsServiceConfigured()");
|
||||||
bool serviceConfigured = (new FileInfo(_serviceConfigFilePath)).Exists;
|
bool serviceConfigured = new FileInfo(_serviceConfigFilePath).Exists;
|
||||||
Trace.Info($"IsServiceConfigured: {serviceConfigured}");
|
Trace.Info($"IsServiceConfigured: {serviceConfigured}");
|
||||||
return serviceConfigured;
|
return serviceConfigured;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public bool IsMigratedConfigured()
|
||||||
|
{
|
||||||
|
Trace.Info("IsMigratedConfigured()");
|
||||||
|
bool configured = new FileInfo(_migratedConfigFilePath).Exists;
|
||||||
|
Trace.Info("IsMigratedConfigured: {0}", configured);
|
||||||
|
return configured;
|
||||||
|
}
|
||||||
|
|
||||||
public CredentialData GetCredentials()
|
public CredentialData GetCredentials()
|
||||||
{
|
{
|
||||||
if (_creds == null)
|
if (_creds == null)
|
||||||
@@ -229,6 +261,25 @@ namespace GitHub.Runner.Common
|
|||||||
return _settings;
|
return _settings;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public RunnerSettings GetMigratedSettings()
|
||||||
|
{
|
||||||
|
if (_migratedSettings == null)
|
||||||
|
{
|
||||||
|
RunnerSettings configuredSettings = null;
|
||||||
|
if (File.Exists(_migratedConfigFilePath))
|
||||||
|
{
|
||||||
|
string json = File.ReadAllText(_migratedConfigFilePath, Encoding.UTF8);
|
||||||
|
Trace.Info($"Read migrated setting file: {json.Length} chars");
|
||||||
|
configuredSettings = StringUtil.ConvertFromJson<RunnerSettings>(json);
|
||||||
|
}
|
||||||
|
|
||||||
|
ArgUtil.NotNull(configuredSettings, nameof(configuredSettings));
|
||||||
|
_migratedSettings = configuredSettings;
|
||||||
|
}
|
||||||
|
|
||||||
|
return _migratedSettings;
|
||||||
|
}
|
||||||
|
|
||||||
public void SaveCredential(CredentialData credential)
|
public void SaveCredential(CredentialData credential)
|
||||||
{
|
{
|
||||||
Trace.Info("Saving {0} credential @ {1}", credential.Scheme, _credFilePath);
|
Trace.Info("Saving {0} credential @ {1}", credential.Scheme, _credFilePath);
|
||||||
@@ -244,6 +295,21 @@ namespace GitHub.Runner.Common
|
|||||||
File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden);
|
File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void SaveMigratedCredential(CredentialData credential)
|
||||||
|
{
|
||||||
|
Trace.Info("Saving {0} migrated credential @ {1}", credential.Scheme, _migratedCredFilePath);
|
||||||
|
if (File.Exists(_migratedCredFilePath))
|
||||||
|
{
|
||||||
|
// Delete existing credential file first, since the file is hidden and not able to overwrite.
|
||||||
|
Trace.Info("Delete exist runner migrated credential file.");
|
||||||
|
IOUtil.DeleteFile(_migratedCredFilePath);
|
||||||
|
}
|
||||||
|
|
||||||
|
IOUtil.SaveObject(credential, _migratedCredFilePath);
|
||||||
|
Trace.Info("Migrated Credentials Saved.");
|
||||||
|
File.SetAttributes(_migratedCredFilePath, File.GetAttributes(_migratedCredFilePath) | FileAttributes.Hidden);
|
||||||
|
}
|
||||||
|
|
||||||
public void SaveSettings(RunnerSettings settings)
|
public void SaveSettings(RunnerSettings settings)
|
||||||
{
|
{
|
||||||
Trace.Info("Saving runner settings.");
|
Trace.Info("Saving runner settings.");
|
||||||
@@ -259,6 +325,21 @@ namespace GitHub.Runner.Common
|
|||||||
File.SetAttributes(_configFilePath, File.GetAttributes(_configFilePath) | FileAttributes.Hidden);
|
File.SetAttributes(_configFilePath, File.GetAttributes(_configFilePath) | FileAttributes.Hidden);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void SaveMigratedSettings(RunnerSettings settings)
|
||||||
|
{
|
||||||
|
Trace.Info("Saving runner migrated settings");
|
||||||
|
if (File.Exists(_migratedConfigFilePath))
|
||||||
|
{
|
||||||
|
// Delete existing settings file first, since the file is hidden and not able to overwrite.
|
||||||
|
Trace.Info("Delete exist runner migrated settings file.");
|
||||||
|
IOUtil.DeleteFile(_migratedConfigFilePath);
|
||||||
|
}
|
||||||
|
|
||||||
|
IOUtil.SaveObject(settings, _migratedConfigFilePath);
|
||||||
|
Trace.Info("Migrated Settings Saved.");
|
||||||
|
File.SetAttributes(_migratedConfigFilePath, File.GetAttributes(_migratedConfigFilePath) | FileAttributes.Hidden);
|
||||||
|
}
|
||||||
|
|
||||||
public void DeleteCredential()
|
public void DeleteCredential()
|
||||||
{
|
{
|
||||||
IOUtil.Delete(_credFilePath, default(CancellationToken));
|
IOUtil.Delete(_credFilePath, default(CancellationToken));
|
||||||
@@ -273,6 +354,12 @@ namespace GitHub.Runner.Common
|
|||||||
public void DeleteSettings()
|
public void DeleteSettings()
|
||||||
{
|
{
|
||||||
IOUtil.Delete(_configFilePath, default(CancellationToken));
|
IOUtil.Delete(_configFilePath, default(CancellationToken));
|
||||||
|
IOUtil.Delete(_migratedConfigFilePath, default(CancellationToken));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void DeleteMigratedSettings()
|
||||||
|
{
|
||||||
|
IOUtil.Delete(_migratedConfigFilePath, default(CancellationToken));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ namespace GitHub.Runner.Common
|
|||||||
public enum WellKnownConfigFile
|
public enum WellKnownConfigFile
|
||||||
{
|
{
|
||||||
Runner,
|
Runner,
|
||||||
|
MigratedRunner,
|
||||||
Credentials,
|
Credentials,
|
||||||
MigratedCredentials,
|
MigratedCredentials,
|
||||||
RSACredentials,
|
RSACredentials,
|
||||||
@@ -154,6 +155,10 @@ namespace GitHub.Runner.Common
|
|||||||
public const int RunnerUpdating = 3;
|
public const int RunnerUpdating = 3;
|
||||||
public const int RunOnceRunnerUpdating = 4;
|
public const int RunOnceRunnerUpdating = 4;
|
||||||
public const int SessionConflict = 5;
|
public const int SessionConflict = 5;
|
||||||
|
// Temporary error code to indicate that the runner configuration has been refreshed
|
||||||
|
// and the runner should be restarted. This is a temporary code and will be removed in the future after
|
||||||
|
// the runner is migrated to runner admin.
|
||||||
|
public const int RunnerConfigurationRefreshed = 6;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static class Features
|
public static class Features
|
||||||
@@ -162,6 +167,28 @@ namespace GitHub.Runner.Common
|
|||||||
public static readonly string LogTemplateErrorsAsDebugMessages = "DistributedTask.LogTemplateErrorsAsDebugMessages";
|
public static readonly string LogTemplateErrorsAsDebugMessages = "DistributedTask.LogTemplateErrorsAsDebugMessages";
|
||||||
public static readonly string UseContainerPathForTemplate = "DistributedTask.UseContainerPathForTemplate";
|
public static readonly string UseContainerPathForTemplate = "DistributedTask.UseContainerPathForTemplate";
|
||||||
public static readonly string AllowRunnerContainerHooks = "DistributedTask.AllowRunnerContainerHooks";
|
public static readonly string AllowRunnerContainerHooks = "DistributedTask.AllowRunnerContainerHooks";
|
||||||
|
public static readonly string AddCheckRunIdToJobContext = "actions_add_check_run_id_to_job_context";
|
||||||
|
public static readonly string DisplayHelpfulActionsDownloadErrors = "actions_display_helpful_actions_download_errors";
|
||||||
|
public static readonly string SnapshotPreflightHostedRunnerCheck = "actions_snapshot_preflight_hosted_runner_check";
|
||||||
|
public static readonly string SnapshotPreflightImageGenPoolCheck = "actions_snapshot_preflight_image_gen_pool_check";
|
||||||
|
public static readonly string CompareWorkflowParser = "actions_runner_compare_workflow_parser";
|
||||||
|
public static readonly string SetOrchestrationIdEnvForActions = "actions_set_orchestration_id_env_for_actions";
|
||||||
|
}
|
||||||
|
|
||||||
|
// Node version migration related constants
|
||||||
|
public static class NodeMigration
|
||||||
|
{
|
||||||
|
// Node versions
|
||||||
|
public static readonly string Node20 = "node20";
|
||||||
|
public static readonly string Node24 = "node24";
|
||||||
|
|
||||||
|
// Environment variables for controlling node version selection
|
||||||
|
public static readonly string ForceNode24Variable = "FORCE_JAVASCRIPT_ACTIONS_TO_NODE24";
|
||||||
|
public static readonly string AllowUnsecureNodeVersionVariable = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
|
||||||
|
|
||||||
|
// Feature flags for controlling the migration phases
|
||||||
|
public static readonly string UseNode24ByDefaultFlag = "actions.runner.usenode24bydefault";
|
||||||
|
public static readonly string RequireNode24Flag = "actions.runner.requirenode24";
|
||||||
}
|
}
|
||||||
|
|
||||||
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
|
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ using System.Threading.Tasks;
|
|||||||
using GitHub.DistributedTask.Logging;
|
using GitHub.DistributedTask.Logging;
|
||||||
using GitHub.Runner.Common.Util;
|
using GitHub.Runner.Common.Util;
|
||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
|
using GitHub.Services.WebApi.Jwt;
|
||||||
|
|
||||||
namespace GitHub.Runner.Common
|
namespace GitHub.Runner.Common
|
||||||
{
|
{
|
||||||
@@ -37,6 +38,11 @@ namespace GitHub.Runner.Common
|
|||||||
void ShutdownRunner(ShutdownReason reason);
|
void ShutdownRunner(ShutdownReason reason);
|
||||||
void WritePerfCounter(string counter);
|
void WritePerfCounter(string counter);
|
||||||
void LoadDefaultUserAgents();
|
void LoadDefaultUserAgents();
|
||||||
|
|
||||||
|
bool AllowAuthMigration { get; }
|
||||||
|
void EnableAuthMigration(string trace);
|
||||||
|
void DeferAuthMigration(TimeSpan deferred, string trace);
|
||||||
|
event EventHandler<AuthMigrationEventArgs> AuthMigrationChanged;
|
||||||
}
|
}
|
||||||
|
|
||||||
public enum StartupType
|
public enum StartupType
|
||||||
@@ -70,12 +76,21 @@ namespace GitHub.Runner.Common
|
|||||||
private RunnerWebProxy _webProxy = new();
|
private RunnerWebProxy _webProxy = new();
|
||||||
private string _hostType = string.Empty;
|
private string _hostType = string.Empty;
|
||||||
|
|
||||||
|
// disable auth migration by default
|
||||||
|
private readonly ManualResetEventSlim _allowAuthMigration = new ManualResetEventSlim(false);
|
||||||
|
private DateTime _deferredAuthMigrationTime = DateTime.MaxValue;
|
||||||
|
private readonly object _authMigrationLock = new object();
|
||||||
|
private CancellationTokenSource _authMigrationAutoReenableTaskCancellationTokenSource = new();
|
||||||
|
private Task _authMigrationAutoReenableTask;
|
||||||
|
|
||||||
public event EventHandler Unloading;
|
public event EventHandler Unloading;
|
||||||
|
public event EventHandler<AuthMigrationEventArgs> AuthMigrationChanged;
|
||||||
public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token;
|
public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token;
|
||||||
public ShutdownReason RunnerShutdownReason { get; private set; }
|
public ShutdownReason RunnerShutdownReason { get; private set; }
|
||||||
public ISecretMasker SecretMasker => _secretMasker;
|
public ISecretMasker SecretMasker => _secretMasker;
|
||||||
public List<ProductInfoHeaderValue> UserAgents => _userAgents;
|
public List<ProductInfoHeaderValue> UserAgents => _userAgents;
|
||||||
public RunnerWebProxy WebProxy => _webProxy;
|
public RunnerWebProxy WebProxy => _webProxy;
|
||||||
|
public bool AllowAuthMigration => _allowAuthMigration.IsSet;
|
||||||
public HostContext(string hostType, string logFile = null)
|
public HostContext(string hostType, string logFile = null)
|
||||||
{
|
{
|
||||||
// Validate args.
|
// Validate args.
|
||||||
@@ -207,6 +222,71 @@ namespace GitHub.Runner.Common
|
|||||||
LoadDefaultUserAgents();
|
LoadDefaultUserAgents();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// marked as internal for testing
|
||||||
|
internal async Task AuthMigrationAuthReenableAsync(TimeSpan refreshInterval, CancellationToken token)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
while (!token.IsCancellationRequested)
|
||||||
|
{
|
||||||
|
_trace.Verbose($"Auth migration defer timer is set to expire at {_deferredAuthMigrationTime.ToString("O")}. AllowAuthMigration: {_allowAuthMigration.IsSet}.");
|
||||||
|
await Task.Delay(refreshInterval, token);
|
||||||
|
if (!_allowAuthMigration.IsSet && DateTime.UtcNow > _deferredAuthMigrationTime)
|
||||||
|
{
|
||||||
|
_trace.Info($"Auth migration defer timer expired. Allowing auth migration.");
|
||||||
|
EnableAuthMigration("Auth migration defer timer expired.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (TaskCanceledException)
|
||||||
|
{
|
||||||
|
// Task was cancelled, exit the loop.
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
_trace.Info("Error in auth migration reenable task.");
|
||||||
|
_trace.Error(ex);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void EnableAuthMigration(string trace)
|
||||||
|
{
|
||||||
|
_allowAuthMigration.Set();
|
||||||
|
|
||||||
|
lock (_authMigrationLock)
|
||||||
|
{
|
||||||
|
if (_authMigrationAutoReenableTask == null)
|
||||||
|
{
|
||||||
|
var refreshIntervalInMS = 60 * 1000;
|
||||||
|
#if DEBUG
|
||||||
|
// For L0, we will refresh faster
|
||||||
|
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL")))
|
||||||
|
{
|
||||||
|
refreshIntervalInMS = int.Parse(Environment.GetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL"));
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
_authMigrationAutoReenableTask = AuthMigrationAuthReenableAsync(TimeSpan.FromMilliseconds(refreshIntervalInMS), _authMigrationAutoReenableTaskCancellationTokenSource.Token);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_trace.Info($"Enable auth migration at {DateTime.UtcNow.ToString("O")}.");
|
||||||
|
AuthMigrationChanged?.Invoke(this, new AuthMigrationEventArgs(trace));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void DeferAuthMigration(TimeSpan deferred, string trace)
|
||||||
|
{
|
||||||
|
_allowAuthMigration.Reset();
|
||||||
|
|
||||||
|
// defer migration for a while
|
||||||
|
lock (_authMigrationLock)
|
||||||
|
{
|
||||||
|
_deferredAuthMigrationTime = DateTime.UtcNow.Add(deferred);
|
||||||
|
}
|
||||||
|
|
||||||
|
_trace.Info($"Disabled auth migration until {_deferredAuthMigrationTime.ToString("O")}.");
|
||||||
|
AuthMigrationChanged?.Invoke(this, new AuthMigrationEventArgs(trace));
|
||||||
|
}
|
||||||
|
|
||||||
public void LoadDefaultUserAgents()
|
public void LoadDefaultUserAgents()
|
||||||
{
|
{
|
||||||
if (string.IsNullOrEmpty(WebProxy.HttpProxyAddress) && string.IsNullOrEmpty(WebProxy.HttpsProxyAddress))
|
if (string.IsNullOrEmpty(WebProxy.HttpProxyAddress) && string.IsNullOrEmpty(WebProxy.HttpsProxyAddress))
|
||||||
@@ -227,6 +307,36 @@ namespace GitHub.Runner.Common
|
|||||||
{
|
{
|
||||||
_userAgents.Add(new ProductInfoHeaderValue("ClientId", clientId));
|
_userAgents.Add(new ProductInfoHeaderValue("ClientId", clientId));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// for Hosted runner, we can pull orchestrationId from JWT claims of the runner listening token.
|
||||||
|
if (credData != null &&
|
||||||
|
credData.Scheme == Constants.Configuration.OAuthAccessToken &&
|
||||||
|
credData.Data.TryGetValue(Constants.Runner.CommandLine.Args.Token, out var accessToken) &&
|
||||||
|
!string.IsNullOrEmpty(accessToken))
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
var jwt = JsonWebToken.Create(accessToken);
|
||||||
|
var claims = jwt.ExtractClaims();
|
||||||
|
var orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orch_id", StringComparison.OrdinalIgnoreCase))?.Value;
|
||||||
|
if (string.IsNullOrEmpty(orchestrationId))
|
||||||
|
{
|
||||||
|
// fallback to orchid for C# actions-service
|
||||||
|
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!string.IsNullOrEmpty(orchestrationId))
|
||||||
|
{
|
||||||
|
_trace.Info($"Pull OrchestrationId {orchestrationId} from runner JWT claims");
|
||||||
|
_userAgents.Insert(0, new ProductInfoHeaderValue("OrchestrationId", orchestrationId));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
_trace.Error("Fail to extract OrchestrationId from runner JWT claims");
|
||||||
|
_trace.Error(ex);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
var runnerFile = GetConfigFile(WellKnownConfigFile.Runner);
|
var runnerFile = GetConfigFile(WellKnownConfigFile.Runner);
|
||||||
@@ -343,6 +453,12 @@ namespace GitHub.Runner.Common
|
|||||||
".runner");
|
".runner");
|
||||||
break;
|
break;
|
||||||
|
|
||||||
|
case WellKnownConfigFile.MigratedRunner:
|
||||||
|
path = Path.Combine(
|
||||||
|
GetDirectory(WellKnownDirectory.Root),
|
||||||
|
".runner_migrated");
|
||||||
|
break;
|
||||||
|
|
||||||
case WellKnownConfigFile.Credentials:
|
case WellKnownConfigFile.Credentials:
|
||||||
path = Path.Combine(
|
path = Path.Combine(
|
||||||
GetDirectory(WellKnownDirectory.Root),
|
GetDirectory(WellKnownDirectory.Root),
|
||||||
@@ -543,6 +659,18 @@ namespace GitHub.Runner.Common
|
|||||||
_loadContext.Unloading -= LoadContext_Unloading;
|
_loadContext.Unloading -= LoadContext_Unloading;
|
||||||
_loadContext = null;
|
_loadContext = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (_authMigrationAutoReenableTask != null)
|
||||||
|
{
|
||||||
|
_authMigrationAutoReenableTaskCancellationTokenSource?.Cancel();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (_authMigrationAutoReenableTaskCancellationTokenSource != null)
|
||||||
|
{
|
||||||
|
_authMigrationAutoReenableTaskCancellationTokenSource?.Dispose();
|
||||||
|
_authMigrationAutoReenableTaskCancellationTokenSource = null;
|
||||||
|
}
|
||||||
|
|
||||||
_httpTraceSubscription?.Dispose();
|
_httpTraceSubscription?.Dispose();
|
||||||
_diagListenerSubscription?.Dispose();
|
_diagListenerSubscription?.Dispose();
|
||||||
_traceManager?.Dispose();
|
_traceManager?.Dispose();
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ namespace GitHub.Runner.Common
|
|||||||
{
|
{
|
||||||
void InitializeLaunchClient(Uri uri, string token);
|
void InitializeLaunchClient(Uri uri, string token);
|
||||||
|
|
||||||
Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken);
|
Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken, bool displayHelpfulActionsDownloadErrors);
|
||||||
}
|
}
|
||||||
|
|
||||||
public sealed class LaunchServer : RunnerService, ILaunchServer
|
public sealed class LaunchServer : RunnerService, ILaunchServer
|
||||||
@@ -42,12 +42,16 @@ namespace GitHub.Runner.Common
|
|||||||
}
|
}
|
||||||
|
|
||||||
public Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList,
|
public Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList,
|
||||||
CancellationToken cancellationToken)
|
CancellationToken cancellationToken, bool displayHelpfulActionsDownloadErrors)
|
||||||
{
|
{
|
||||||
if (_launchClient != null)
|
if (_launchClient != null)
|
||||||
{
|
{
|
||||||
return _launchClient.GetResolveActionsDownloadInfoAsync(planId, jobId, actionReferenceList,
|
if (!displayHelpfulActionsDownloadErrors)
|
||||||
cancellationToken: cancellationToken);
|
{
|
||||||
|
return _launchClient.GetResolveActionsDownloadInfoAsync(planId, jobId, actionReferenceList,
|
||||||
|
cancellationToken: cancellationToken);
|
||||||
|
}
|
||||||
|
return _launchClient.GetResolveActionsDownloadInfoAsyncV2(planId, jobId, actionReferenceList, cancellationToken);
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new InvalidOperationException("Launch client is not initialized.");
|
throw new InvalidOperationException("Launch client is not initialized.");
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common
|
|||||||
{
|
{
|
||||||
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
|
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
|
||||||
|
|
||||||
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);
|
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, string billingOwnerId, CancellationToken token);
|
||||||
|
|
||||||
Task CompleteJobAsync(
|
Task CompleteJobAsync(
|
||||||
Guid planId,
|
Guid planId,
|
||||||
@@ -29,6 +29,8 @@ namespace GitHub.Runner.Common
|
|||||||
IList<Annotation> jobAnnotations,
|
IList<Annotation> jobAnnotations,
|
||||||
string environmentUrl,
|
string environmentUrl,
|
||||||
IList<Telemetry> telemetry,
|
IList<Telemetry> telemetry,
|
||||||
|
string billingOwnerId,
|
||||||
|
string infrastructureFailureCategory,
|
||||||
CancellationToken token);
|
CancellationToken token);
|
||||||
|
|
||||||
Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
|
Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
|
||||||
@@ -58,11 +60,11 @@ namespace GitHub.Runner.Common
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken cancellationToken)
|
public Task<AgentJobRequestMessage> GetJobMessageAsync(string id, string billingOwnerId, CancellationToken cancellationToken)
|
||||||
{
|
{
|
||||||
CheckConnection();
|
CheckConnection();
|
||||||
return RetryRequest<AgentJobRequestMessage>(
|
return RetryRequest<AgentJobRequestMessage>(
|
||||||
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, VarUtil.OS, cancellationToken), cancellationToken,
|
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, VarUtil.OS, billingOwnerId, cancellationToken), cancellationToken,
|
||||||
shouldRetry: ex =>
|
shouldRetry: ex =>
|
||||||
ex is not TaskOrchestrationJobNotFoundException && // HTTP status 404
|
ex is not TaskOrchestrationJobNotFoundException && // HTTP status 404
|
||||||
ex is not TaskOrchestrationJobAlreadyAcquiredException && // HTTP status 409
|
ex is not TaskOrchestrationJobAlreadyAcquiredException && // HTTP status 409
|
||||||
@@ -78,18 +80,25 @@ namespace GitHub.Runner.Common
|
|||||||
IList<Annotation> jobAnnotations,
|
IList<Annotation> jobAnnotations,
|
||||||
string environmentUrl,
|
string environmentUrl,
|
||||||
IList<Telemetry> telemetry,
|
IList<Telemetry> telemetry,
|
||||||
|
string billingOwnerId,
|
||||||
|
string infrastructureFailureCategory,
|
||||||
CancellationToken cancellationToken)
|
CancellationToken cancellationToken)
|
||||||
{
|
{
|
||||||
CheckConnection();
|
CheckConnection();
|
||||||
return RetryRequest(
|
return RetryRequest(
|
||||||
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, telemetry, cancellationToken), cancellationToken);
|
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, telemetry, billingOwnerId, infrastructureFailureCategory, cancellationToken), cancellationToken,
|
||||||
|
shouldRetry: ex =>
|
||||||
|
ex is not VssUnauthorizedException && // HTTP status 401
|
||||||
|
ex is not TaskOrchestrationJobNotFoundException); // HTTP status 404
|
||||||
}
|
}
|
||||||
|
|
||||||
public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
|
public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
|
||||||
{
|
{
|
||||||
CheckConnection();
|
CheckConnection();
|
||||||
return RetryRequest<RenewJobResponse>(
|
return RetryRequest<RenewJobResponse>(
|
||||||
async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken);
|
async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken,
|
||||||
|
shouldRetry: ex =>
|
||||||
|
ex is not TaskOrchestrationJobNotFoundException); // HTTP status 404
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ namespace GitHub.Runner.Common
|
|||||||
|
|
||||||
Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
|
Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
|
||||||
Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
|
Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
|
||||||
|
Task DeleteRunnerAsync(string githubUrl, string githubToken, ulong runnerId);
|
||||||
Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);
|
Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -43,117 +44,15 @@ namespace GitHub.Runner.Common
|
|||||||
|
|
||||||
public async Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName)
|
public async Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName)
|
||||||
{
|
{
|
||||||
var githubApiUrl = "";
|
var githubApiUrl = $"{GetEntityUrl(githubUrl)}/runners?name={Uri.EscapeDataString(agentName)}";
|
||||||
var gitHubUrlBuilder = new UriBuilder(githubUrl);
|
|
||||||
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
|
|
||||||
var isOrgRunner = path.Length == 1;
|
|
||||||
var isRepoOrEnterpriseRunner = path.Length == 2;
|
|
||||||
var isRepoRunner = isRepoOrEnterpriseRunner && !string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase);
|
|
||||||
|
|
||||||
if (isOrgRunner)
|
|
||||||
{
|
|
||||||
// org runner
|
|
||||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
|
||||||
{
|
|
||||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (isRepoOrEnterpriseRunner)
|
|
||||||
{
|
|
||||||
// Repository runner
|
|
||||||
if (isRepoRunner)
|
|
||||||
{
|
|
||||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
|
||||||
{
|
|
||||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/repos/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/repos/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
// Enterprise runner
|
|
||||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
|
||||||
{
|
|
||||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
|
|
||||||
}
|
|
||||||
|
|
||||||
var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
|
var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
|
||||||
|
|
||||||
return runnersList.ToTaskAgents();
|
return runnersList.ToTaskAgents();
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
|
public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
|
||||||
{
|
{
|
||||||
var githubApiUrl = "";
|
var githubApiUrl = $"{GetEntityUrl(githubUrl)}/runner-groups";
|
||||||
var gitHubUrlBuilder = new UriBuilder(githubUrl);
|
|
||||||
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
|
|
||||||
var isOrgRunner = path.Length == 1;
|
|
||||||
var isRepoOrEnterpriseRunner = path.Length == 2;
|
|
||||||
var isRepoRunner = isRepoOrEnterpriseRunner && !string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase);
|
|
||||||
|
|
||||||
if (isOrgRunner)
|
|
||||||
{
|
|
||||||
// org runner
|
|
||||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
|
||||||
{
|
|
||||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups";
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (isRepoOrEnterpriseRunner)
|
|
||||||
{
|
|
||||||
// Repository Runner
|
|
||||||
if (isRepoRunner)
|
|
||||||
{
|
|
||||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
|
||||||
{
|
|
||||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/repos/{path[0]}/{path[1]}/actions/runner-groups";
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/repos/{path[0]}/{path[1]}/actions/runner-groups";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
// Enterprise Runner
|
|
||||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
|
||||||
{
|
|
||||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups";
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
|
|
||||||
}
|
|
||||||
|
|
||||||
var agentPools = await RetryRequest<RunnerGroupList>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
|
var agentPools = await RetryRequest<RunnerGroupList>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
|
||||||
|
|
||||||
return agentPools?.ToAgentPoolList();
|
return agentPools?.ToAgentPoolList();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -204,6 +103,12 @@ namespace GitHub.Runner.Common
|
|||||||
return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
|
return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public async Task DeleteRunnerAsync(string githubUrl, string githubToken, ulong runnerId)
|
||||||
|
{
|
||||||
|
var githubApiUrl = $"{GetEntityUrl(githubUrl)}/runners/{runnerId}";
|
||||||
|
await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Delete, 3, "Failed to delete agent");
|
||||||
|
}
|
||||||
|
|
||||||
private async Task<T> RetryRequest<T>(string githubApiUrl, string githubToken, RequestType requestType, int maxRetryAttemptsCount = 5, string errorMessage = null, StringContent body = null)
|
private async Task<T> RetryRequest<T>(string githubApiUrl, string githubToken, RequestType requestType, int maxRetryAttemptsCount = 5, string errorMessage = null, StringContent body = null)
|
||||||
{
|
{
|
||||||
int retry = 0;
|
int retry = 0;
|
||||||
@@ -220,13 +125,22 @@ namespace GitHub.Runner.Common
|
|||||||
try
|
try
|
||||||
{
|
{
|
||||||
HttpResponseMessage response = null;
|
HttpResponseMessage response = null;
|
||||||
if (requestType == RequestType.Get)
|
switch (requestType)
|
||||||
{
|
{
|
||||||
response = await httpClient.GetAsync(githubApiUrl);
|
case RequestType.Get:
|
||||||
}
|
response = await httpClient.GetAsync(githubApiUrl);
|
||||||
else
|
break;
|
||||||
{
|
case RequestType.Post:
|
||||||
response = await httpClient.PostAsync(githubApiUrl, body);
|
response = await httpClient.PostAsync(githubApiUrl, body);
|
||||||
|
break;
|
||||||
|
case RequestType.Patch:
|
||||||
|
response = await httpClient.PatchAsync(githubApiUrl, body);
|
||||||
|
break;
|
||||||
|
case RequestType.Delete:
|
||||||
|
response = await httpClient.DeleteAsync(githubApiUrl);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
throw new ArgumentOutOfRangeException(nameof(requestType), requestType, null);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (response != null)
|
if (response != null)
|
||||||
@@ -261,5 +175,61 @@ namespace GitHub.Runner.Common
|
|||||||
await Task.Delay(backOff);
|
await Task.Delay(backOff);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private string GetEntityUrl(string githubUrl)
|
||||||
|
{
|
||||||
|
var githubApiUrl = "";
|
||||||
|
var gitHubUrlBuilder = new UriBuilder(githubUrl);
|
||||||
|
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
|
||||||
|
var isOrgRunner = path.Length == 1;
|
||||||
|
var isRepoOrEnterpriseRunner = path.Length == 2;
|
||||||
|
var isRepoRunner = isRepoOrEnterpriseRunner && !string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase);
|
||||||
|
|
||||||
|
if (isOrgRunner)
|
||||||
|
{
|
||||||
|
// org runner
|
||||||
|
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||||
|
{
|
||||||
|
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions";
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (isRepoOrEnterpriseRunner)
|
||||||
|
{
|
||||||
|
// Repository Runner
|
||||||
|
if (isRepoRunner)
|
||||||
|
{
|
||||||
|
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||||
|
{
|
||||||
|
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/repos/{path[0]}/{path[1]}/actions";
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/repos/{path[0]}/{path[1]}/actions";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// Enterprise Runner
|
||||||
|
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||||
|
{
|
||||||
|
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions";
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
|
||||||
|
}
|
||||||
|
|
||||||
|
return githubApiUrl;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
using GitHub.DistributedTask.WebApi;
|
using System;
|
||||||
using System;
|
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.Threading;
|
using System.Threading;
|
||||||
using System.Threading.Tasks;
|
using System.Threading.Tasks;
|
||||||
using GitHub.Services.WebApi;
|
using GitHub.DistributedTask.WebApi;
|
||||||
using GitHub.Services.Common;
|
|
||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
|
using GitHub.Services.Common;
|
||||||
|
using GitHub.Services.WebApi;
|
||||||
|
|
||||||
namespace GitHub.Runner.Common
|
namespace GitHub.Runner.Common
|
||||||
{
|
{
|
||||||
@@ -50,7 +50,10 @@ namespace GitHub.Runner.Common
|
|||||||
Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken);
|
Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken);
|
||||||
|
|
||||||
// agent update
|
// agent update
|
||||||
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace);
|
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace, CancellationToken cancellationToken = default);
|
||||||
|
|
||||||
|
// runner config refresh
|
||||||
|
Task<string> RefreshRunnerConfigAsync(int agentId, string configType, string encodedRunnerConfig, CancellationToken cancellationToken);
|
||||||
}
|
}
|
||||||
|
|
||||||
public sealed class RunnerServer : RunnerService, IRunnerServer
|
public sealed class RunnerServer : RunnerService, IRunnerServer
|
||||||
@@ -315,10 +318,17 @@ namespace GitHub.Runner.Common
|
|||||||
return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken);
|
return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken);
|
||||||
}
|
}
|
||||||
|
|
||||||
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace)
|
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace, CancellationToken cancellationToken = default)
|
||||||
{
|
{
|
||||||
CheckConnection(RunnerConnectionType.Generic);
|
CheckConnection(RunnerConnectionType.Generic);
|
||||||
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace);
|
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace, cancellationToken: cancellationToken);
|
||||||
|
}
|
||||||
|
|
||||||
|
// runner config refresh
|
||||||
|
public Task<string> RefreshRunnerConfigAsync(int agentId, string configType, string encodedRunnerConfig, CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
CheckConnection(RunnerConnectionType.Generic);
|
||||||
|
return _genericTaskAgentClient.RefreshRunnerConfigAsync(agentId, configType, encodedRunnerConfig, cancellationToken: cancellationToken);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -70,7 +70,8 @@ namespace GitHub.Runner.Common
|
|||||||
|
|
||||||
protected async Task RetryRequest(Func<Task> func,
|
protected async Task RetryRequest(Func<Task> func,
|
||||||
CancellationToken cancellationToken,
|
CancellationToken cancellationToken,
|
||||||
int maxRetryAttemptsCount = 5
|
int maxAttempts = 5,
|
||||||
|
Func<Exception, bool> shouldRetry = null
|
||||||
)
|
)
|
||||||
{
|
{
|
||||||
async Task<Unit> wrappedFunc()
|
async Task<Unit> wrappedFunc()
|
||||||
@@ -78,31 +79,31 @@ namespace GitHub.Runner.Common
|
|||||||
await func();
|
await func();
|
||||||
return Unit.Value;
|
return Unit.Value;
|
||||||
}
|
}
|
||||||
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxRetryAttemptsCount);
|
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxAttempts, shouldRetry);
|
||||||
}
|
}
|
||||||
|
|
||||||
protected async Task<T> RetryRequest<T>(Func<Task<T>> func,
|
protected async Task<T> RetryRequest<T>(Func<Task<T>> func,
|
||||||
CancellationToken cancellationToken,
|
CancellationToken cancellationToken,
|
||||||
int maxRetryAttemptsCount = 5,
|
int maxAttempts = 5,
|
||||||
Func<Exception, bool> shouldRetry = null
|
Func<Exception, bool> shouldRetry = null
|
||||||
)
|
)
|
||||||
{
|
{
|
||||||
var retryCount = 0;
|
var attempt = 0;
|
||||||
while (true)
|
while (true)
|
||||||
{
|
{
|
||||||
retryCount++;
|
attempt++;
|
||||||
cancellationToken.ThrowIfCancellationRequested();
|
cancellationToken.ThrowIfCancellationRequested();
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
return await func();
|
return await func();
|
||||||
}
|
}
|
||||||
// TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
|
// TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
|
||||||
catch (Exception ex) when (retryCount < maxRetryAttemptsCount && (shouldRetry == null || shouldRetry(ex)))
|
catch (Exception ex) when (attempt < maxAttempts && (shouldRetry == null || shouldRetry(ex)))
|
||||||
{
|
{
|
||||||
Trace.Error("Catch exception during request");
|
Trace.Error("Catch exception during request");
|
||||||
Trace.Error(ex);
|
Trace.Error(ex);
|
||||||
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
|
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
|
||||||
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxRetryAttemptsCount - retryCount} attempt left.");
|
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxAttempts - attempt} attempt left.");
|
||||||
await Task.Delay(backOff, cancellationToken);
|
await Task.Delay(backOff, cancellationToken);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,33 @@
|
|||||||
using System;
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
using System.Collections.ObjectModel;
|
using System.Collections.ObjectModel;
|
||||||
|
using GitHub.Runner.Sdk;
|
||||||
|
|
||||||
namespace GitHub.Runner.Common.Util
|
namespace GitHub.Runner.Common.Util
|
||||||
{
|
{
|
||||||
public static class NodeUtil
|
public static class NodeUtil
|
||||||
{
|
{
|
||||||
|
/// <summary>
|
||||||
|
/// Represents details about an environment variable, including its value and source
|
||||||
|
/// </summary>
|
||||||
|
private class EnvironmentVariableInfo
|
||||||
|
{
|
||||||
|
/// <summary>
|
||||||
|
/// Gets or sets whether the value evaluates to true
|
||||||
|
/// </summary>
|
||||||
|
public bool IsTrue { get; set; }
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Gets or sets whether the value came from the workflow environment
|
||||||
|
/// </summary>
|
||||||
|
public bool FromWorkflow { get; set; }
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Gets or sets whether the value came from the system environment
|
||||||
|
/// </summary>
|
||||||
|
public bool FromSystem { get; set; }
|
||||||
|
}
|
||||||
|
|
||||||
private const string _defaultNodeVersion = "node20";
|
private const string _defaultNodeVersion = "node20";
|
||||||
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node20" });
|
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node20" });
|
||||||
public static string GetInternalNodeVersion()
|
public static string GetInternalNodeVersion()
|
||||||
@@ -18,5 +41,122 @@ namespace GitHub.Runner.Common.Util
|
|||||||
}
|
}
|
||||||
return _defaultNodeVersion;
|
return _defaultNodeVersion;
|
||||||
}
|
}
|
||||||
|
/// <summary>
|
||||||
|
/// Determines the appropriate Node version for Actions to use
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="workflowEnvironment">Optional dictionary containing workflow-level environment variables</param>
|
||||||
|
/// <param name="useNode24ByDefault">Feature flag indicating if Node 24 should be the default</param>
|
||||||
|
/// <param name="requireNode24">Feature flag indicating if Node 24 is required</param>
|
||||||
|
/// <returns>The Node version to use (node20 or node24) and warning message if both env vars are set</returns>
|
||||||
|
public static (string nodeVersion, string warningMessage) DetermineActionsNodeVersion(
|
||||||
|
IDictionary<string, string> workflowEnvironment = null,
|
||||||
|
bool useNode24ByDefault = false,
|
||||||
|
bool requireNode24 = false)
|
||||||
|
{
|
||||||
|
// Phase 3: Always use Node 24 regardless of environment variables
|
||||||
|
if (requireNode24)
|
||||||
|
{
|
||||||
|
return (Constants.Runner.NodeMigration.Node24, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get environment variable details with source information
|
||||||
|
var forceNode24Details = GetEnvironmentVariableDetails(
|
||||||
|
Constants.Runner.NodeMigration.ForceNode24Variable, workflowEnvironment);
|
||||||
|
|
||||||
|
var allowUnsecureNodeDetails = GetEnvironmentVariableDetails(
|
||||||
|
Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable, workflowEnvironment);
|
||||||
|
|
||||||
|
bool forceNode24 = forceNode24Details.IsTrue;
|
||||||
|
bool allowUnsecureNode = allowUnsecureNodeDetails.IsTrue;
|
||||||
|
string warningMessage = null;
|
||||||
|
|
||||||
|
// Check if both flags are set from the same source
|
||||||
|
bool bothFromWorkflow = forceNode24Details.IsTrue && allowUnsecureNodeDetails.IsTrue &&
|
||||||
|
forceNode24Details.FromWorkflow && allowUnsecureNodeDetails.FromWorkflow;
|
||||||
|
|
||||||
|
bool bothFromSystem = forceNode24Details.IsTrue && allowUnsecureNodeDetails.IsTrue &&
|
||||||
|
forceNode24Details.FromSystem && allowUnsecureNodeDetails.FromSystem;
|
||||||
|
|
||||||
|
// Handle the case when both are set in the same source
|
||||||
|
if (bothFromWorkflow || bothFromSystem)
|
||||||
|
{
|
||||||
|
string source = bothFromWorkflow ? "workflow" : "system";
|
||||||
|
string defaultVersion = useNode24ByDefault ? Constants.Runner.NodeMigration.Node24 : Constants.Runner.NodeMigration.Node20;
|
||||||
|
warningMessage = $"Both {Constants.Runner.NodeMigration.ForceNode24Variable} and {Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable} environment variables are set to true in the {source} environment. This is likely a configuration error. Using the default Node version: {defaultVersion}.";
|
||||||
|
return (defaultVersion, warningMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Phase 2: Node 24 is the default
|
||||||
|
if (useNode24ByDefault)
|
||||||
|
{
|
||||||
|
if (allowUnsecureNode)
|
||||||
|
{
|
||||||
|
return (Constants.Runner.NodeMigration.Node20, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (Constants.Runner.NodeMigration.Node24, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Phase 1: Node 20 is the default
|
||||||
|
if (forceNode24)
|
||||||
|
{
|
||||||
|
return (Constants.Runner.NodeMigration.Node24, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (Constants.Runner.NodeMigration.Node20, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Checks if Node24 is requested but running on ARM32 Linux, and determines if fallback is needed.
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="preferredVersion">The preferred Node version</param>
|
||||||
|
/// <returns>A tuple containing the adjusted node version and an optional warning message</returns>
|
||||||
|
public static (string nodeVersion, string warningMessage) CheckNodeVersionForLinuxArm32(string preferredVersion)
|
||||||
|
{
|
||||||
|
if (string.Equals(preferredVersion, Constants.Runner.NodeMigration.Node24, StringComparison.OrdinalIgnoreCase) &&
|
||||||
|
Constants.Runner.PlatformArchitecture.Equals(Constants.Architecture.Arm) &&
|
||||||
|
Constants.Runner.Platform.Equals(Constants.OSPlatform.Linux))
|
||||||
|
{
|
||||||
|
return (Constants.Runner.NodeMigration.Node20, "Node 24 is not supported on Linux ARM32 platforms. Falling back to Node 20.");
|
||||||
|
}
|
||||||
|
|
||||||
|
return (preferredVersion, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Gets detailed information about an environment variable from both workflow and system environments
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="variableName">The name of the environment variable</param>
|
||||||
|
/// <param name="workflowEnvironment">Optional dictionary containing workflow-level environment variables</param>
|
||||||
|
/// <returns>An EnvironmentVariableInfo object containing details about the variable from both sources</returns>
|
||||||
|
private static EnvironmentVariableInfo GetEnvironmentVariableDetails(string variableName, IDictionary<string, string> workflowEnvironment)
|
||||||
|
{
|
||||||
|
var info = new EnvironmentVariableInfo();
|
||||||
|
|
||||||
|
// Check workflow environment
|
||||||
|
bool foundInWorkflow = false;
|
||||||
|
string workflowValue = null;
|
||||||
|
|
||||||
|
if (workflowEnvironment != null && workflowEnvironment.TryGetValue(variableName, out workflowValue))
|
||||||
|
{
|
||||||
|
foundInWorkflow = true;
|
||||||
|
info.FromWorkflow = true;
|
||||||
|
info.IsTrue = StringUtil.ConvertToBoolean(workflowValue); // Workflow value takes precedence for the boolean value
|
||||||
|
}
|
||||||
|
|
||||||
|
// Also check system environment
|
||||||
|
string systemValue = Environment.GetEnvironmentVariable(variableName);
|
||||||
|
bool foundInSystem = !string.IsNullOrEmpty(systemValue);
|
||||||
|
|
||||||
|
info.FromSystem = foundInSystem;
|
||||||
|
|
||||||
|
// If not found in workflow, use system values
|
||||||
|
if (!foundInWorkflow)
|
||||||
|
{
|
||||||
|
info.IsTrue = StringUtil.ConvertToBoolean(systemValue);
|
||||||
|
}
|
||||||
|
|
||||||
|
return info;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,9 +9,9 @@ using System.Threading;
|
|||||||
using System.Threading.Tasks;
|
using System.Threading.Tasks;
|
||||||
using GitHub.DistributedTask.WebApi;
|
using GitHub.DistributedTask.WebApi;
|
||||||
using GitHub.Runner.Common;
|
using GitHub.Runner.Common;
|
||||||
|
using GitHub.Runner.Common.Util;
|
||||||
using GitHub.Runner.Listener.Configuration;
|
using GitHub.Runner.Listener.Configuration;
|
||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
using GitHub.Runner.Common.Util;
|
|
||||||
using GitHub.Services.Common;
|
using GitHub.Services.Common;
|
||||||
using GitHub.Services.OAuth;
|
using GitHub.Services.OAuth;
|
||||||
using GitHub.Services.WebApi;
|
using GitHub.Services.WebApi;
|
||||||
@@ -23,34 +23,67 @@ namespace GitHub.Runner.Listener
|
|||||||
private RunnerSettings _settings;
|
private RunnerSettings _settings;
|
||||||
private ITerminal _term;
|
private ITerminal _term;
|
||||||
private TimeSpan _getNextMessageRetryInterval;
|
private TimeSpan _getNextMessageRetryInterval;
|
||||||
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
|
private TaskAgentStatus _runnerStatus = TaskAgentStatus.Online;
|
||||||
private CancellationTokenSource _getMessagesTokenSource;
|
private CancellationTokenSource _getMessagesTokenSource;
|
||||||
private VssCredentials _creds;
|
private VssCredentials _creds;
|
||||||
|
private VssCredentials _credsV2;
|
||||||
private TaskAgentSession _session;
|
private TaskAgentSession _session;
|
||||||
|
private IRunnerServer _runnerServer;
|
||||||
private IBrokerServer _brokerServer;
|
private IBrokerServer _brokerServer;
|
||||||
|
private ICredentialManager _credMgr;
|
||||||
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
|
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
|
||||||
private bool _accessTokenRevoked = false;
|
private bool _accessTokenRevoked = false;
|
||||||
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
|
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
|
||||||
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
|
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
|
||||||
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
|
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
|
||||||
|
private bool _needRefreshCredsV2 = false;
|
||||||
|
private bool _handlerInitialized = false;
|
||||||
|
private bool _isMigratedSettings = false;
|
||||||
|
private const int _maxMigratedSettingsRetries = 3;
|
||||||
|
private int _migratedSettingsRetryCount = 0;
|
||||||
|
|
||||||
|
public BrokerMessageListener()
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
public BrokerMessageListener(RunnerSettings settings, bool isMigratedSettings = false)
|
||||||
|
{
|
||||||
|
_settings = settings;
|
||||||
|
_isMigratedSettings = isMigratedSettings;
|
||||||
|
}
|
||||||
|
|
||||||
public override void Initialize(IHostContext hostContext)
|
public override void Initialize(IHostContext hostContext)
|
||||||
{
|
{
|
||||||
base.Initialize(hostContext);
|
base.Initialize(hostContext);
|
||||||
|
|
||||||
_term = HostContext.GetService<ITerminal>();
|
_term = HostContext.GetService<ITerminal>();
|
||||||
|
_runnerServer = HostContext.GetService<IRunnerServer>();
|
||||||
_brokerServer = HostContext.GetService<IBrokerServer>();
|
_brokerServer = HostContext.GetService<IBrokerServer>();
|
||||||
|
_credMgr = HostContext.GetService<ICredentialManager>();
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<CreateSessionResult> CreateSessionAsync(CancellationToken token)
|
public async Task<CreateSessionResult> CreateSessionAsync(CancellationToken token)
|
||||||
{
|
{
|
||||||
Trace.Entering();
|
Trace.Entering();
|
||||||
|
|
||||||
// Settings
|
// Load settings if not provided through constructor
|
||||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
if (_settings == null)
|
||||||
_settings = configManager.LoadSettings();
|
{
|
||||||
var serverUrl = _settings.ServerUrlV2;
|
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||||
|
_settings = configManager.LoadSettings();
|
||||||
|
Trace.Info("Settings loaded from config manager");
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
Trace.Info("Using provided settings");
|
||||||
|
if (_isMigratedSettings)
|
||||||
|
{
|
||||||
|
Trace.Info("Using migrated settings from .runner_migrated");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var serverUrlV2 = _settings.ServerUrlV2;
|
||||||
|
var serverUrl = _settings.ServerUrl;
|
||||||
Trace.Info(_settings);
|
Trace.Info(_settings);
|
||||||
|
|
||||||
if (string.IsNullOrEmpty(_settings.ServerUrlV2))
|
if (string.IsNullOrEmpty(_settings.ServerUrlV2))
|
||||||
@@ -60,8 +93,7 @@ namespace GitHub.Runner.Listener
|
|||||||
|
|
||||||
// Create connection.
|
// Create connection.
|
||||||
Trace.Info("Loading Credentials");
|
Trace.Info("Loading Credentials");
|
||||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
_creds = _credMgr.LoadCredentials(allowAuthUrlV2: false);
|
||||||
_creds = credMgr.LoadCredentials();
|
|
||||||
|
|
||||||
var agent = new TaskAgentReference
|
var agent = new TaskAgentReference
|
||||||
{
|
{
|
||||||
@@ -84,9 +116,18 @@ namespace GitHub.Runner.Listener
|
|||||||
try
|
try
|
||||||
{
|
{
|
||||||
Trace.Info("Connecting to the Broker Server...");
|
Trace.Info("Connecting to the Broker Server...");
|
||||||
await _brokerServer.ConnectAsync(new Uri(serverUrl), _creds);
|
_credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||||
|
await _brokerServer.ConnectAsync(new Uri(serverUrlV2), _credsV2);
|
||||||
Trace.Info("VssConnection created");
|
Trace.Info("VssConnection created");
|
||||||
|
|
||||||
|
if (!string.IsNullOrEmpty(serverUrl) &&
|
||||||
|
!string.Equals(serverUrl, serverUrlV2, StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
Trace.Info("Connecting to the Runner server...");
|
||||||
|
await _runnerServer.ConnectAsync(new Uri(serverUrl), _creds);
|
||||||
|
Trace.Info("VssConnection created");
|
||||||
|
}
|
||||||
|
|
||||||
_term.WriteLine();
|
_term.WriteLine();
|
||||||
_term.WriteSuccessMessage("Connected to GitHub");
|
_term.WriteSuccessMessage("Connected to GitHub");
|
||||||
_term.WriteLine();
|
_term.WriteLine();
|
||||||
@@ -101,6 +142,13 @@ namespace GitHub.Runner.Listener
|
|||||||
encounteringError = false;
|
encounteringError = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!_handlerInitialized)
|
||||||
|
{
|
||||||
|
// Register event handler for auth migration state change
|
||||||
|
HostContext.AuthMigrationChanged += HandleAuthMigrationChanged;
|
||||||
|
_handlerInitialized = true;
|
||||||
|
}
|
||||||
|
|
||||||
return CreateSessionResult.Success;
|
return CreateSessionResult.Success;
|
||||||
}
|
}
|
||||||
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
||||||
@@ -119,7 +167,22 @@ namespace GitHub.Runner.Listener
|
|||||||
Trace.Error("Catch exception during create session.");
|
Trace.Error("Catch exception during create session.");
|
||||||
Trace.Error(ex);
|
Trace.Error(ex);
|
||||||
|
|
||||||
if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred)
|
// If using migrated settings, limit the number of retries before returning failure
|
||||||
|
if (_isMigratedSettings)
|
||||||
|
{
|
||||||
|
_migratedSettingsRetryCount++;
|
||||||
|
Trace.Warning($"Migrated settings retry {_migratedSettingsRetryCount} of {_maxMigratedSettingsRetries}");
|
||||||
|
|
||||||
|
if (_migratedSettingsRetryCount >= _maxMigratedSettingsRetries)
|
||||||
|
{
|
||||||
|
Trace.Warning("Reached maximum retry attempts for migrated settings. Returning failure to try default settings.");
|
||||||
|
return CreateSessionResult.Failure;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!HostContext.AllowAuthMigration &&
|
||||||
|
ex is VssOAuthTokenRequestException vssOAuthEx &&
|
||||||
|
_credsV2.Federated is VssOAuthCredential vssOAuthCred)
|
||||||
{
|
{
|
||||||
// "invalid_client" means the runner registration has been deleted from the server.
|
// "invalid_client" means the runner registration has been deleted from the server.
|
||||||
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
||||||
@@ -131,7 +194,7 @@ namespace GitHub.Runner.Listener
|
|||||||
// Check whether we get 401 because the runner registration already removed by the service.
|
// Check whether we get 401 because the runner registration already removed by the service.
|
||||||
// If the runner registration get deleted, we can't exchange oauth token.
|
// If the runner registration get deleted, we can't exchange oauth token.
|
||||||
Trace.Error("Test oauth app registration.");
|
Trace.Error("Test oauth app registration.");
|
||||||
var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl));
|
var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrlV2));
|
||||||
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
|
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
|
||||||
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
||||||
{
|
{
|
||||||
@@ -140,7 +203,8 @@ namespace GitHub.Runner.Listener
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!IsSessionCreationExceptionRetriable(ex))
|
if (!HostContext.AllowAuthMigration &&
|
||||||
|
!IsSessionCreationExceptionRetriable(ex))
|
||||||
{
|
{
|
||||||
_term.WriteError($"Failed to create session. {ex.Message}");
|
_term.WriteError($"Failed to create session. {ex.Message}");
|
||||||
if (ex is TaskAgentSessionConflictException)
|
if (ex is TaskAgentSessionConflictException)
|
||||||
@@ -150,6 +214,12 @@ namespace GitHub.Runner.Listener
|
|||||||
return CreateSessionResult.Failure;
|
return CreateSessionResult.Failure;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (HostContext.AllowAuthMigration)
|
||||||
|
{
|
||||||
|
Trace.Info("Disable migration mode for 60 minutes.");
|
||||||
|
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Session creation failed with exception: {ex}");
|
||||||
|
}
|
||||||
|
|
||||||
if (!encounteringError) //print the message only on the first error
|
if (!encounteringError) //print the message only on the first error
|
||||||
{
|
{
|
||||||
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
|
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
|
||||||
@@ -166,6 +236,11 @@ namespace GitHub.Runner.Listener
|
|||||||
{
|
{
|
||||||
if (_session != null && _session.SessionId != Guid.Empty)
|
if (_session != null && _session.SessionId != Guid.Empty)
|
||||||
{
|
{
|
||||||
|
if (_handlerInitialized)
|
||||||
|
{
|
||||||
|
HostContext.AuthMigrationChanged -= HandleAuthMigrationChanged;
|
||||||
|
}
|
||||||
|
|
||||||
if (!_accessTokenRevoked)
|
if (!_accessTokenRevoked)
|
||||||
{
|
{
|
||||||
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||||
@@ -183,7 +258,7 @@ namespace GitHub.Runner.Listener
|
|||||||
public void OnJobStatus(object sender, JobStatusEventArgs e)
|
public void OnJobStatus(object sender, JobStatusEventArgs e)
|
||||||
{
|
{
|
||||||
Trace.Info("Received job status event. JobState: {0}", e.Status);
|
Trace.Info("Received job status event. JobState: {0}", e.Status);
|
||||||
runnerStatus = e.Status;
|
_runnerStatus = e.Status;
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
_getMessagesTokenSource?.Cancel();
|
_getMessagesTokenSource?.Cancel();
|
||||||
@@ -208,8 +283,15 @@ namespace GitHub.Runner.Listener
|
|||||||
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
|
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
|
if (_needRefreshCredsV2)
|
||||||
|
{
|
||||||
|
Trace.Info("Refreshing broker connection.");
|
||||||
|
await RefreshBrokerConnectionAsync();
|
||||||
|
_needRefreshCredsV2 = false;
|
||||||
|
}
|
||||||
|
|
||||||
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
|
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
|
||||||
runnerStatus,
|
_runnerStatus,
|
||||||
BuildConstants.RunnerPackage.Version,
|
BuildConstants.RunnerPackage.Version,
|
||||||
VarUtil.OS,
|
VarUtil.OS,
|
||||||
VarUtil.OSArchitecture,
|
VarUtil.OSArchitecture,
|
||||||
@@ -238,11 +320,16 @@ namespace GitHub.Runner.Listener
|
|||||||
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
|
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
catch (AccessDeniedException e) when (e.ErrorCode == 1)
|
catch (HostedRunnerDeprovisionedException)
|
||||||
|
{
|
||||||
|
Trace.Info("Hosted runner has been deprovisioned.");
|
||||||
|
throw;
|
||||||
|
}
|
||||||
|
catch (AccessDeniedException e) when (e.ErrorCode == 1 && !HostContext.AllowAuthMigration)
|
||||||
{
|
{
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
catch (RunnerNotFoundException)
|
catch (RunnerNotFoundException) when (!HostContext.AllowAuthMigration)
|
||||||
{
|
{
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
@@ -251,7 +338,8 @@ namespace GitHub.Runner.Listener
|
|||||||
Trace.Error("Catch exception during get next message.");
|
Trace.Error("Catch exception during get next message.");
|
||||||
Trace.Error(ex);
|
Trace.Error(ex);
|
||||||
|
|
||||||
if (!IsGetNextMessageExceptionRetriable(ex))
|
if (!HostContext.AllowAuthMigration &&
|
||||||
|
!IsGetNextMessageExceptionRetriable(ex))
|
||||||
{
|
{
|
||||||
throw new NonRetryableException("Get next message failed with non-retryable error.", ex);
|
throw new NonRetryableException("Get next message failed with non-retryable error.", ex);
|
||||||
}
|
}
|
||||||
@@ -282,6 +370,12 @@ namespace GitHub.Runner.Listener
|
|||||||
encounteringError = true;
|
encounteringError = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (HostContext.AllowAuthMigration)
|
||||||
|
{
|
||||||
|
Trace.Info("Disable migration mode for 60 minutes.");
|
||||||
|
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Get next message failed with exception: {ex}");
|
||||||
|
}
|
||||||
|
|
||||||
// re-create VssConnection before next retry
|
// re-create VssConnection before next retry
|
||||||
await RefreshBrokerConnectionAsync();
|
await RefreshBrokerConnectionAsync();
|
||||||
|
|
||||||
@@ -313,7 +407,7 @@ namespace GitHub.Runner.Listener
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task RefreshListenerTokenAsync(CancellationToken cancellationToken)
|
public async Task RefreshListenerTokenAsync()
|
||||||
{
|
{
|
||||||
await RefreshBrokerConnectionAsync();
|
await RefreshBrokerConnectionAsync();
|
||||||
}
|
}
|
||||||
@@ -323,6 +417,21 @@ namespace GitHub.Runner.Listener
|
|||||||
await Task.CompletedTask;
|
await Task.CompletedTask;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public async Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // Short timeout
|
||||||
|
using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token);
|
||||||
|
Trace.Info($"Acknowledging runner request '{runnerRequestId}'.");
|
||||||
|
await _brokerServer.AcknowledgeRunnerRequestAsync(
|
||||||
|
runnerRequestId,
|
||||||
|
_session.SessionId,
|
||||||
|
_runnerStatus,
|
||||||
|
BuildConstants.RunnerPackage.Version,
|
||||||
|
VarUtil.OS,
|
||||||
|
VarUtil.OSArchitecture,
|
||||||
|
linkedCts.Token);
|
||||||
|
}
|
||||||
|
|
||||||
private bool IsGetNextMessageExceptionRetriable(Exception ex)
|
private bool IsGetNextMessageExceptionRetriable(Exception ex)
|
||||||
{
|
{
|
||||||
if (ex is TaskAgentNotFoundException ||
|
if (ex is TaskAgentNotFoundException ||
|
||||||
@@ -416,17 +525,16 @@ namespace GitHub.Runner.Listener
|
|||||||
|
|
||||||
private async Task RefreshBrokerConnectionAsync()
|
private async Task RefreshBrokerConnectionAsync()
|
||||||
{
|
{
|
||||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
Trace.Info("Reload credentials.");
|
||||||
_settings = configManager.LoadSettings();
|
_credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||||
|
await _brokerServer.ConnectAsync(new Uri(_settings.ServerUrlV2), _credsV2);
|
||||||
|
Trace.Info("Connection to Broker Server recreated.");
|
||||||
|
}
|
||||||
|
|
||||||
if (string.IsNullOrEmpty(_settings.ServerUrlV2))
|
private void HandleAuthMigrationChanged(object sender, EventArgs e)
|
||||||
{
|
{
|
||||||
throw new InvalidOperationException("ServerUrlV2 is not set");
|
Trace.Info($"Auth migration changed. Current allow auth migration state: {HostContext.AllowAuthMigration}");
|
||||||
}
|
_needRefreshCredsV2 = true;
|
||||||
|
|
||||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
|
||||||
VssCredentials creds = credMgr.LoadCredentials();
|
|
||||||
await _brokerServer.ConnectAsync(new Uri(_settings.ServerUrlV2), creds);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -25,6 +25,7 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
Task UnconfigureAsync(CommandSettings command);
|
Task UnconfigureAsync(CommandSettings command);
|
||||||
void DeleteLocalRunnerConfig();
|
void DeleteLocalRunnerConfig();
|
||||||
RunnerSettings LoadSettings();
|
RunnerSettings LoadSettings();
|
||||||
|
RunnerSettings LoadMigratedSettings();
|
||||||
}
|
}
|
||||||
|
|
||||||
public sealed class ConfigurationManager : RunnerService, IConfigurationManager
|
public sealed class ConfigurationManager : RunnerService, IConfigurationManager
|
||||||
@@ -66,6 +67,22 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
return settings;
|
return settings;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public RunnerSettings LoadMigratedSettings()
|
||||||
|
{
|
||||||
|
Trace.Info(nameof(LoadMigratedSettings));
|
||||||
|
|
||||||
|
// Check if migrated settings file exists
|
||||||
|
if (!_store.IsMigratedConfigured())
|
||||||
|
{
|
||||||
|
throw new NonRetryableException("No migrated configuration found.");
|
||||||
|
}
|
||||||
|
|
||||||
|
RunnerSettings settings = _store.GetMigratedSettings();
|
||||||
|
Trace.Info("Migrated Settings Loaded");
|
||||||
|
|
||||||
|
return settings;
|
||||||
|
}
|
||||||
|
|
||||||
public async Task ConfigureAsync(CommandSettings command)
|
public async Task ConfigureAsync(CommandSettings command)
|
||||||
{
|
{
|
||||||
_term.WriteLine();
|
_term.WriteLine();
|
||||||
@@ -127,7 +144,7 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
runnerSettings.ServerUrl = inputUrl;
|
runnerSettings.ServerUrl = inputUrl;
|
||||||
// Get the credentials
|
// Get the credentials
|
||||||
credProvider = GetCredentialProvider(command, runnerSettings.ServerUrl);
|
credProvider = GetCredentialProvider(command, runnerSettings.ServerUrl);
|
||||||
creds = credProvider.GetVssCredentials(HostContext);
|
creds = credProvider.GetVssCredentials(HostContext, allowAuthUrlV2: false);
|
||||||
Trace.Info("legacy vss cred retrieved");
|
Trace.Info("legacy vss cred retrieved");
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
@@ -136,8 +153,8 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
|
registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
|
||||||
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
|
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
|
||||||
runnerSettings.ServerUrl = authResult.TenantUrl;
|
runnerSettings.ServerUrl = authResult.TenantUrl;
|
||||||
runnerSettings.UseV2Flow = authResult.UseV2Flow;
|
runnerSettings.UseRunnerAdminFlow = authResult.UseRunnerAdminFlow;
|
||||||
Trace.Info($"Using V2 flow: {runnerSettings.UseV2Flow}");
|
Trace.Info($"Using runner-admin flow: {runnerSettings.UseRunnerAdminFlow}");
|
||||||
creds = authResult.ToVssCredentials();
|
creds = authResult.ToVssCredentials();
|
||||||
Trace.Info("cred retrieved via GitHub auth");
|
Trace.Info("cred retrieved via GitHub auth");
|
||||||
}
|
}
|
||||||
@@ -194,7 +211,7 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
string poolName = null;
|
string poolName = null;
|
||||||
TaskAgentPool agentPool = null;
|
TaskAgentPool agentPool = null;
|
||||||
List<TaskAgentPool> agentPools;
|
List<TaskAgentPool> agentPools;
|
||||||
if (runnerSettings.UseV2Flow)
|
if (runnerSettings.UseRunnerAdminFlow)
|
||||||
{
|
{
|
||||||
agentPools = await _dotcomServer.GetRunnerGroupsAsync(runnerSettings.GitHubUrl, registerToken);
|
agentPools = await _dotcomServer.GetRunnerGroupsAsync(runnerSettings.GitHubUrl, registerToken);
|
||||||
}
|
}
|
||||||
@@ -242,7 +259,7 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
var userLabels = command.GetLabels();
|
var userLabels = command.GetLabels();
|
||||||
_term.WriteLine();
|
_term.WriteLine();
|
||||||
List<TaskAgent> agents;
|
List<TaskAgent> agents;
|
||||||
if (runnerSettings.UseV2Flow)
|
if (runnerSettings.UseRunnerAdminFlow)
|
||||||
{
|
{
|
||||||
agents = await _dotcomServer.GetRunnerByNameAsync(runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
|
agents = await _dotcomServer.GetRunnerByNameAsync(runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
|
||||||
}
|
}
|
||||||
@@ -263,10 +280,11 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
|
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
if (runnerSettings.UseV2Flow)
|
if (runnerSettings.UseRunnerAdminFlow)
|
||||||
{
|
{
|
||||||
var runner = await _dotcomServer.ReplaceRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
|
var runner = await _dotcomServer.ReplaceRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
|
||||||
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
|
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
|
||||||
|
runnerSettings.UseV2Flow = true; // if we are using runner admin, we also need to hit broker
|
||||||
|
|
||||||
agent.Id = runner.Id;
|
agent.Id = runner.Id;
|
||||||
agent.Authorization = new TaskAgentAuthorization()
|
agent.Authorization = new TaskAgentAuthorization()
|
||||||
@@ -274,6 +292,13 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
|
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
|
||||||
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
|
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
|
||||||
};
|
};
|
||||||
|
|
||||||
|
if (!string.IsNullOrEmpty(runner.RunnerAuthorization.LegacyAuthorizationUrl?.AbsoluteUri))
|
||||||
|
{
|
||||||
|
agent.Authorization.AuthorizationUrl = runner.RunnerAuthorization.LegacyAuthorizationUrl;
|
||||||
|
agent.Properties["EnableAuthMigrationByDefault"] = true;
|
||||||
|
agent.Properties["AuthorizationUrlV2"] = runner.RunnerAuthorization.AuthorizationUrl.AbsoluteUri;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
@@ -313,10 +338,11 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
|
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
if (runnerSettings.UseV2Flow)
|
if (runnerSettings.UseRunnerAdminFlow)
|
||||||
{
|
{
|
||||||
var runner = await _dotcomServer.AddRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
|
var runner = await _dotcomServer.AddRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
|
||||||
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
|
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
|
||||||
|
runnerSettings.UseV2Flow = true; // if we are using runner admin, we also need to hit broker
|
||||||
|
|
||||||
agent.Id = runner.Id;
|
agent.Id = runner.Id;
|
||||||
agent.Authorization = new TaskAgentAuthorization()
|
agent.Authorization = new TaskAgentAuthorization()
|
||||||
@@ -324,6 +350,13 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
|
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
|
||||||
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
|
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
|
||||||
};
|
};
|
||||||
|
|
||||||
|
if (!string.IsNullOrEmpty(runner.RunnerAuthorization.LegacyAuthorizationUrl?.AbsoluteUri))
|
||||||
|
{
|
||||||
|
agent.Authorization.AuthorizationUrl = runner.RunnerAuthorization.LegacyAuthorizationUrl;
|
||||||
|
agent.Properties["EnableAuthMigrationByDefault"] = true;
|
||||||
|
agent.Properties["AuthorizationUrlV2"] = runner.RunnerAuthorization.AuthorizationUrl.AbsoluteUri;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
@@ -366,25 +399,46 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
{
|
{
|
||||||
{ "clientId", agent.Authorization.ClientId.ToString("D") },
|
{ "clientId", agent.Authorization.ClientId.ToString("D") },
|
||||||
{ "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri },
|
{ "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri },
|
||||||
{ "requireFipsCryptography", agent.Properties.GetValue("RequireFipsCryptography", false).ToString() }
|
{ "requireFipsCryptography", agent.Properties.GetValue("RequireFipsCryptography", true).ToString() }
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
if (agent.Properties.GetValue("EnableAuthMigrationByDefault", false) &&
|
||||||
|
agent.Properties.TryGetValue<string>("AuthorizationUrlV2", out var authUrlV2) &&
|
||||||
|
!string.IsNullOrEmpty(authUrlV2))
|
||||||
|
{
|
||||||
|
credentialData.Data["enableAuthMigrationByDefault"] = "true";
|
||||||
|
credentialData.Data["authorizationUrlV2"] = authUrlV2;
|
||||||
|
}
|
||||||
|
|
||||||
// Save the negotiated OAuth credential data
|
// Save the negotiated OAuth credential data
|
||||||
_store.SaveCredential(credentialData);
|
_store.SaveCredential(credentialData);
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
|
|
||||||
throw new NotSupportedException("Message queue listen OAuth token.");
|
throw new NotSupportedException("Message queue listen OAuth token.");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// allow the server to override the serverUrlV2 and useV2Flow
|
||||||
|
if (agent.Properties.TryGetValue("ServerUrlV2", out string serverUrlV2) &&
|
||||||
|
!string.IsNullOrEmpty(serverUrlV2))
|
||||||
|
{
|
||||||
|
Trace.Info($"Service enforced serverUrlV2: {serverUrlV2}");
|
||||||
|
runnerSettings.ServerUrlV2 = serverUrlV2;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (agent.Properties.TryGetValue("UseV2Flow", out bool useV2Flow) && useV2Flow)
|
||||||
|
{
|
||||||
|
Trace.Info($"Service enforced useV2Flow: {useV2Flow}");
|
||||||
|
runnerSettings.UseV2Flow = useV2Flow;
|
||||||
|
}
|
||||||
|
|
||||||
// Testing agent connection, detect any potential connection issue, like local clock skew that cause OAuth token expired.
|
// Testing agent connection, detect any potential connection issue, like local clock skew that cause OAuth token expired.
|
||||||
|
|
||||||
if (!runnerSettings.UseV2Flow)
|
if (!runnerSettings.UseV2Flow && !runnerSettings.UseRunnerAdminFlow)
|
||||||
{
|
{
|
||||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||||
VssCredentials credential = credMgr.LoadCredentials();
|
VssCredentials credential = credMgr.LoadCredentials(allowAuthUrlV2: false);
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
|
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
|
||||||
@@ -498,41 +552,50 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
if (isConfigured && hasCredentials)
|
if (isConfigured && hasCredentials)
|
||||||
{
|
{
|
||||||
RunnerSettings settings = _store.GetSettings();
|
RunnerSettings settings = _store.GetSettings();
|
||||||
var credentialManager = HostContext.GetService<ICredentialManager>();
|
|
||||||
|
|
||||||
// Get the credentials
|
if (settings.UseRunnerAdminFlow)
|
||||||
VssCredentials creds = null;
|
|
||||||
if (string.IsNullOrEmpty(settings.GitHubUrl))
|
|
||||||
{
|
|
||||||
var credProvider = GetCredentialProvider(command, settings.ServerUrl);
|
|
||||||
creds = credProvider.GetVssCredentials(HostContext);
|
|
||||||
Trace.Info("legacy vss cred retrieved");
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
{
|
||||||
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
|
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
|
||||||
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove);
|
await _dotcomServer.DeleteRunnerAsync(settings.GitHubUrl, deletionToken, settings.AgentId);
|
||||||
creds = authResult.ToVssCredentials();
|
|
||||||
Trace.Info("cred retrieved via GitHub auth");
|
|
||||||
}
|
|
||||||
|
|
||||||
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
|
|
||||||
await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
|
|
||||||
|
|
||||||
var agents = await _runnerServer.GetAgentsAsync(settings.AgentName);
|
|
||||||
Trace.Verbose("Returns {0} agents", agents.Count);
|
|
||||||
TaskAgent agent = agents.FirstOrDefault();
|
|
||||||
if (agent == null)
|
|
||||||
{
|
|
||||||
_term.WriteLine("Does not exist. Skipping " + currentAction);
|
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
await _runnerServer.DeleteAgentAsync(settings.AgentId);
|
var credentialManager = HostContext.GetService<ICredentialManager>();
|
||||||
|
|
||||||
_term.WriteLine();
|
// Get the credentials
|
||||||
_term.WriteSuccessMessage("Runner removed successfully");
|
VssCredentials creds = null;
|
||||||
|
if (string.IsNullOrEmpty(settings.GitHubUrl))
|
||||||
|
{
|
||||||
|
var credProvider = GetCredentialProvider(command, settings.ServerUrl);
|
||||||
|
creds = credProvider.GetVssCredentials(HostContext, allowAuthUrlV2: false);
|
||||||
|
Trace.Info("legacy vss cred retrieved");
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
|
||||||
|
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove);
|
||||||
|
creds = authResult.ToVssCredentials();
|
||||||
|
Trace.Info("cred retrieved via GitHub auth");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
|
||||||
|
await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
|
||||||
|
|
||||||
|
var agents = await _runnerServer.GetAgentsAsync(settings.AgentName);
|
||||||
|
Trace.Verbose("Returns {0} agents", agents.Count);
|
||||||
|
TaskAgent agent = agents.FirstOrDefault();
|
||||||
|
if (agent == null)
|
||||||
|
{
|
||||||
|
_term.WriteLine("Does not exist. Skipping " + currentAction);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
await _runnerServer.DeleteAgentAsync(settings.AgentId);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
_term.WriteLine();
|
||||||
|
_term.WriteSuccessMessage("Runner removed successfully");
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
public interface ICredentialManager : IRunnerService
|
public interface ICredentialManager : IRunnerService
|
||||||
{
|
{
|
||||||
ICredentialProvider GetCredentialProvider(string credType);
|
ICredentialProvider GetCredentialProvider(string credType);
|
||||||
VssCredentials LoadCredentials();
|
VssCredentials LoadCredentials(bool allowAuthUrlV2);
|
||||||
}
|
}
|
||||||
|
|
||||||
public class CredentialManager : RunnerService, ICredentialManager
|
public class CredentialManager : RunnerService, ICredentialManager
|
||||||
@@ -40,7 +40,7 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
return creds;
|
return creds;
|
||||||
}
|
}
|
||||||
|
|
||||||
public VssCredentials LoadCredentials()
|
public VssCredentials LoadCredentials(bool allowAuthUrlV2)
|
||||||
{
|
{
|
||||||
IConfigurationStore store = HostContext.GetService<IConfigurationStore>();
|
IConfigurationStore store = HostContext.GetService<IConfigurationStore>();
|
||||||
|
|
||||||
@@ -51,21 +51,16 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
|
|
||||||
CredentialData credData = store.GetCredentials();
|
CredentialData credData = store.GetCredentials();
|
||||||
var migratedCred = store.GetMigratedCredentials();
|
var migratedCred = store.GetMigratedCredentials();
|
||||||
if (migratedCred != null)
|
if (migratedCred != null &&
|
||||||
|
migratedCred.Scheme == Constants.Configuration.OAuth)
|
||||||
{
|
{
|
||||||
credData = migratedCred;
|
credData = migratedCred;
|
||||||
|
|
||||||
// Re-write .credentials with Token URL
|
|
||||||
store.SaveCredential(credData);
|
|
||||||
|
|
||||||
// Delete .credentials_migrated
|
|
||||||
store.DeleteMigratedCredential();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);
|
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);
|
||||||
credProv.CredentialData = credData;
|
credProv.CredentialData = credData;
|
||||||
|
|
||||||
VssCredentials creds = credProv.GetVssCredentials(HostContext);
|
VssCredentials creds = credProv.GetVssCredentials(HostContext, allowAuthUrlV2);
|
||||||
|
|
||||||
return creds;
|
return creds;
|
||||||
}
|
}
|
||||||
@@ -94,7 +89,7 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
public string Token { get; set; }
|
public string Token { get; set; }
|
||||||
|
|
||||||
[DataMember(Name = "use_v2_flow")]
|
[DataMember(Name = "use_v2_flow")]
|
||||||
public bool UseV2Flow { get; set; }
|
public bool UseRunnerAdminFlow { get; set; }
|
||||||
|
|
||||||
public VssCredentials ToVssCredentials()
|
public VssCredentials ToVssCredentials()
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
using System;
|
using System;
|
||||||
using GitHub.Services.Common;
|
|
||||||
using GitHub.Runner.Common;
|
using GitHub.Runner.Common;
|
||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
|
using GitHub.Services.Common;
|
||||||
using GitHub.Services.OAuth;
|
using GitHub.Services.OAuth;
|
||||||
|
|
||||||
namespace GitHub.Runner.Listener.Configuration
|
namespace GitHub.Runner.Listener.Configuration
|
||||||
@@ -10,7 +10,7 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
{
|
{
|
||||||
Boolean RequireInteractive { get; }
|
Boolean RequireInteractive { get; }
|
||||||
CredentialData CredentialData { get; set; }
|
CredentialData CredentialData { get; set; }
|
||||||
VssCredentials GetVssCredentials(IHostContext context);
|
VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2);
|
||||||
void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl);
|
void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -25,7 +25,7 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
public virtual Boolean RequireInteractive => false;
|
public virtual Boolean RequireInteractive => false;
|
||||||
public CredentialData CredentialData { get; set; }
|
public CredentialData CredentialData { get; set; }
|
||||||
|
|
||||||
public abstract VssCredentials GetVssCredentials(IHostContext context);
|
public abstract VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2);
|
||||||
public abstract void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl);
|
public abstract void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -33,7 +33,7 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
{
|
{
|
||||||
public OAuthAccessTokenCredential() : base(Constants.Configuration.OAuthAccessToken) { }
|
public OAuthAccessTokenCredential() : base(Constants.Configuration.OAuthAccessToken) { }
|
||||||
|
|
||||||
public override VssCredentials GetVssCredentials(IHostContext context)
|
public override VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2)
|
||||||
{
|
{
|
||||||
ArgUtil.NotNull(context, nameof(context));
|
ArgUtil.NotNull(context, nameof(context));
|
||||||
Tracing trace = context.GetTrace(nameof(OAuthAccessTokenCredential));
|
Tracing trace = context.GetTrace(nameof(OAuthAccessTokenCredential));
|
||||||
|
|||||||
@@ -22,10 +22,18 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
// Nothing to verify here
|
// Nothing to verify here
|
||||||
}
|
}
|
||||||
|
|
||||||
public override VssCredentials GetVssCredentials(IHostContext context)
|
public override VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2)
|
||||||
{
|
{
|
||||||
var clientId = this.CredentialData.Data.GetValueOrDefault("clientId", null);
|
var clientId = this.CredentialData.Data.GetValueOrDefault("clientId", null);
|
||||||
var authorizationUrl = this.CredentialData.Data.GetValueOrDefault("authorizationUrl", null);
|
var authorizationUrl = this.CredentialData.Data.GetValueOrDefault("authorizationUrl", null);
|
||||||
|
var authorizationUrlV2 = this.CredentialData.Data.GetValueOrDefault("authorizationUrlV2", null);
|
||||||
|
|
||||||
|
if (allowAuthUrlV2 &&
|
||||||
|
!string.IsNullOrEmpty(authorizationUrlV2) &&
|
||||||
|
context.AllowAuthMigration)
|
||||||
|
{
|
||||||
|
authorizationUrl = authorizationUrlV2;
|
||||||
|
}
|
||||||
|
|
||||||
// For back compat with .credential file that doesn't has 'oauthEndpointUrl' section
|
// For back compat with .credential file that doesn't has 'oauthEndpointUrl' section
|
||||||
var oauthEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
|
var oauthEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
|
||||||
|
|||||||
@@ -110,7 +110,12 @@ namespace GitHub.Runner.Listener
|
|||||||
{
|
{
|
||||||
var jwt = JsonWebToken.Create(accessToken);
|
var jwt = JsonWebToken.Create(accessToken);
|
||||||
var claims = jwt.ExtractClaims();
|
var claims = jwt.ExtractClaims();
|
||||||
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
|
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orch_id", StringComparison.OrdinalIgnoreCase))?.Value;
|
||||||
|
if (string.IsNullOrEmpty(orchestrationId))
|
||||||
|
{
|
||||||
|
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
|
||||||
|
}
|
||||||
|
|
||||||
if (!string.IsNullOrEmpty(orchestrationId))
|
if (!string.IsNullOrEmpty(orchestrationId))
|
||||||
{
|
{
|
||||||
Trace.Info($"Pull OrchestrationId {orchestrationId} from JWT claims");
|
Trace.Info($"Pull OrchestrationId {orchestrationId} from JWT claims");
|
||||||
@@ -1206,7 +1211,7 @@ namespace GitHub.Runner.Listener
|
|||||||
jobAnnotations.Add(annotation.Value);
|
jobAnnotations.Add(annotation.Value);
|
||||||
}
|
}
|
||||||
|
|
||||||
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, telemetry: null, CancellationToken.None);
|
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, telemetry: null, billingOwnerId: message.BillingOwnerId, infrastructureFailureCategory: null, CancellationToken.None);
|
||||||
}
|
}
|
||||||
catch (Exception ex)
|
catch (Exception ex)
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -32,8 +32,9 @@ namespace GitHub.Runner.Listener
|
|||||||
Task DeleteSessionAsync();
|
Task DeleteSessionAsync();
|
||||||
Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token);
|
Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token);
|
||||||
Task DeleteMessageAsync(TaskAgentMessage message);
|
Task DeleteMessageAsync(TaskAgentMessage message);
|
||||||
|
Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken);
|
||||||
|
|
||||||
Task RefreshListenerTokenAsync(CancellationToken token);
|
Task RefreshListenerTokenAsync();
|
||||||
void OnJobStatus(object sender, JobStatusEventArgs e);
|
void OnJobStatus(object sender, JobStatusEventArgs e);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -44,6 +45,7 @@ namespace GitHub.Runner.Listener
|
|||||||
private ITerminal _term;
|
private ITerminal _term;
|
||||||
private IRunnerServer _runnerServer;
|
private IRunnerServer _runnerServer;
|
||||||
private IBrokerServer _brokerServer;
|
private IBrokerServer _brokerServer;
|
||||||
|
private ICredentialManager _credMgr;
|
||||||
private TaskAgentSession _session;
|
private TaskAgentSession _session;
|
||||||
private TimeSpan _getNextMessageRetryInterval;
|
private TimeSpan _getNextMessageRetryInterval;
|
||||||
private bool _accessTokenRevoked = false;
|
private bool _accessTokenRevoked = false;
|
||||||
@@ -51,11 +53,12 @@ namespace GitHub.Runner.Listener
|
|||||||
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
|
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
|
||||||
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
|
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
|
||||||
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
|
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
|
||||||
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
|
private TaskAgentStatus _runnerStatus = TaskAgentStatus.Online;
|
||||||
private CancellationTokenSource _getMessagesTokenSource;
|
private CancellationTokenSource _getMessagesTokenSource;
|
||||||
private VssCredentials _creds;
|
private VssCredentials _creds;
|
||||||
|
private VssCredentials _credsV2;
|
||||||
private bool _isBrokerSession = false;
|
private bool _needRefreshCredsV2 = false;
|
||||||
|
private bool _handlerInitialized = false;
|
||||||
|
|
||||||
public override void Initialize(IHostContext hostContext)
|
public override void Initialize(IHostContext hostContext)
|
||||||
{
|
{
|
||||||
@@ -64,6 +67,7 @@ namespace GitHub.Runner.Listener
|
|||||||
_term = HostContext.GetService<ITerminal>();
|
_term = HostContext.GetService<ITerminal>();
|
||||||
_runnerServer = HostContext.GetService<IRunnerServer>();
|
_runnerServer = HostContext.GetService<IRunnerServer>();
|
||||||
_brokerServer = hostContext.GetService<IBrokerServer>();
|
_brokerServer = hostContext.GetService<IBrokerServer>();
|
||||||
|
_credMgr = hostContext.GetService<ICredentialManager>();
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<CreateSessionResult> CreateSessionAsync(CancellationToken token)
|
public async Task<CreateSessionResult> CreateSessionAsync(CancellationToken token)
|
||||||
@@ -78,8 +82,7 @@ namespace GitHub.Runner.Listener
|
|||||||
|
|
||||||
// Create connection.
|
// Create connection.
|
||||||
Trace.Info("Loading Credentials");
|
Trace.Info("Loading Credentials");
|
||||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
_creds = _credMgr.LoadCredentials(allowAuthUrlV2: false);
|
||||||
_creds = credMgr.LoadCredentials();
|
|
||||||
|
|
||||||
var agent = new TaskAgentReference
|
var agent = new TaskAgentReference
|
||||||
{
|
{
|
||||||
@@ -113,16 +116,6 @@ namespace GitHub.Runner.Listener
|
|||||||
_settings.PoolId,
|
_settings.PoolId,
|
||||||
taskAgentSession,
|
taskAgentSession,
|
||||||
token);
|
token);
|
||||||
|
|
||||||
if (_session.BrokerMigrationMessage != null)
|
|
||||||
{
|
|
||||||
Trace.Info("Runner session is in migration mode: Creating Broker session with BrokerBaseUrl: {0}", _session.BrokerMigrationMessage.BrokerBaseUrl);
|
|
||||||
|
|
||||||
await _brokerServer.UpdateConnectionIfNeeded(_session.BrokerMigrationMessage.BrokerBaseUrl, _creds);
|
|
||||||
_session = await _brokerServer.CreateSessionAsync(taskAgentSession, token);
|
|
||||||
_isBrokerSession = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
Trace.Info($"Session created.");
|
Trace.Info($"Session created.");
|
||||||
if (encounteringError)
|
if (encounteringError)
|
||||||
{
|
{
|
||||||
@@ -131,6 +124,13 @@ namespace GitHub.Runner.Listener
|
|||||||
encounteringError = false;
|
encounteringError = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!_handlerInitialized)
|
||||||
|
{
|
||||||
|
Trace.Info("Registering AuthMigrationChanged event handler.");
|
||||||
|
HostContext.AuthMigrationChanged += HandleAuthMigrationChanged;
|
||||||
|
_handlerInitialized = true;
|
||||||
|
}
|
||||||
|
|
||||||
return CreateSessionResult.Success;
|
return CreateSessionResult.Success;
|
||||||
}
|
}
|
||||||
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
||||||
@@ -196,16 +196,16 @@ namespace GitHub.Runner.Listener
|
|||||||
{
|
{
|
||||||
if (_session != null && _session.SessionId != Guid.Empty)
|
if (_session != null && _session.SessionId != Guid.Empty)
|
||||||
{
|
{
|
||||||
|
if (_handlerInitialized)
|
||||||
|
{
|
||||||
|
HostContext.AuthMigrationChanged -= HandleAuthMigrationChanged;
|
||||||
|
}
|
||||||
|
|
||||||
if (!_accessTokenRevoked)
|
if (!_accessTokenRevoked)
|
||||||
{
|
{
|
||||||
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||||
{
|
{
|
||||||
await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
|
await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
|
||||||
|
|
||||||
if (_isBrokerSession)
|
|
||||||
{
|
|
||||||
await _brokerServer.DeleteSessionAsync(ts.Token);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
@@ -218,7 +218,7 @@ namespace GitHub.Runner.Listener
|
|||||||
public void OnJobStatus(object sender, JobStatusEventArgs e)
|
public void OnJobStatus(object sender, JobStatusEventArgs e)
|
||||||
{
|
{
|
||||||
Trace.Info("Received job status event. JobState: {0}", e.Status);
|
Trace.Info("Received job status event. JobState: {0}", e.Status);
|
||||||
runnerStatus = e.Status;
|
_runnerStatus = e.Status;
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
_getMessagesTokenSource?.Cancel();
|
_getMessagesTokenSource?.Cancel();
|
||||||
@@ -251,7 +251,7 @@ namespace GitHub.Runner.Listener
|
|||||||
message = await _runnerServer.GetAgentMessageAsync(_settings.PoolId,
|
message = await _runnerServer.GetAgentMessageAsync(_settings.PoolId,
|
||||||
_session.SessionId,
|
_session.SessionId,
|
||||||
_lastMessageId,
|
_lastMessageId,
|
||||||
runnerStatus,
|
_runnerStatus,
|
||||||
BuildConstants.RunnerPackage.Version,
|
BuildConstants.RunnerPackage.Version,
|
||||||
VarUtil.OS,
|
VarUtil.OS,
|
||||||
VarUtil.OSArchitecture,
|
VarUtil.OSArchitecture,
|
||||||
@@ -261,14 +261,21 @@ namespace GitHub.Runner.Listener
|
|||||||
// Decrypt the message body if the session is using encryption
|
// Decrypt the message body if the session is using encryption
|
||||||
message = DecryptMessage(message);
|
message = DecryptMessage(message);
|
||||||
|
|
||||||
|
|
||||||
if (message != null && message.MessageType == BrokerMigrationMessage.MessageType)
|
if (message != null && message.MessageType == BrokerMigrationMessage.MessageType)
|
||||||
{
|
{
|
||||||
var migrationMessage = JsonUtility.FromString<BrokerMigrationMessage>(message.Body);
|
var migrationMessage = JsonUtility.FromString<BrokerMigrationMessage>(message.Body);
|
||||||
|
|
||||||
await _brokerServer.UpdateConnectionIfNeeded(migrationMessage.BrokerBaseUrl, _creds);
|
_credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||||
|
await _brokerServer.UpdateConnectionIfNeeded(migrationMessage.BrokerBaseUrl, _credsV2);
|
||||||
|
if (_needRefreshCredsV2)
|
||||||
|
{
|
||||||
|
Trace.Info("Refreshing credentials for V2.");
|
||||||
|
await _brokerServer.ForceRefreshConnection(_credsV2);
|
||||||
|
_needRefreshCredsV2 = false;
|
||||||
|
}
|
||||||
|
|
||||||
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
|
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
|
||||||
runnerStatus,
|
_runnerStatus,
|
||||||
BuildConstants.RunnerPackage.Version,
|
BuildConstants.RunnerPackage.Version,
|
||||||
VarUtil.OS,
|
VarUtil.OS,
|
||||||
VarUtil.OSArchitecture,
|
VarUtil.OSArchitecture,
|
||||||
@@ -304,11 +311,16 @@ namespace GitHub.Runner.Listener
|
|||||||
_accessTokenRevoked = true;
|
_accessTokenRevoked = true;
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
catch (AccessDeniedException e) when (e.ErrorCode == 1)
|
catch (HostedRunnerDeprovisionedException)
|
||||||
|
{
|
||||||
|
Trace.Info("Hosted runner has been deprovisioned.");
|
||||||
|
throw;
|
||||||
|
}
|
||||||
|
catch (AccessDeniedException e) when (e.ErrorCode == 1 && !HostContext.AllowAuthMigration)
|
||||||
{
|
{
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
catch (RunnerNotFoundException)
|
catch (RunnerNotFoundException) when (!HostContext.AllowAuthMigration)
|
||||||
{
|
{
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
@@ -317,12 +329,19 @@ namespace GitHub.Runner.Listener
|
|||||||
Trace.Error("Catch exception during get next message.");
|
Trace.Error("Catch exception during get next message.");
|
||||||
Trace.Error(ex);
|
Trace.Error(ex);
|
||||||
|
|
||||||
|
// clear out potential message for broker migration,
|
||||||
|
// in case the exception is thrown from get message from broker-listener.
|
||||||
|
message = null;
|
||||||
|
|
||||||
// don't retry if SkipSessionRecover = true, DT service will delete agent session to stop agent from taking more jobs.
|
// don't retry if SkipSessionRecover = true, DT service will delete agent session to stop agent from taking more jobs.
|
||||||
if (ex is TaskAgentSessionExpiredException && !_settings.SkipSessionRecover && (await CreateSessionAsync(token) == CreateSessionResult.Success))
|
if (!HostContext.AllowAuthMigration &&
|
||||||
|
ex is TaskAgentSessionExpiredException &&
|
||||||
|
!_settings.SkipSessionRecover && (await CreateSessionAsync(token) == CreateSessionResult.Success))
|
||||||
{
|
{
|
||||||
Trace.Info($"{nameof(TaskAgentSessionExpiredException)} received, recovered by recreate session.");
|
Trace.Info($"{nameof(TaskAgentSessionExpiredException)} received, recovered by recreate session.");
|
||||||
}
|
}
|
||||||
else if (!IsGetNextMessageExceptionRetriable(ex))
|
else if (!HostContext.AllowAuthMigration &&
|
||||||
|
!IsGetNextMessageExceptionRetriable(ex))
|
||||||
{
|
{
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
@@ -349,6 +368,12 @@ namespace GitHub.Runner.Listener
|
|||||||
encounteringError = true;
|
encounteringError = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (HostContext.AllowAuthMigration)
|
||||||
|
{
|
||||||
|
Trace.Info("Disable migration mode for 60 minutes.");
|
||||||
|
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Get next message failed with exception: {ex}");
|
||||||
|
}
|
||||||
|
|
||||||
// re-create VssConnection before next retry
|
// re-create VssConnection before next retry
|
||||||
await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60));
|
await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60));
|
||||||
|
|
||||||
@@ -406,10 +431,26 @@ namespace GitHub.Runner.Listener
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task RefreshListenerTokenAsync(CancellationToken cancellationToken)
|
public async Task RefreshListenerTokenAsync()
|
||||||
{
|
{
|
||||||
await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60));
|
await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60));
|
||||||
await _brokerServer.ForceRefreshConnection(_creds);
|
_credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||||
|
await _brokerServer.ForceRefreshConnection(_credsV2);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // Short timeout
|
||||||
|
using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token);
|
||||||
|
Trace.Info($"Acknowledging runner request '{runnerRequestId}'.");
|
||||||
|
await _brokerServer.AcknowledgeRunnerRequestAsync(
|
||||||
|
runnerRequestId,
|
||||||
|
_session.SessionId,
|
||||||
|
_runnerStatus,
|
||||||
|
BuildConstants.RunnerPackage.Version,
|
||||||
|
VarUtil.OS,
|
||||||
|
VarUtil.OSArchitecture,
|
||||||
|
linkedCts.Token);
|
||||||
}
|
}
|
||||||
|
|
||||||
private TaskAgentMessage DecryptMessage(TaskAgentMessage message)
|
private TaskAgentMessage DecryptMessage(TaskAgentMessage message)
|
||||||
@@ -528,7 +569,8 @@ namespace GitHub.Runner.Listener
|
|||||||
}
|
}
|
||||||
else if (ex is TaskAgentPoolNotFoundException ||
|
else if (ex is TaskAgentPoolNotFoundException ||
|
||||||
ex is AccessDeniedException ||
|
ex is AccessDeniedException ||
|
||||||
ex is VssUnauthorizedException)
|
ex is VssUnauthorizedException ||
|
||||||
|
(ex is VssOAuthTokenRequestException oauthEx && oauthEx.Error != "server_error"))
|
||||||
{
|
{
|
||||||
Trace.Info($"Non-retriable exception: {ex.Message}");
|
Trace.Info($"Non-retriable exception: {ex.Message}");
|
||||||
return false;
|
return false;
|
||||||
@@ -539,5 +581,11 @@ namespace GitHub.Runner.Listener
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private void HandleAuthMigrationChanged(object sender, EventArgs e)
|
||||||
|
{
|
||||||
|
Trace.Info($"Auth migration changed. Current allow auth migration state: {HostContext.AllowAuthMigration}");
|
||||||
|
_needRefreshCredsV2 = true;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -23,7 +23,7 @@
|
|||||||
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
|
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
|
||||||
<PackageReference Include="System.IO.FileSystem.AccessControl" Version="5.0.0" />
|
<PackageReference Include="System.IO.FileSystem.AccessControl" Version="5.0.0" />
|
||||||
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="8.0.0" />
|
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="8.0.0" />
|
||||||
<PackageReference Include="System.ServiceProcess.ServiceController" Version="8.0.0" />
|
<PackageReference Include="System.ServiceProcess.ServiceController" Version="8.0.1" />
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
|
|
||||||
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
|
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
|
||||||
|
|||||||
@@ -1,10 +1,12 @@
|
|||||||
using System;
|
using System;
|
||||||
|
using System.Collections.Concurrent;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.IO;
|
using System.IO;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
using System.Reflection;
|
using System.Reflection;
|
||||||
using System.Runtime.CompilerServices;
|
using System.Runtime.CompilerServices;
|
||||||
using System.Security.Cryptography;
|
using System.Security.Cryptography;
|
||||||
|
using System.Security.Claims;
|
||||||
using System.Text;
|
using System.Text;
|
||||||
using System.Threading;
|
using System.Threading;
|
||||||
using System.Threading.Tasks;
|
using System.Threading.Tasks;
|
||||||
@@ -14,7 +16,9 @@ using GitHub.Runner.Common.Util;
|
|||||||
using GitHub.Runner.Listener.Check;
|
using GitHub.Runner.Listener.Check;
|
||||||
using GitHub.Runner.Listener.Configuration;
|
using GitHub.Runner.Listener.Configuration;
|
||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
|
using GitHub.Services.OAuth;
|
||||||
using GitHub.Services.WebApi;
|
using GitHub.Services.WebApi;
|
||||||
|
using GitHub.Services.WebApi.Jwt;
|
||||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||||
|
|
||||||
namespace GitHub.Runner.Listener
|
namespace GitHub.Runner.Listener
|
||||||
@@ -31,6 +35,14 @@ namespace GitHub.Runner.Listener
|
|||||||
private ITerminal _term;
|
private ITerminal _term;
|
||||||
private bool _inConfigStage;
|
private bool _inConfigStage;
|
||||||
private ManualResetEvent _completedCommand = new(false);
|
private ManualResetEvent _completedCommand = new(false);
|
||||||
|
private readonly ConcurrentQueue<string> _authMigrationTelemetries = new();
|
||||||
|
private Task _authMigrationTelemetryTask;
|
||||||
|
private readonly object _authMigrationTelemetryLock = new();
|
||||||
|
private Task _authMigrationClaimsCheckTask;
|
||||||
|
private readonly object _authMigrationClaimsCheckLock = new();
|
||||||
|
private IRunnerServer _runnerServer;
|
||||||
|
private CancellationTokenSource _authMigrationTelemetryTokenSource = new();
|
||||||
|
private CancellationTokenSource _authMigrationClaimsCheckTokenSource = new();
|
||||||
|
|
||||||
// <summary>
|
// <summary>
|
||||||
// Helps avoid excessive calls to Run Service when encountering non-retriable errors from /acquirejob.
|
// Helps avoid excessive calls to Run Service when encountering non-retriable errors from /acquirejob.
|
||||||
@@ -51,6 +63,7 @@ namespace GitHub.Runner.Listener
|
|||||||
base.Initialize(hostContext);
|
base.Initialize(hostContext);
|
||||||
_term = HostContext.GetService<ITerminal>();
|
_term = HostContext.GetService<ITerminal>();
|
||||||
_acquireJobThrottler = HostContext.CreateService<IErrorThrottler>();
|
_acquireJobThrottler = HostContext.CreateService<IErrorThrottler>();
|
||||||
|
_runnerServer = HostContext.GetService<IRunnerServer>();
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<int> ExecuteCommand(CommandSettings command)
|
public async Task<int> ExecuteCommand(CommandSettings command)
|
||||||
@@ -66,6 +79,8 @@ namespace GitHub.Runner.Listener
|
|||||||
//register a SIGTERM handler
|
//register a SIGTERM handler
|
||||||
HostContext.Unloading += Runner_Unloading;
|
HostContext.Unloading += Runner_Unloading;
|
||||||
|
|
||||||
|
HostContext.AuthMigrationChanged += HandleAuthMigrationChanged;
|
||||||
|
|
||||||
// TODO Unit test to cover this logic
|
// TODO Unit test to cover this logic
|
||||||
Trace.Info(nameof(ExecuteCommand));
|
Trace.Info(nameof(ExecuteCommand));
|
||||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||||
@@ -300,8 +315,17 @@ namespace GitHub.Runner.Listener
|
|||||||
_term.WriteLine("https://docs.github.com/en/actions/hosting-your-own-runners/autoscaling-with-self-hosted-runners#using-ephemeral-runners-for-autoscaling", ConsoleColor.Yellow);
|
_term.WriteLine("https://docs.github.com/en/actions/hosting-your-own-runners/autoscaling-with-self-hosted-runners#using-ephemeral-runners-for-autoscaling", ConsoleColor.Yellow);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var cred = store.GetCredentials();
|
||||||
|
if (cred != null &&
|
||||||
|
cred.Scheme == Constants.Configuration.OAuth &&
|
||||||
|
cred.Data.ContainsKey("EnableAuthMigrationByDefault"))
|
||||||
|
{
|
||||||
|
Trace.Info("Enable auth migration by default.");
|
||||||
|
HostContext.EnableAuthMigration("EnableAuthMigrationByDefault");
|
||||||
|
}
|
||||||
|
|
||||||
// Run the runner interactively or as service
|
// Run the runner interactively or as service
|
||||||
return await RunAsync(settings, command.RunOnce || settings.Ephemeral);
|
return await ExecuteRunnerAsync(settings, command.RunOnce || settings.Ephemeral);
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
@@ -311,6 +335,9 @@ namespace GitHub.Runner.Listener
|
|||||||
}
|
}
|
||||||
finally
|
finally
|
||||||
{
|
{
|
||||||
|
_authMigrationClaimsCheckTokenSource?.Cancel();
|
||||||
|
_authMigrationTelemetryTokenSource?.Cancel();
|
||||||
|
HostContext.AuthMigrationChanged -= HandleAuthMigrationChanged;
|
||||||
_term.CancelKeyPress -= CtrlCHandler;
|
_term.CancelKeyPress -= CtrlCHandler;
|
||||||
HostContext.Unloading -= Runner_Unloading;
|
HostContext.Unloading -= Runner_Unloading;
|
||||||
_completedCommand.Set();
|
_completedCommand.Set();
|
||||||
@@ -360,12 +387,12 @@ namespace GitHub.Runner.Listener
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private IMessageListener GetMesageListener(RunnerSettings settings)
|
private IMessageListener GetMessageListener(RunnerSettings settings, bool isMigratedSettings = false)
|
||||||
{
|
{
|
||||||
if (settings.UseV2Flow)
|
if (settings.UseV2Flow)
|
||||||
{
|
{
|
||||||
Trace.Info($"Using BrokerMessageListener");
|
Trace.Info($"Using BrokerMessageListener");
|
||||||
var brokerListener = new BrokerMessageListener();
|
var brokerListener = new BrokerMessageListener(settings, isMigratedSettings);
|
||||||
brokerListener.Initialize(HostContext);
|
brokerListener.Initialize(HostContext);
|
||||||
return brokerListener;
|
return brokerListener;
|
||||||
}
|
}
|
||||||
@@ -379,15 +406,65 @@ namespace GitHub.Runner.Listener
|
|||||||
try
|
try
|
||||||
{
|
{
|
||||||
Trace.Info(nameof(RunAsync));
|
Trace.Info(nameof(RunAsync));
|
||||||
_listener = GetMesageListener(settings);
|
|
||||||
CreateSessionResult createSessionResult = await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken);
|
// First try using migrated settings if available
|
||||||
if (createSessionResult == CreateSessionResult.SessionConflict)
|
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||||
|
RunnerSettings migratedSettings = null;
|
||||||
|
|
||||||
|
try
|
||||||
{
|
{
|
||||||
return Constants.Runner.ReturnCode.SessionConflict;
|
migratedSettings = configManager.LoadMigratedSettings();
|
||||||
|
Trace.Info("Loaded migrated settings from .runner_migrated file");
|
||||||
|
Trace.Info(migratedSettings);
|
||||||
}
|
}
|
||||||
else if (createSessionResult == CreateSessionResult.Failure)
|
catch (Exception ex)
|
||||||
{
|
{
|
||||||
return Constants.Runner.ReturnCode.TerminatedError;
|
// If migrated settings file doesn't exist or can't be loaded, we'll use the provided settings
|
||||||
|
Trace.Info($"Failed to load migrated settings: {ex.Message}");
|
||||||
|
}
|
||||||
|
|
||||||
|
bool usedMigratedSettings = false;
|
||||||
|
|
||||||
|
if (migratedSettings != null)
|
||||||
|
{
|
||||||
|
// Try to create session with migrated settings first
|
||||||
|
Trace.Info("Attempting to create session using migrated settings");
|
||||||
|
_listener = GetMessageListener(migratedSettings, isMigratedSettings: true);
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
CreateSessionResult createSessionResult = await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken);
|
||||||
|
if (createSessionResult == CreateSessionResult.Success)
|
||||||
|
{
|
||||||
|
Trace.Info("Successfully created session with migrated settings");
|
||||||
|
settings = migratedSettings; // Use migrated settings for the rest of the process
|
||||||
|
usedMigratedSettings = true;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
Trace.Warning($"Failed to create session with migrated settings: {createSessionResult}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Error($"Exception when creating session with migrated settings: {ex}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If migrated settings weren't used or session creation failed, use original settings
|
||||||
|
if (!usedMigratedSettings)
|
||||||
|
{
|
||||||
|
Trace.Info("Falling back to original .runner settings");
|
||||||
|
_listener = GetMessageListener(settings);
|
||||||
|
CreateSessionResult createSessionResult = await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken);
|
||||||
|
if (createSessionResult == CreateSessionResult.SessionConflict)
|
||||||
|
{
|
||||||
|
return Constants.Runner.ReturnCode.SessionConflict;
|
||||||
|
}
|
||||||
|
else if (createSessionResult == CreateSessionResult.Failure)
|
||||||
|
{
|
||||||
|
return Constants.Runner.ReturnCode.TerminatedError;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
HostContext.WritePerfCounter("SessionCreated");
|
HostContext.WritePerfCounter("SessionCreated");
|
||||||
@@ -401,6 +478,8 @@ namespace GitHub.Runner.Listener
|
|||||||
// Should we try to cleanup ephemeral runners
|
// Should we try to cleanup ephemeral runners
|
||||||
bool runOnceJobCompleted = false;
|
bool runOnceJobCompleted = false;
|
||||||
bool skipSessionDeletion = false;
|
bool skipSessionDeletion = false;
|
||||||
|
bool restartSession = false; // Flag to indicate session restart
|
||||||
|
bool restartSessionPending = false;
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
var notification = HostContext.GetService<IJobNotification>();
|
var notification = HostContext.GetService<IJobNotification>();
|
||||||
@@ -416,6 +495,15 @@ namespace GitHub.Runner.Listener
|
|||||||
|
|
||||||
while (!HostContext.RunnerShutdownToken.IsCancellationRequested)
|
while (!HostContext.RunnerShutdownToken.IsCancellationRequested)
|
||||||
{
|
{
|
||||||
|
// Check if we need to restart the session and can do so (job dispatcher not busy)
|
||||||
|
if (restartSessionPending && !jobDispatcher.Busy)
|
||||||
|
{
|
||||||
|
Trace.Info("Pending session restart detected and job dispatcher is not busy. Restarting session now.");
|
||||||
|
messageQueueLoopTokenSource.Cancel();
|
||||||
|
restartSession = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
TaskAgentMessage message = null;
|
TaskAgentMessage message = null;
|
||||||
bool skipMessageDeletion = false;
|
bool skipMessageDeletion = false;
|
||||||
try
|
try
|
||||||
@@ -566,25 +654,45 @@ namespace GitHub.Runner.Listener
|
|||||||
else
|
else
|
||||||
{
|
{
|
||||||
var messageRef = StringUtil.ConvertFromJson<RunnerJobRequestRef>(message.Body);
|
var messageRef = StringUtil.ConvertFromJson<RunnerJobRequestRef>(message.Body);
|
||||||
|
|
||||||
|
// Acknowledge (best-effort)
|
||||||
|
if (messageRef.ShouldAcknowledge) // Temporary feature flag
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
await _listener.AcknowledgeMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Error($"Best-effort acknowledge failed for request '{messageRef.RunnerRequestId}'");
|
||||||
|
Trace.Error(ex);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
Pipelines.AgentJobRequestMessage jobRequestMessage = null;
|
Pipelines.AgentJobRequestMessage jobRequestMessage = null;
|
||||||
|
|
||||||
// Create connection
|
|
||||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
|
||||||
var creds = credMgr.LoadCredentials();
|
|
||||||
|
|
||||||
if (string.IsNullOrEmpty(messageRef.RunServiceUrl))
|
if (string.IsNullOrEmpty(messageRef.RunServiceUrl))
|
||||||
{
|
{
|
||||||
|
// Connect
|
||||||
|
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||||
|
var creds = credMgr.LoadCredentials(allowAuthUrlV2: false);
|
||||||
var actionsRunServer = HostContext.CreateService<IActionsRunServer>();
|
var actionsRunServer = HostContext.CreateService<IActionsRunServer>();
|
||||||
await actionsRunServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
|
await actionsRunServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
|
||||||
|
|
||||||
|
// Get job message
|
||||||
jobRequestMessage = await actionsRunServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
|
jobRequestMessage = await actionsRunServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
|
// Connect
|
||||||
|
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||||
|
var credsV2 = credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||||
var runServer = HostContext.CreateService<IRunServer>();
|
var runServer = HostContext.CreateService<IRunServer>();
|
||||||
await runServer.ConnectAsync(new Uri(messageRef.RunServiceUrl), creds);
|
await runServer.ConnectAsync(new Uri(messageRef.RunServiceUrl), credsV2);
|
||||||
|
|
||||||
|
// Get job message
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
jobRequestMessage = await runServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
|
jobRequestMessage = await runServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageRef.BillingOwnerId, messageQueueLoopTokenSource.Token);
|
||||||
_acquireJobThrottler.Reset();
|
_acquireJobThrottler.Reset();
|
||||||
}
|
}
|
||||||
catch (Exception ex) when (
|
catch (Exception ex) when (
|
||||||
@@ -599,11 +707,21 @@ namespace GitHub.Runner.Listener
|
|||||||
catch (Exception ex)
|
catch (Exception ex)
|
||||||
{
|
{
|
||||||
Trace.Error($"Caught exception from acquiring job message: {ex}");
|
Trace.Error($"Caught exception from acquiring job message: {ex}");
|
||||||
|
|
||||||
|
if (HostContext.AllowAuthMigration)
|
||||||
|
{
|
||||||
|
Trace.Info("Disable migration mode for 60 minutes.");
|
||||||
|
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Acquire job failed with exception: {ex}");
|
||||||
|
}
|
||||||
|
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Dispatch
|
||||||
jobDispatcher.Run(jobRequestMessage, runOnce);
|
jobDispatcher.Run(jobRequestMessage, runOnce);
|
||||||
|
|
||||||
|
// Run once?
|
||||||
if (runOnce)
|
if (runOnce)
|
||||||
{
|
{
|
||||||
Trace.Info("One time used runner received job message.");
|
Trace.Info("One time used runner received job message.");
|
||||||
@@ -633,7 +751,29 @@ namespace GitHub.Runner.Listener
|
|||||||
else if (string.Equals(message.MessageType, TaskAgentMessageTypes.ForceTokenRefresh))
|
else if (string.Equals(message.MessageType, TaskAgentMessageTypes.ForceTokenRefresh))
|
||||||
{
|
{
|
||||||
Trace.Info("Received ForceTokenRefreshMessage");
|
Trace.Info("Received ForceTokenRefreshMessage");
|
||||||
await _listener.RefreshListenerTokenAsync(messageQueueLoopTokenSource.Token);
|
await _listener.RefreshListenerTokenAsync();
|
||||||
|
}
|
||||||
|
else if (string.Equals(message.MessageType, RunnerRefreshConfigMessage.MessageType))
|
||||||
|
{
|
||||||
|
var runnerRefreshConfigMessage = JsonUtility.FromString<RunnerRefreshConfigMessage>(message.Body);
|
||||||
|
Trace.Info($"Received RunnerRefreshConfigMessage for '{runnerRefreshConfigMessage.ConfigType}' config file");
|
||||||
|
var configUpdater = HostContext.GetService<IRunnerConfigUpdater>();
|
||||||
|
await configUpdater.UpdateRunnerConfigAsync(
|
||||||
|
runnerQualifiedId: runnerRefreshConfigMessage.RunnerQualifiedId,
|
||||||
|
configType: runnerRefreshConfigMessage.ConfigType,
|
||||||
|
serviceType: runnerRefreshConfigMessage.ServiceType,
|
||||||
|
configRefreshUrl: runnerRefreshConfigMessage.ConfigRefreshUrl);
|
||||||
|
|
||||||
|
// Set flag to schedule session restart if ConfigType is "runner"
|
||||||
|
if (string.Equals(runnerRefreshConfigMessage.ConfigType, "runner", StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
Trace.Info("Runner configuration was updated. Session restart has been scheduled");
|
||||||
|
restartSessionPending = true;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
Trace.Info($"No session restart needed for config type: {runnerRefreshConfigMessage.ConfigType}");
|
||||||
|
}
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
@@ -688,19 +828,243 @@ namespace GitHub.Runner.Listener
|
|||||||
|
|
||||||
if (settings.Ephemeral && runOnceJobCompleted)
|
if (settings.Ephemeral && runOnceJobCompleted)
|
||||||
{
|
{
|
||||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
|
||||||
configManager.DeleteLocalRunnerConfig();
|
configManager.DeleteLocalRunnerConfig();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// After cleanup, check if we need to restart the session
|
||||||
|
if (restartSession)
|
||||||
|
{
|
||||||
|
Trace.Info("Restarting runner session after config update...");
|
||||||
|
return Constants.Runner.ReturnCode.RunnerConfigurationRefreshed;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
catch (TaskAgentAccessTokenExpiredException)
|
catch (TaskAgentAccessTokenExpiredException)
|
||||||
{
|
{
|
||||||
Trace.Info("Runner OAuth token has been revoked. Shutting down.");
|
Trace.Info("Runner OAuth token has been revoked. Shutting down.");
|
||||||
}
|
}
|
||||||
|
catch (HostedRunnerDeprovisionedException)
|
||||||
|
{
|
||||||
|
Trace.Info("Hosted runner has been deprovisioned. Shutting down.");
|
||||||
|
}
|
||||||
|
|
||||||
return Constants.Runner.ReturnCode.Success;
|
return Constants.Runner.ReturnCode.Success;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private async Task<int> ExecuteRunnerAsync(RunnerSettings settings, bool runOnce)
|
||||||
|
{
|
||||||
|
int returnCode = Constants.Runner.ReturnCode.Success;
|
||||||
|
bool restart = false;
|
||||||
|
do
|
||||||
|
{
|
||||||
|
restart = false;
|
||||||
|
returnCode = await RunAsync(settings, runOnce);
|
||||||
|
|
||||||
|
if (returnCode == Constants.Runner.ReturnCode.RunnerConfigurationRefreshed)
|
||||||
|
{
|
||||||
|
Trace.Info("Runner configuration was refreshed, restarting session...");
|
||||||
|
// Reload settings in case they changed
|
||||||
|
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||||
|
settings = configManager.LoadSettings();
|
||||||
|
restart = true;
|
||||||
|
}
|
||||||
|
} while (restart);
|
||||||
|
|
||||||
|
return returnCode;
|
||||||
|
}
|
||||||
|
|
||||||
|
private void HandleAuthMigrationChanged(object sender, AuthMigrationEventArgs e)
|
||||||
|
{
|
||||||
|
Trace.Verbose("Handle AuthMigrationChanged in Runner");
|
||||||
|
_authMigrationTelemetries.Enqueue($"{DateTime.UtcNow.ToString("O")}: {e.Trace}");
|
||||||
|
|
||||||
|
// only start the telemetry reporting task once auth migration is changed (enabled or disabled)
|
||||||
|
lock (_authMigrationTelemetryLock)
|
||||||
|
{
|
||||||
|
if (_authMigrationTelemetryTask == null)
|
||||||
|
{
|
||||||
|
_authMigrationTelemetryTask = ReportAuthMigrationTelemetryAsync(_authMigrationTelemetryTokenSource.Token);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// only start the claims check task once auth migration is changed (enabled or disabled)
|
||||||
|
lock (_authMigrationClaimsCheckLock)
|
||||||
|
{
|
||||||
|
if (_authMigrationClaimsCheckTask == null)
|
||||||
|
{
|
||||||
|
_authMigrationClaimsCheckTask = CheckOAuthTokenClaimsAsync(_authMigrationClaimsCheckTokenSource.Token);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task CheckOAuthTokenClaimsAsync(CancellationToken token)
|
||||||
|
{
|
||||||
|
string[] expectedClaims =
|
||||||
|
[
|
||||||
|
"owner_id",
|
||||||
|
"runner_id",
|
||||||
|
"runner_group_id",
|
||||||
|
"scale_set_id",
|
||||||
|
"is_ephemeral",
|
||||||
|
"labels"
|
||||||
|
];
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||||
|
while (!token.IsCancellationRequested)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
await HostContext.Delay(TimeSpan.FromMinutes(100), token);
|
||||||
|
}
|
||||||
|
catch (TaskCanceledException)
|
||||||
|
{
|
||||||
|
// Ignore cancellation
|
||||||
|
}
|
||||||
|
|
||||||
|
if (token.IsCancellationRequested)
|
||||||
|
{
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!HostContext.AllowAuthMigration)
|
||||||
|
{
|
||||||
|
Trace.Info("Skip checking oauth token claims since auth migration is disabled.");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
var baselineCred = credMgr.LoadCredentials(allowAuthUrlV2: false);
|
||||||
|
var authV2Cred = credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||||
|
|
||||||
|
if (!(baselineCred.Federated is VssOAuthCredential baselineVssOAuthCred) ||
|
||||||
|
!(authV2Cred.Federated is VssOAuthCredential vssOAuthCredV2) ||
|
||||||
|
baselineVssOAuthCred == null ||
|
||||||
|
vssOAuthCredV2 == null)
|
||||||
|
{
|
||||||
|
Trace.Info("Skip checking oauth token claims for non-oauth credentials");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (string.Equals(baselineVssOAuthCred.AuthorizationUrl.AbsoluteUri, vssOAuthCredV2.AuthorizationUrl.AbsoluteUri, StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
Trace.Info("Skip checking oauth token claims for same authorization url");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
var baselineProvider = baselineVssOAuthCred.GetTokenProvider(baselineVssOAuthCred.AuthorizationUrl);
|
||||||
|
var v2Provider = vssOAuthCredV2.GetTokenProvider(vssOAuthCredV2.AuthorizationUrl);
|
||||||
|
try
|
||||||
|
{
|
||||||
|
using (var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||||
|
using (var requestTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token, timeoutTokenSource.Token))
|
||||||
|
{
|
||||||
|
var baselineToken = await baselineProvider.GetTokenAsync(null, requestTokenSource.Token);
|
||||||
|
var v2Token = await v2Provider.GetTokenAsync(null, requestTokenSource.Token);
|
||||||
|
if (baselineToken is VssOAuthAccessToken baselineAccessToken &&
|
||||||
|
v2Token is VssOAuthAccessToken v2AccessToken &&
|
||||||
|
!string.IsNullOrEmpty(baselineAccessToken.Value) &&
|
||||||
|
!string.IsNullOrEmpty(v2AccessToken.Value))
|
||||||
|
{
|
||||||
|
var baselineJwt = JsonWebToken.Create(baselineAccessToken.Value);
|
||||||
|
var baselineClaims = baselineJwt.ExtractClaims();
|
||||||
|
var v2Jwt = JsonWebToken.Create(v2AccessToken.Value);
|
||||||
|
var v2Claims = v2Jwt.ExtractClaims();
|
||||||
|
|
||||||
|
// Log extracted claims for debugging
|
||||||
|
Trace.Verbose($"Baseline token expected claims: {string.Join(", ", baselineClaims
|
||||||
|
.Where(c => expectedClaims.Contains(c.Type.ToLowerInvariant()))
|
||||||
|
.Select(c => $"{c.Type}:{c.Value}"))}");
|
||||||
|
Trace.Verbose($"V2 token expected claims: {string.Join(", ", v2Claims
|
||||||
|
.Where(c => expectedClaims.Contains(c.Type.ToLowerInvariant()))
|
||||||
|
.Select(c => $"{c.Type}:{c.Value}"))}");
|
||||||
|
|
||||||
|
foreach (var claim in expectedClaims)
|
||||||
|
{
|
||||||
|
// if baseline has the claim, v2 should have it too with exactly same value.
|
||||||
|
if (baselineClaims.FirstOrDefault(c => c.Type.ToLowerInvariant() == claim) is Claim baselineClaim &&
|
||||||
|
!string.IsNullOrEmpty(baselineClaim?.Value))
|
||||||
|
{
|
||||||
|
var v2Claim = v2Claims.FirstOrDefault(c => c.Type.ToLowerInvariant() == claim);
|
||||||
|
if (v2Claim?.Value != baselineClaim.Value)
|
||||||
|
{
|
||||||
|
Trace.Info($"Token Claim mismatch between two issuers. Expected: {baselineClaim.Type}:{baselineClaim.Value}. Actual: {v2Claim?.Type ?? "Empty"}:{v2Claim?.Value ?? "Empty"}");
|
||||||
|
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Expected claim {baselineClaim.Type}:{baselineClaim.Value} does not match {v2Claim?.Type ?? "Empty"}:{v2Claim?.Value ?? "Empty"}");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Trace.Info("OAuth token claims check passed.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Error("Failed to fetch and check OAuth token claims.");
|
||||||
|
Trace.Error(ex);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Error("Failed to check OAuth token claims in background.");
|
||||||
|
Trace.Error(ex);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task ReportAuthMigrationTelemetryAsync(CancellationToken token)
|
||||||
|
{
|
||||||
|
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||||
|
var runnerSettings = configManager.LoadSettings();
|
||||||
|
|
||||||
|
while (!token.IsCancellationRequested)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
await HostContext.Delay(TimeSpan.FromSeconds(60), token);
|
||||||
|
}
|
||||||
|
catch (TaskCanceledException)
|
||||||
|
{
|
||||||
|
// Ignore cancellation
|
||||||
|
}
|
||||||
|
|
||||||
|
Trace.Verbose("Checking for auth migration telemetry to report");
|
||||||
|
while (_authMigrationTelemetries.TryDequeue(out var telemetry))
|
||||||
|
{
|
||||||
|
Trace.Verbose($"Reporting auth migration telemetry: {telemetry}");
|
||||||
|
if (runnerSettings != null)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||||
|
{
|
||||||
|
await _runnerServer.UpdateAgentUpdateStateAsync(runnerSettings.PoolId, runnerSettings.AgentId, "RefreshConfig", telemetry, tokenSource.Token);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Error("Failed to report auth migration telemetry.");
|
||||||
|
Trace.Error(ex);
|
||||||
|
_authMigrationTelemetries.Enqueue(telemetry);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!token.IsCancellationRequested)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
await HostContext.Delay(TimeSpan.FromSeconds(10), token);
|
||||||
|
}
|
||||||
|
catch (TaskCanceledException)
|
||||||
|
{
|
||||||
|
// Ignore cancellation
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private void PrintUsage(CommandSettings command)
|
private void PrintUsage(CommandSettings command)
|
||||||
{
|
{
|
||||||
string separator;
|
string separator;
|
||||||
|
|||||||
287
src/Runner.Listener/RunnerConfigUpdater.cs
Normal file
287
src/Runner.Listener/RunnerConfigUpdater.cs
Normal file
@@ -0,0 +1,287 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.IO;
|
||||||
|
using System.Text;
|
||||||
|
using System.Threading;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using GitHub.Runner.Common;
|
||||||
|
using GitHub.Runner.Sdk;
|
||||||
|
using GitHub.Services.Common;
|
||||||
|
|
||||||
|
namespace GitHub.Runner.Listener
{
    [ServiceLocator(Default = typeof(RunnerConfigUpdater))]
    public interface IRunnerConfigUpdater : IRunnerService
    {
        Task UpdateRunnerConfigAsync(string runnerQualifiedId, string configType, string serviceType, string configRefreshUrl);
    }

    // Exchanges the on-disk runner configuration (settings or OAuth credentials) with the
    // service and persists any refreshed version as a separate "migrated" file. All failures
    // are contained here and surfaced only as trace output plus best-effort telemetry.
    public sealed class RunnerConfigUpdater : RunnerService, IRunnerConfigUpdater
    {
        private RunnerSettings _settings;
        private CredentialData _credData;
        private IRunnerServer _runnerServer;
        private IConfigurationStore _store;

        public override void Initialize(IHostContext hostContext)
        {
            base.Initialize(hostContext);

            // Snapshot the configuration this process started with; refreshed configs are
            // validated against this snapshot before being saved.
            _store = hostContext.GetService<IConfigurationStore>();
            _settings = _store.GetSettings();
            _credData = _store.GetCredentials();
            _runnerServer = HostContext.GetService<IRunnerServer>();
        }

        // Entry point: validates the request, then dispatches on configType
        // ("runner" or "credentials"). Never throws — any failure is traced and reported.
        public async Task UpdateRunnerConfigAsync(string runnerQualifiedId, string configType, string serviceType, string configRefreshUrl)
        {
            Trace.Entering();
            try
            {
                ArgUtil.NotNullOrEmpty(runnerQualifiedId, nameof(runnerQualifiedId));
                ArgUtil.NotNullOrEmpty(configType, nameof(configType));
                ArgUtil.NotNullOrEmpty(serviceType, nameof(serviceType));
                ArgUtil.NotNullOrEmpty(configRefreshUrl, nameof(configRefreshUrl));

                // make sure the runner qualified id matches the current runner
                if (!await VerifyRunnerQualifiedId(runnerQualifiedId))
                {
                    return;
                }

                // keep the timeout short to avoid blocking the main thread
                using (var timeoutSource = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
                {
                    var normalizedConfigType = configType.ToLowerInvariant();
                    if (normalizedConfigType == "runner")
                    {
                        await UpdateRunnerSettingsAsync(serviceType, configRefreshUrl, timeoutSource.Token);
                    }
                    else if (normalizedConfigType == "credentials")
                    {
                        await UpdateRunnerCredentialsAsync(serviceType, configRefreshUrl, timeoutSource.Token);
                    }
                    else
                    {
                        Trace.Error($"Invalid config type '{configType}'.");
                        await ReportTelemetryAsync($"Invalid config type '{configType}'.");
                        return;
                    }
                }
            }
            catch (Exception ex)
            {
                // A config-refresh failure must never take down the listener.
                Trace.Error($"Failed to update runner '{configType}' config.");
                Trace.Error(ex);
                await ReportTelemetryAsync($"Failed to update runner '{configType}' config: {ex}");
            }
        }

        // Sends the current .runner settings file (base64-encoded) to the service; if the
        // service returns a changed config, validates id/name and saves it as migrated settings.
        private async Task UpdateRunnerSettingsAsync(string serviceType, string configRefreshUrl, CancellationToken token)
        {
            Trace.Entering();

            // read the current runner settings and encode with base64
            var settingsFile = HostContext.GetConfigFile(WellKnownConfigFile.Runner);
            var settingsContent = File.ReadAllText(settingsFile, Encoding.UTF8);
            var encodedSettings = Convert.ToBase64String(Encoding.UTF8.GetBytes(settingsContent));
            if (string.IsNullOrEmpty(encodedSettings))
            {
                await ReportTelemetryAsync("Failed to get encoded runner settings.");
                return;
            }

            // exchange the encoded runner settings with the service
            var refreshedEncodedSettings = await RefreshRunnerConfigAsync(encodedSettings, serviceType, "runner", configRefreshUrl, token);
            if (string.IsNullOrEmpty(refreshedEncodedSettings))
            {
                // service will return empty string if there is no change in the config
                return;
            }

            var refreshedJson = Encoding.UTF8.GetString(Convert.FromBase64String(refreshedEncodedSettings));
            RunnerSettings refreshedSettings;
            try
            {
                refreshedSettings = StringUtil.ConvertFromJson<RunnerSettings>(refreshedJson);
            }
            catch (Exception ex)
            {
                Trace.Error($"Failed to convert runner config from json '{refreshedJson}'.");
                Trace.Error(ex);
                await ReportTelemetryAsync($"Failed to convert runner config '{refreshedJson}' from json: {ex}");
                return;
            }

            // make sure the runner id and name in the refreshed config match the current runner
            if (refreshedSettings?.AgentId != _settings.AgentId)
            {
                Trace.Error($"Runner id in refreshed config '{refreshedSettings?.AgentId.ToString() ?? "Empty"}' does not match the current runner '{_settings.AgentId}'.");
                await ReportTelemetryAsync($"Runner id in refreshed config '{refreshedSettings?.AgentId.ToString() ?? "Empty"}' does not match the current runner '{_settings.AgentId}'.");
                return;
            }

            if (refreshedSettings?.AgentName != _settings.AgentName)
            {
                Trace.Error($"Runner name in refreshed config '{refreshedSettings?.AgentName ?? "Empty"}' does not match the current runner '{_settings.AgentName}'.");
                await ReportTelemetryAsync($"Runner name in refreshed config '{refreshedSettings?.AgentName ?? "Empty"}' does not match the current runner '{_settings.AgentName}'.");
                return;
            }

            // save the refreshed runner settings as a separate file
            _store.SaveMigratedSettings(refreshedSettings);
            await ReportTelemetryAsync("Runner settings updated successfully.");
        }

        // Sends the current credentials file (base64-encoded) to the service; if the service
        // returns a changed config, validates scheme/clientId/authorizationUrl and saves it
        // as the migrated credential, toggling auth migration based on authorizationUrlV2.
        private async Task UpdateRunnerCredentialsAsync(string serviceType, string configRefreshUrl, CancellationToken token)
        {
            Trace.Entering();

            // read the current runner credentials and encode with base64
            var credFile = HostContext.GetConfigFile(WellKnownConfigFile.Credentials);
            var credContent = File.ReadAllText(credFile, Encoding.UTF8);
            var encodedCred = Convert.ToBase64String(Encoding.UTF8.GetBytes(credContent));
            if (string.IsNullOrEmpty(encodedCred))
            {
                await ReportTelemetryAsync("Failed to get encoded credentials.");
                return;
            }

            var currentCred = _store.GetCredentials();
            if (currentCred == null)
            {
                await ReportTelemetryAsync("Failed to get current credentials.");
                return;
            }

            // we only support refreshing OAuth credentials which is used by self-hosted runners.
            if (currentCred.Scheme != Constants.Configuration.OAuth)
            {
                await ReportTelemetryAsync($"Not supported credential scheme '{currentCred.Scheme}'.");
                return;
            }

            // exchange the encoded runner credentials with the service
            var refreshedEncodedCred = await RefreshRunnerConfigAsync(encodedCred, serviceType, "credentials", configRefreshUrl, token);
            if (string.IsNullOrEmpty(refreshedEncodedCred))
            {
                // service will return empty string if there is no change in the config
                return;
            }

            var refreshedJson = Encoding.UTF8.GetString(Convert.FromBase64String(refreshedEncodedCred));
            CredentialData refreshedCred;
            try
            {
                refreshedCred = StringUtil.ConvertFromJson<CredentialData>(refreshedJson);
            }
            catch (Exception ex)
            {
                Trace.Error($"Failed to convert credentials config from json '{refreshedJson}'.");
                Trace.Error(ex);
                await ReportTelemetryAsync($"Failed to convert credentials config '{refreshedJson}' from json: {ex}");
                return;
            }

            // make sure the credential scheme in the refreshed config match the current credential scheme
            if (refreshedCred?.Scheme != _credData.Scheme)
            {
                Trace.Error($"Credential scheme in refreshed config '{refreshedCred?.Scheme ?? "Empty"}' does not match the current credential scheme '{_credData.Scheme}'.");
                await ReportTelemetryAsync($"Credential scheme in refreshed config '{refreshedCred?.Scheme ?? "Empty"}' does not match the current credential scheme '{_credData.Scheme}'.");
                return;
            }

            if (_credData.Scheme == Constants.Configuration.OAuth)
            {
                // make sure the credential clientId in the refreshed config match the current credential clientId for OAuth auth scheme
                var currentClientId = _credData.Data.GetValueOrDefault("clientId", null);
                var refreshedClientId = refreshedCred.Data.GetValueOrDefault("clientId", null);
                if (currentClientId != refreshedClientId)
                {
                    Trace.Error($"Credential clientId in refreshed config '{refreshedClientId ?? "Empty"}' does not match the current credential clientId '{currentClientId}'.");
                    await ReportTelemetryAsync($"Credential clientId in refreshed config '{refreshedClientId ?? "Empty"}' does not match the current credential clientId '{currentClientId}'.");
                    return;
                }

                // make sure the credential authorizationUrl in the refreshed config match the current credential authorizationUrl for OAuth auth scheme
                var currentAuthUrl = _credData.Data.GetValueOrDefault("authorizationUrl", null);
                var refreshedAuthUrl = refreshedCred.Data.GetValueOrDefault("authorizationUrl", null);
                if (currentAuthUrl != refreshedAuthUrl)
                {
                    Trace.Error($"Credential authorizationUrl in refreshed config '{refreshedAuthUrl ?? "Empty"}' does not match the current credential authorizationUrl '{currentAuthUrl}'.");
                    await ReportTelemetryAsync($"Credential authorizationUrl in refreshed config '{refreshedAuthUrl ?? "Empty"}' does not match the current credential authorizationUrl '{currentAuthUrl}'.");
                    return;
                }
            }

            // save the refreshed runner credentials as a separate file
            _store.SaveMigratedCredential(refreshedCred);

            if (refreshedCred.Data.ContainsKey("authorizationUrlV2"))
            {
                HostContext.EnableAuthMigration("Credential file updated");
                await ReportTelemetryAsync("Runner credentials updated successfully. Auth migration is enabled.");
            }
            else
            {
                HostContext.DeferAuthMigration(TimeSpan.FromDays(365), "Credential file does not contain authorizationUrlV2");
                await ReportTelemetryAsync("Runner credentials updated successfully. Auth migration is disabled.");
            }
        }

        // Returns true only when the qualified id has four '/'-separated parts and the last
        // part equals this runner's agent id; otherwise reports telemetry and returns false.
        private async Task<bool> VerifyRunnerQualifiedId(string runnerQualifiedId)
        {
            Trace.Entering();
            Trace.Info($"Verifying runner qualified id: {runnerQualifiedId}");

            var parts = runnerQualifiedId.Split("/", StringSplitOptions.RemoveEmptyEntries);
            if (parts.Length != 4 || parts[3] != _settings.AgentId.ToString())
            {
                Trace.Error($"Runner qualified id '{runnerQualifiedId}' does not match the current runner '{_settings.AgentId}'.");
                await ReportTelemetryAsync($"Runner qualified id '{runnerQualifiedId}' does not match the current runner '{_settings.AgentId}'.");
                return false;
            }

            return true;
        }

        // Performs the actual exchange with the service. Returns the refreshed base64 config,
        // or null when the service type is invalid or the exchange fails.
        private async Task<string> RefreshRunnerConfigAsync(string encodedConfig, string serviceType, string configType, string configRefreshUrl, CancellationToken token)
        {
            var normalizedServiceType = serviceType.ToLowerInvariant();
            if (normalizedServiceType == "pipelines")
            {
                try
                {
                    return await _runnerServer.RefreshRunnerConfigAsync((int)_settings.AgentId, configType, encodedConfig, token);
                }
                catch (Exception ex)
                {
                    Trace.Error($"Failed to refresh runner {configType} config with service.");
                    Trace.Error(ex);
                    await ReportTelemetryAsync($"Failed to refresh {configType} config: {ex}");
                    return null;
                }
            }

            if (normalizedServiceType == "runner-admin")
            {
                throw new NotSupportedException("Runner admin service is not supported.");
            }

            Trace.Error($"Invalid service type '{serviceType}'.");
            await ReportTelemetryAsync($"Invalid service type '{serviceType}'.");
            return null;
        }

        // Best-effort telemetry via the agent-update-state endpoint; failures are traced
        // and swallowed so telemetry can never break a config refresh.
        private async Task ReportTelemetryAsync(string telemetry)
        {
            Trace.Entering();
            try
            {
                using (var timeoutSource = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
                {
                    await _runnerServer.UpdateAgentUpdateStateAsync(_settings.PoolId, _settings.AgentId, "RefreshConfig", telemetry, timeoutSource.Token);
                }
            }
            catch (Exception ex)
            {
                Trace.Error("Failed to report telemetry.");
                Trace.Error(ex);
            }
        }
    }
}
|
||||||
@@ -7,9 +7,17 @@ namespace GitHub.Runner.Listener
|
|||||||
{
|
{
|
||||||
[DataMember(Name = "id")]
|
[DataMember(Name = "id")]
|
||||||
public string Id { get; set; }
|
public string Id { get; set; }
|
||||||
|
|
||||||
[DataMember(Name = "runner_request_id")]
|
[DataMember(Name = "runner_request_id")]
|
||||||
public string RunnerRequestId { get; set; }
|
public string RunnerRequestId { get; set; }
|
||||||
|
|
||||||
|
[DataMember(Name = "should_acknowledge")]
|
||||||
|
public bool ShouldAcknowledge { get; set; }
|
||||||
|
|
||||||
[DataMember(Name = "run_service_url")]
|
[DataMember(Name = "run_service_url")]
|
||||||
public string RunServiceUrl { get; set; }
|
public string RunServiceUrl { get; set; }
|
||||||
|
|
||||||
|
[DataMember(Name = "billing_owner_id")]
|
||||||
|
public string BillingOwnerId { get; set; }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -38,6 +38,7 @@ namespace GitHub.Runner.Sdk
|
|||||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
||||||
{
|
{
|
||||||
VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
|
VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
|
||||||
|
RawClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
|
||||||
}
|
}
|
||||||
|
|
||||||
var rawHeaderValues = new List<ProductInfoHeaderValue>();
|
var rawHeaderValues = new List<ProductInfoHeaderValue>();
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
<?xml version="1.0" encoding="utf-8" ?>
|
<?xml version="1.0" encoding="utf-8" ?>
|
||||||
<configuration>
|
<configuration>
|
||||||
<startup>
|
<startup>
|
||||||
<supportedRuntime version="v4.0" sku=".NETFramework,Version=v4.5" />
|
<supportedRuntime version="v4.0" sku=".NETFramework,Version=v4.7" />
|
||||||
</startup>
|
</startup>
|
||||||
</configuration>
|
</configuration>
|
||||||
|
|||||||
@@ -18,7 +18,7 @@
|
|||||||
<TargetFrameworkVersion>v4.8</TargetFrameworkVersion>
|
<TargetFrameworkVersion>v4.8</TargetFrameworkVersion>
|
||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
<PropertyGroup Condition=" '$(PackageRuntime)' != 'win-arm64' ">
|
<PropertyGroup Condition=" '$(PackageRuntime)' != 'win-arm64' ">
|
||||||
<TargetFrameworkVersion>v4.5</TargetFrameworkVersion>
|
<TargetFrameworkVersion>v4.7</TargetFrameworkVersion>
|
||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
|
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
|
||||||
<PlatformTarget>AnyCPU</PlatformTarget>
|
<PlatformTarget>AnyCPU</PlatformTarget>
|
||||||
|
|||||||
@@ -111,7 +111,7 @@ namespace GitHub.Runner.Worker
|
|||||||
{
|
{
|
||||||
// Log the error and fail the PrepareActionsAsync Initialization.
|
// Log the error and fail the PrepareActionsAsync Initialization.
|
||||||
Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
|
Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
|
||||||
executionContext.InfrastructureError(ex.Message);
|
executionContext.InfrastructureError(ex.Message, category: "resolve_action");
|
||||||
executionContext.Result = TaskResult.Failed;
|
executionContext.Result = TaskResult.Failed;
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
@@ -119,7 +119,7 @@ namespace GitHub.Runner.Worker
|
|||||||
{
|
{
|
||||||
// Log the error and fail the PrepareActionsAsync Initialization.
|
// Log the error and fail the PrepareActionsAsync Initialization.
|
||||||
Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
|
Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
|
||||||
executionContext.InfrastructureError(ex.Message);
|
executionContext.InfrastructureError(ex.Message, category: "invalid_action_download");
|
||||||
executionContext.Result = TaskResult.Failed;
|
executionContext.Result = TaskResult.Failed;
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
@@ -378,7 +378,7 @@ namespace GitHub.Runner.Worker
|
|||||||
string dockerFileLowerCase = Path.Combine(actionDirectory, "dockerfile");
|
string dockerFileLowerCase = Path.Combine(actionDirectory, "dockerfile");
|
||||||
if (File.Exists(manifestFile) || File.Exists(manifestFileYaml))
|
if (File.Exists(manifestFile) || File.Exists(manifestFileYaml))
|
||||||
{
|
{
|
||||||
var manifestManager = HostContext.GetService<IActionManifestManager>();
|
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
|
||||||
if (File.Exists(manifestFile))
|
if (File.Exists(manifestFile))
|
||||||
{
|
{
|
||||||
definition.Data = manifestManager.Load(executionContext, manifestFile);
|
definition.Data = manifestManager.Load(executionContext, manifestFile);
|
||||||
@@ -688,7 +688,8 @@ namespace GitHub.Runner.Worker
|
|||||||
{
|
{
|
||||||
if (MessageUtil.IsRunServiceJob(executionContext.Global.Variables.Get(Constants.Variables.System.JobRequestType)))
|
if (MessageUtil.IsRunServiceJob(executionContext.Global.Variables.Get(Constants.Variables.System.JobRequestType)))
|
||||||
{
|
{
|
||||||
actionDownloadInfos = await launchServer.ResolveActionsDownloadInfoAsync(executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
|
var displayHelpfulActionsDownloadErrors = executionContext.Global.Variables.GetBoolean(Constants.Runner.Features.DisplayHelpfulActionsDownloadErrors) ?? false;
|
||||||
|
actionDownloadInfos = await launchServer.ResolveActionsDownloadInfoAsync(executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken, displayHelpfulActionsDownloadErrors);
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
@@ -963,7 +964,7 @@ namespace GitHub.Runner.Worker
|
|||||||
if (File.Exists(actionManifest) || File.Exists(actionManifestYaml))
|
if (File.Exists(actionManifest) || File.Exists(actionManifestYaml))
|
||||||
{
|
{
|
||||||
executionContext.Debug($"action.yml for action: '{actionManifest}'.");
|
executionContext.Debug($"action.yml for action: '{actionManifest}'.");
|
||||||
var manifestManager = HostContext.GetService<IActionManifestManager>();
|
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
|
||||||
ActionDefinitionData actionDefinitionData = null;
|
ActionDefinitionData actionDefinitionData = null;
|
||||||
if (File.Exists(actionManifest))
|
if (File.Exists(actionManifest))
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -2,29 +2,29 @@
|
|||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
using System.IO;
|
using System.IO;
|
||||||
using System.Threading;
|
using System.Threading;
|
||||||
|
using System.Reflection;
|
||||||
|
using System.Linq;
|
||||||
using GitHub.Runner.Common;
|
using GitHub.Runner.Common;
|
||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
using System.Reflection;
|
using GitHub.Actions.WorkflowParser;
|
||||||
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
using GitHub.Actions.WorkflowParser.Conversion;
|
||||||
using GitHub.DistributedTask.ObjectTemplating.Schema;
|
using GitHub.Actions.WorkflowParser.ObjectTemplating;
|
||||||
using GitHub.DistributedTask.ObjectTemplating;
|
using GitHub.Actions.WorkflowParser.ObjectTemplating.Schema;
|
||||||
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
|
||||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
using GitHub.Actions.Expressions.Data;
|
||||||
using System.Linq;
|
|
||||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
|
||||||
|
|
||||||
namespace GitHub.Runner.Worker
|
namespace GitHub.Runner.Worker
|
||||||
{
|
{
|
||||||
[ServiceLocator(Default = typeof(ActionManifestManager))]
|
[ServiceLocator(Default = typeof(ActionManifestManager))]
|
||||||
public interface IActionManifestManager : IRunnerService
|
public interface IActionManifestManager : IRunnerService
|
||||||
{
|
{
|
||||||
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
|
public ActionDefinitionDataNew Load(IExecutionContext executionContext, string manifestFile);
|
||||||
|
|
||||||
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
DictionaryExpressionData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, ExpressionData> extraExpressionValues);
|
||||||
|
|
||||||
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, ExpressionData> extraExpressionValues);
|
||||||
|
|
||||||
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, ExpressionData> extraExpressionValues);
|
||||||
|
|
||||||
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
|
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
|
||||||
}
|
}
|
||||||
@@ -50,10 +50,10 @@ namespace GitHub.Runner.Worker
|
|||||||
Trace.Info($"Load schema file with definitions: {StringUtil.ConvertToJson(_actionManifestSchema.Definitions.Keys)}");
|
Trace.Info($"Load schema file with definitions: {StringUtil.ConvertToJson(_actionManifestSchema.Definitions.Keys)}");
|
||||||
}
|
}
|
||||||
|
|
||||||
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
|
public ActionDefinitionDataNew Load(IExecutionContext executionContext, string manifestFile)
|
||||||
{
|
{
|
||||||
var templateContext = CreateTemplateContext(executionContext);
|
var templateContext = CreateTemplateContext(executionContext);
|
||||||
ActionDefinitionData actionDefinition = new();
|
ActionDefinitionDataNew actionDefinition = new();
|
||||||
|
|
||||||
// Clean up file name real quick
|
// Clean up file name real quick
|
||||||
// Instead of using Regex which can be computationally expensive,
|
// Instead of using Regex which can be computationally expensive,
|
||||||
@@ -160,21 +160,21 @@ namespace GitHub.Runner.Worker
|
|||||||
return actionDefinition;
|
return actionDefinition;
|
||||||
}
|
}
|
||||||
|
|
||||||
public DictionaryContextData EvaluateCompositeOutputs(
|
public DictionaryExpressionData EvaluateCompositeOutputs(
|
||||||
IExecutionContext executionContext,
|
IExecutionContext executionContext,
|
||||||
TemplateToken token,
|
TemplateToken token,
|
||||||
IDictionary<string, PipelineContextData> extraExpressionValues)
|
IDictionary<string, ExpressionData> extraExpressionValues)
|
||||||
{
|
{
|
||||||
var result = default(DictionaryContextData);
|
DictionaryExpressionData result = null;
|
||||||
|
|
||||||
if (token != null)
|
if (token != null)
|
||||||
{
|
{
|
||||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null, omitHeader: true);
|
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null);
|
||||||
templateContext.Errors.Check();
|
templateContext.Errors.Check();
|
||||||
result = token.ToContextData().AssertDictionary("composite outputs");
|
result = token.ToExpressionData().AssertDictionary("composite outputs");
|
||||||
}
|
}
|
||||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||||
{
|
{
|
||||||
@@ -184,13 +184,13 @@ namespace GitHub.Runner.Worker
|
|||||||
templateContext.Errors.Check();
|
templateContext.Errors.Check();
|
||||||
}
|
}
|
||||||
|
|
||||||
return result ?? new DictionaryContextData();
|
return result ?? new DictionaryExpressionData();
|
||||||
}
|
}
|
||||||
|
|
||||||
public List<string> EvaluateContainerArguments(
|
public List<string> EvaluateContainerArguments(
|
||||||
IExecutionContext executionContext,
|
IExecutionContext executionContext,
|
||||||
SequenceToken token,
|
SequenceToken token,
|
||||||
IDictionary<string, PipelineContextData> extraExpressionValues)
|
IDictionary<string, ExpressionData> extraExpressionValues)
|
||||||
{
|
{
|
||||||
var result = new List<string>();
|
var result = new List<string>();
|
||||||
|
|
||||||
@@ -199,7 +199,7 @@ namespace GitHub.Runner.Worker
|
|||||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null, omitHeader: true);
|
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null);
|
||||||
templateContext.Errors.Check();
|
templateContext.Errors.Check();
|
||||||
|
|
||||||
Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||||
@@ -229,7 +229,7 @@ namespace GitHub.Runner.Worker
|
|||||||
public Dictionary<string, string> EvaluateContainerEnvironment(
|
public Dictionary<string, string> EvaluateContainerEnvironment(
|
||||||
IExecutionContext executionContext,
|
IExecutionContext executionContext,
|
||||||
MappingToken token,
|
MappingToken token,
|
||||||
IDictionary<string, PipelineContextData> extraExpressionValues)
|
IDictionary<string, ExpressionData> extraExpressionValues)
|
||||||
{
|
{
|
||||||
var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||||
|
|
||||||
@@ -238,7 +238,7 @@ namespace GitHub.Runner.Worker
|
|||||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null, omitHeader: true);
|
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null);
|
||||||
templateContext.Errors.Check();
|
templateContext.Errors.Check();
|
||||||
|
|
||||||
Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||||
@@ -281,7 +281,7 @@ namespace GitHub.Runner.Worker
|
|||||||
var templateContext = CreateTemplateContext(executionContext);
|
var templateContext = CreateTemplateContext(executionContext);
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null, omitHeader: true);
|
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null);
|
||||||
templateContext.Errors.Check();
|
templateContext.Errors.Check();
|
||||||
|
|
||||||
Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||||
@@ -303,7 +303,7 @@ namespace GitHub.Runner.Worker
|
|||||||
|
|
||||||
private TemplateContext CreateTemplateContext(
|
private TemplateContext CreateTemplateContext(
|
||||||
IExecutionContext executionContext,
|
IExecutionContext executionContext,
|
||||||
IDictionary<string, PipelineContextData> extraExpressionValues = null)
|
IDictionary<string, ExpressionData> extraExpressionValues = null)
|
||||||
{
|
{
|
||||||
var result = new TemplateContext
|
var result = new TemplateContext
|
||||||
{
|
{
|
||||||
@@ -314,13 +314,18 @@ namespace GitHub.Runner.Worker
|
|||||||
maxEvents: 1000000,
|
maxEvents: 1000000,
|
||||||
maxBytes: 10 * 1024 * 1024),
|
maxBytes: 10 * 1024 * 1024),
|
||||||
Schema = _actionManifestSchema,
|
Schema = _actionManifestSchema,
|
||||||
TraceWriter = executionContext.ToTemplateTraceWriter(),
|
// TODO: Switch to real tracewriter for cutover
|
||||||
|
TraceWriter = new GitHub.Actions.WorkflowParser.ObjectTemplating.EmptyTraceWriter(),
|
||||||
|
AllowCaseFunction = false,
|
||||||
};
|
};
|
||||||
|
|
||||||
// Expression values from execution context
|
// Expression values from execution context
|
||||||
foreach (var pair in executionContext.ExpressionValues)
|
foreach (var pair in executionContext.ExpressionValues)
|
||||||
{
|
{
|
||||||
result.ExpressionValues[pair.Key] = pair.Value;
|
// Convert old PipelineContextData to new ExpressionData
|
||||||
|
var json = StringUtil.ConvertToJson(pair.Value, Newtonsoft.Json.Formatting.None);
|
||||||
|
var newValue = StringUtil.ConvertFromJson<GitHub.Actions.Expressions.Data.ExpressionData>(json);
|
||||||
|
result.ExpressionValues[pair.Key] = newValue;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Extra expression values
|
// Extra expression values
|
||||||
@@ -332,10 +337,19 @@ namespace GitHub.Runner.Worker
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Expression functions from execution context
|
// Expression functions
|
||||||
foreach (var item in executionContext.ExpressionFunctions)
|
foreach (var func in executionContext.ExpressionFunctions)
|
||||||
{
|
{
|
||||||
result.ExpressionFunctions.Add(item);
|
GitHub.Actions.Expressions.IFunctionInfo newFunc = func.Name switch
|
||||||
|
{
|
||||||
|
"always" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewAlwaysFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||||
|
"cancelled" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewCancelledFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||||
|
"failure" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewFailureFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||||
|
"success" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewSuccessFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||||
|
"hashFiles" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewHashFilesFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||||
|
_ => throw new NotSupportedException($"Expression function '{func.Name}' is not supported in ActionManifestManager")
|
||||||
|
};
|
||||||
|
result.ExpressionFunctions.Add(newFunc);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add the file table from the Execution Context
|
// Add the file table from the Execution Context
|
||||||
@@ -368,7 +382,7 @@ namespace GitHub.Runner.Worker
|
|||||||
var postToken = default(StringToken);
|
var postToken = default(StringToken);
|
||||||
var postEntrypointToken = default(StringToken);
|
var postEntrypointToken = default(StringToken);
|
||||||
var postIfToken = default(StringToken);
|
var postIfToken = default(StringToken);
|
||||||
var steps = default(List<Pipelines.Step>);
|
var steps = default(List<GitHub.Actions.WorkflowParser.IStep>);
|
||||||
|
|
||||||
foreach (var run in runsMapping)
|
foreach (var run in runsMapping)
|
||||||
{
|
{
|
||||||
@@ -416,7 +430,7 @@ namespace GitHub.Runner.Worker
|
|||||||
break;
|
break;
|
||||||
case "steps":
|
case "steps":
|
||||||
var stepsToken = run.Value.AssertSequence("steps");
|
var stepsToken = run.Value.AssertSequence("steps");
|
||||||
steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
|
steps = WorkflowTemplateConverter.ConvertToSteps(templateContext, stepsToken);
|
||||||
templateContext.Errors.Check();
|
templateContext.Errors.Check();
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
@@ -435,7 +449,7 @@ namespace GitHub.Runner.Worker
|
|||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
return new ContainerActionExecutionData()
|
return new ContainerActionExecutionDataNew()
|
||||||
{
|
{
|
||||||
Image = imageToken.Value,
|
Image = imageToken.Value,
|
||||||
Arguments = argsToken,
|
Arguments = argsToken,
|
||||||
@@ -450,7 +464,8 @@ namespace GitHub.Runner.Worker
|
|||||||
}
|
}
|
||||||
else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase) ||
|
else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase) ||
|
||||||
string.Equals(usingToken.Value, "node16", StringComparison.OrdinalIgnoreCase) ||
|
string.Equals(usingToken.Value, "node16", StringComparison.OrdinalIgnoreCase) ||
|
||||||
string.Equals(usingToken.Value, "node20", StringComparison.OrdinalIgnoreCase))
|
string.Equals(usingToken.Value, "node20", StringComparison.OrdinalIgnoreCase) ||
|
||||||
|
string.Equals(usingToken.Value, "node24", StringComparison.OrdinalIgnoreCase))
|
||||||
{
|
{
|
||||||
if (string.IsNullOrEmpty(mainToken?.Value))
|
if (string.IsNullOrEmpty(mainToken?.Value))
|
||||||
{
|
{
|
||||||
@@ -477,11 +492,11 @@ namespace GitHub.Runner.Worker
|
|||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
return new CompositeActionExecutionData()
|
return new CompositeActionExecutionDataNew()
|
||||||
{
|
{
|
||||||
Steps = steps.Cast<Pipelines.ActionStep>().ToList(),
|
Steps = steps,
|
||||||
PreSteps = new List<Pipelines.ActionStep>(),
|
PreSteps = new List<GitHub.Actions.WorkflowParser.IStep>(),
|
||||||
PostSteps = new Stack<Pipelines.ActionStep>(),
|
PostSteps = new Stack<GitHub.Actions.WorkflowParser.IStep>(),
|
||||||
InitCondition = "always()",
|
InitCondition = "always()",
|
||||||
CleanupCondition = "always()",
|
CleanupCondition = "always()",
|
||||||
Outputs = outputs
|
Outputs = outputs
|
||||||
@@ -490,7 +505,7 @@ namespace GitHub.Runner.Worker
|
|||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker', 'node12', 'node16' or 'node20' instead.");
|
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker', 'node12', 'node16', 'node20' or 'node24' instead.");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else if (pluginToken != null)
|
else if (pluginToken != null)
|
||||||
@@ -501,12 +516,12 @@ namespace GitHub.Runner.Worker
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new NotSupportedException("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16' or 'node20'.");
|
throw new NotSupportedException("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16', 'node20' or 'node24'.");
|
||||||
}
|
}
|
||||||
|
|
||||||
private void ConvertInputs(
|
private void ConvertInputs(
|
||||||
TemplateToken inputsToken,
|
TemplateToken inputsToken,
|
||||||
ActionDefinitionData actionDefinition)
|
ActionDefinitionDataNew actionDefinition)
|
||||||
{
|
{
|
||||||
actionDefinition.Inputs = new MappingToken(null, null, null);
|
actionDefinition.Inputs = new MappingToken(null, null, null);
|
||||||
var inputsMapping = inputsToken.AssertMapping("inputs");
|
var inputsMapping = inputsToken.AssertMapping("inputs");
|
||||||
@@ -541,5 +556,49 @@ namespace GitHub.Runner.Worker
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public sealed class ActionDefinitionDataNew
|
||||||
|
{
|
||||||
|
public string Name { get; set; }
|
||||||
|
|
||||||
|
public string Description { get; set; }
|
||||||
|
|
||||||
|
public MappingToken Inputs { get; set; }
|
||||||
|
|
||||||
|
public ActionExecutionData Execution { get; set; }
|
||||||
|
|
||||||
|
public Dictionary<String, String> Deprecated { get; set; }
|
||||||
|
}
|
||||||
|
|
||||||
|
public sealed class ContainerActionExecutionDataNew : ActionExecutionData
|
||||||
|
{
|
||||||
|
public override ActionExecutionType ExecutionType => ActionExecutionType.Container;
|
||||||
|
|
||||||
|
public override bool HasPre => !string.IsNullOrEmpty(Pre);
|
||||||
|
public override bool HasPost => !string.IsNullOrEmpty(Post);
|
||||||
|
|
||||||
|
public string Image { get; set; }
|
||||||
|
|
||||||
|
public string EntryPoint { get; set; }
|
||||||
|
|
||||||
|
public SequenceToken Arguments { get; set; }
|
||||||
|
|
||||||
|
public MappingToken Environment { get; set; }
|
||||||
|
|
||||||
|
public string Pre { get; set; }
|
||||||
|
|
||||||
|
public string Post { get; set; }
|
||||||
|
}
|
||||||
|
|
||||||
|
public sealed class CompositeActionExecutionDataNew : ActionExecutionData
|
||||||
|
{
|
||||||
|
public override ActionExecutionType ExecutionType => ActionExecutionType.Composite;
|
||||||
|
public override bool HasPre => PreSteps.Count > 0;
|
||||||
|
public override bool HasPost => PostSteps.Count > 0;
|
||||||
|
public List<GitHub.Actions.WorkflowParser.IStep> PreSteps { get; set; }
|
||||||
|
public List<GitHub.Actions.WorkflowParser.IStep> Steps { get; set; }
|
||||||
|
public Stack<GitHub.Actions.WorkflowParser.IStep> PostSteps { get; set; }
|
||||||
|
public MappingToken Outputs { get; set; }
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
547
src/Runner.Worker/ActionManifestManagerLegacy.cs
Normal file
547
src/Runner.Worker/ActionManifestManagerLegacy.cs
Normal file
@@ -0,0 +1,547 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.IO;
|
||||||
|
using System.Threading;
|
||||||
|
using GitHub.Runner.Common;
|
||||||
|
using GitHub.Runner.Sdk;
|
||||||
|
using System.Reflection;
|
||||||
|
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
||||||
|
using GitHub.DistributedTask.ObjectTemplating.Schema;
|
||||||
|
using GitHub.DistributedTask.ObjectTemplating;
|
||||||
|
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||||
|
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||||
|
using System.Linq;
|
||||||
|
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||||
|
|
||||||
|
namespace GitHub.Runner.Worker
|
||||||
|
{
|
||||||
|
[ServiceLocator(Default = typeof(ActionManifestManagerLegacy))]
|
||||||
|
public interface IActionManifestManagerLegacy : IRunnerService
|
||||||
|
{
|
||||||
|
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
|
||||||
|
|
||||||
|
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||||
|
|
||||||
|
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||||
|
|
||||||
|
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||||
|
|
||||||
|
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
|
||||||
|
}
|
||||||
|
|
||||||
|
public sealed class ActionManifestManagerLegacy : RunnerService, IActionManifestManagerLegacy
|
||||||
|
{
|
||||||
|
private TemplateSchema _actionManifestSchema;
|
||||||
|
public override void Initialize(IHostContext hostContext)
|
||||||
|
{
|
||||||
|
base.Initialize(hostContext);
|
||||||
|
|
||||||
|
var assembly = Assembly.GetExecutingAssembly();
|
||||||
|
var json = default(string);
|
||||||
|
using (var stream = assembly.GetManifestResourceStream("GitHub.Runner.Worker.action_yaml.json"))
|
||||||
|
using (var streamReader = new StreamReader(stream))
|
||||||
|
{
|
||||||
|
json = streamReader.ReadToEnd();
|
||||||
|
}
|
||||||
|
|
||||||
|
var objectReader = new JsonObjectReader(null, json);
|
||||||
|
_actionManifestSchema = TemplateSchema.Load(objectReader);
|
||||||
|
ArgUtil.NotNull(_actionManifestSchema, nameof(_actionManifestSchema));
|
||||||
|
Trace.Info($"Load schema file with definitions: {StringUtil.ConvertToJson(_actionManifestSchema.Definitions.Keys)}");
|
||||||
|
}
|
||||||
|
|
||||||
|
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
|
||||||
|
{
|
||||||
|
var templateContext = CreateTemplateContext(executionContext);
|
||||||
|
ActionDefinitionData actionDefinition = new();
|
||||||
|
|
||||||
|
// Clean up file name real quick
|
||||||
|
// Instead of using Regex which can be computationally expensive,
|
||||||
|
// we can just remove the # of characters from the fileName according to the length of the basePath
|
||||||
|
string basePath = HostContext.GetDirectory(WellKnownDirectory.Actions);
|
||||||
|
string fileRelativePath = manifestFile;
|
||||||
|
if (manifestFile.Contains(basePath))
|
||||||
|
{
|
||||||
|
fileRelativePath = manifestFile.Remove(0, basePath.Length + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
var token = default(TemplateToken);
|
||||||
|
|
||||||
|
// Get the file ID
|
||||||
|
var fileId = templateContext.GetFileId(fileRelativePath);
|
||||||
|
|
||||||
|
// Add this file to the FileTable in executionContext if it hasn't been added already
|
||||||
|
// we use > since fileID is 1 indexed
|
||||||
|
if (fileId > executionContext.Global.FileTable.Count)
|
||||||
|
{
|
||||||
|
executionContext.Global.FileTable.Add(fileRelativePath);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read the file
|
||||||
|
var fileContent = File.ReadAllText(manifestFile);
|
||||||
|
using (var stringReader = new StringReader(fileContent))
|
||||||
|
{
|
||||||
|
var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
|
||||||
|
token = TemplateReader.Read(templateContext, "action-root", yamlObjectReader, fileId, out _);
|
||||||
|
}
|
||||||
|
|
||||||
|
var actionMapping = token.AssertMapping("action manifest root");
|
||||||
|
var actionOutputs = default(MappingToken);
|
||||||
|
var actionRunValueToken = default(TemplateToken);
|
||||||
|
|
||||||
|
foreach (var actionPair in actionMapping)
|
||||||
|
{
|
||||||
|
var propertyName = actionPair.Key.AssertString($"action.yml property key");
|
||||||
|
|
||||||
|
switch (propertyName.Value)
|
||||||
|
{
|
||||||
|
case "name":
|
||||||
|
actionDefinition.Name = actionPair.Value.AssertString("name").Value;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case "outputs":
|
||||||
|
actionOutputs = actionPair.Value.AssertMapping("outputs");
|
||||||
|
break;
|
||||||
|
|
||||||
|
case "description":
|
||||||
|
actionDefinition.Description = actionPair.Value.AssertString("description").Value;
|
||||||
|
break;
|
||||||
|
|
||||||
|
case "inputs":
|
||||||
|
ConvertInputs(actionPair.Value, actionDefinition);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case "runs":
|
||||||
|
// Defer runs token evaluation to after for loop to ensure that order of outputs doesn't matter.
|
||||||
|
actionRunValueToken = actionPair.Value;
|
||||||
|
break;
|
||||||
|
|
||||||
|
default:
|
||||||
|
Trace.Info($"Ignore action property {propertyName}.");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Evaluate Runs Last
|
||||||
|
if (actionRunValueToken != null)
|
||||||
|
{
|
||||||
|
actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionRunValueToken, fileRelativePath, actionOutputs);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Error(ex);
|
||||||
|
templateContext.Errors.Add(ex);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (templateContext.Errors.Count > 0)
|
||||||
|
{
|
||||||
|
foreach (var error in templateContext.Errors)
|
||||||
|
{
|
||||||
|
Trace.Error($"Action.yml load error: {error.Message}");
|
||||||
|
executionContext.Error(error.Message);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new ArgumentException($"Failed to load {fileRelativePath}");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (actionDefinition.Execution == null)
|
||||||
|
{
|
||||||
|
executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
|
||||||
|
throw new ArgumentException($"Top level 'runs:' section is required for {fileRelativePath}");
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
Trace.Info($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
|
||||||
|
}
|
||||||
|
|
||||||
|
return actionDefinition;
|
||||||
|
}
|
||||||
|
|
||||||
|
public DictionaryContextData EvaluateCompositeOutputs(
|
||||||
|
IExecutionContext executionContext,
|
||||||
|
TemplateToken token,
|
||||||
|
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||||
|
{
|
||||||
|
var result = default(DictionaryContextData);
|
||||||
|
|
||||||
|
if (token != null)
|
||||||
|
{
|
||||||
|
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||||
|
try
|
||||||
|
{
|
||||||
|
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null, omitHeader: true);
|
||||||
|
templateContext.Errors.Check();
|
||||||
|
result = token.ToContextData().AssertDictionary("composite outputs");
|
||||||
|
}
|
||||||
|
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||||
|
{
|
||||||
|
templateContext.Errors.Add(ex);
|
||||||
|
}
|
||||||
|
|
||||||
|
templateContext.Errors.Check();
|
||||||
|
}
|
||||||
|
|
||||||
|
return result ?? new DictionaryContextData();
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<string> EvaluateContainerArguments(
|
||||||
|
IExecutionContext executionContext,
|
||||||
|
SequenceToken token,
|
||||||
|
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||||
|
{
|
||||||
|
var result = new List<string>();
|
||||||
|
|
||||||
|
if (token != null)
|
||||||
|
{
|
||||||
|
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||||
|
try
|
||||||
|
{
|
||||||
|
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null, omitHeader: true);
|
||||||
|
templateContext.Errors.Check();
|
||||||
|
|
||||||
|
Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||||
|
|
||||||
|
// Sequence
|
||||||
|
var args = evaluateResult.AssertSequence("container args");
|
||||||
|
|
||||||
|
foreach (var arg in args)
|
||||||
|
{
|
||||||
|
var str = arg.AssertString("container arg").Value;
|
||||||
|
result.Add(str);
|
||||||
|
Trace.Info($"Add argument {str}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||||
|
{
|
||||||
|
Trace.Error(ex);
|
||||||
|
templateContext.Errors.Add(ex);
|
||||||
|
}
|
||||||
|
|
||||||
|
templateContext.Errors.Check();
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Dictionary<string, string> EvaluateContainerEnvironment(
|
||||||
|
IExecutionContext executionContext,
|
||||||
|
MappingToken token,
|
||||||
|
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||||
|
{
|
||||||
|
var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||||
|
|
||||||
|
if (token != null)
|
||||||
|
{
|
||||||
|
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||||
|
try
|
||||||
|
{
|
||||||
|
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null, omitHeader: true);
|
||||||
|
templateContext.Errors.Check();
|
||||||
|
|
||||||
|
Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||||
|
|
||||||
|
// Mapping
|
||||||
|
var mapping = evaluateResult.AssertMapping("container env");
|
||||||
|
|
||||||
|
foreach (var pair in mapping)
|
||||||
|
{
|
||||||
|
// Literal key
|
||||||
|
var key = pair.Key.AssertString("container env key");
|
||||||
|
|
||||||
|
// Literal value
|
||||||
|
var value = pair.Value.AssertString("container env value");
|
||||||
|
result[key.Value] = value.Value;
|
||||||
|
|
||||||
|
Trace.Info($"Add env {key} = {value}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||||
|
{
|
||||||
|
Trace.Error(ex);
|
||||||
|
templateContext.Errors.Add(ex);
|
||||||
|
}
|
||||||
|
|
||||||
|
templateContext.Errors.Check();
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public string EvaluateDefaultInput(
|
||||||
|
IExecutionContext executionContext,
|
||||||
|
string inputName,
|
||||||
|
TemplateToken token)
|
||||||
|
{
|
||||||
|
string result = "";
|
||||||
|
if (token != null)
|
||||||
|
{
|
||||||
|
var templateContext = CreateTemplateContext(executionContext);
|
||||||
|
try
|
||||||
|
{
|
||||||
|
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null, omitHeader: true);
|
||||||
|
templateContext.Errors.Check();
|
||||||
|
|
||||||
|
Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||||
|
|
||||||
|
// String
|
||||||
|
result = evaluateResult.AssertString($"default value for input '{inputName}'").Value;
|
||||||
|
}
|
||||||
|
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||||
|
{
|
||||||
|
Trace.Error(ex);
|
||||||
|
templateContext.Errors.Add(ex);
|
||||||
|
}
|
||||||
|
|
||||||
|
templateContext.Errors.Check();
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
private TemplateContext CreateTemplateContext(
|
||||||
|
IExecutionContext executionContext,
|
||||||
|
IDictionary<string, PipelineContextData> extraExpressionValues = null)
|
||||||
|
{
|
||||||
|
var result = new TemplateContext
|
||||||
|
{
|
||||||
|
CancellationToken = CancellationToken.None,
|
||||||
|
Errors = new TemplateValidationErrors(10, int.MaxValue), // Don't truncate error messages otherwise we might not scrub secrets correctly
|
||||||
|
Memory = new TemplateMemory(
|
||||||
|
maxDepth: 100,
|
||||||
|
maxEvents: 1000000,
|
||||||
|
maxBytes: 10 * 1024 * 1024),
|
||||||
|
Schema = _actionManifestSchema,
|
||||||
|
TraceWriter = executionContext.ToTemplateTraceWriter(),
|
||||||
|
AllowCaseFunction = false,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Expression values from execution context
|
||||||
|
foreach (var pair in executionContext.ExpressionValues)
|
||||||
|
{
|
||||||
|
result.ExpressionValues[pair.Key] = pair.Value;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extra expression values
|
||||||
|
if (extraExpressionValues?.Count > 0)
|
||||||
|
{
|
||||||
|
foreach (var pair in extraExpressionValues)
|
||||||
|
{
|
||||||
|
result.ExpressionValues[pair.Key] = pair.Value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Expression functions from execution context
|
||||||
|
foreach (var item in executionContext.ExpressionFunctions)
|
||||||
|
{
|
||||||
|
result.ExpressionFunctions.Add(item);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the file table from the Execution Context
|
||||||
|
for (var i = 0; i < executionContext.Global.FileTable.Count; i++)
|
||||||
|
{
|
||||||
|
result.GetFileId(executionContext.Global.FileTable[i]);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
private ActionExecutionData ConvertRuns(
|
||||||
|
IExecutionContext executionContext,
|
||||||
|
TemplateContext templateContext,
|
||||||
|
TemplateToken inputsToken,
|
||||||
|
String fileRelativePath,
|
||||||
|
MappingToken outputs = null)
|
||||||
|
{
|
||||||
|
var runsMapping = inputsToken.AssertMapping("runs");
|
||||||
|
var usingToken = default(StringToken);
|
||||||
|
var imageToken = default(StringToken);
|
||||||
|
var argsToken = default(SequenceToken);
|
||||||
|
var entrypointToken = default(StringToken);
|
||||||
|
var envToken = default(MappingToken);
|
||||||
|
var mainToken = default(StringToken);
|
||||||
|
var pluginToken = default(StringToken);
|
||||||
|
var preToken = default(StringToken);
|
||||||
|
var preEntrypointToken = default(StringToken);
|
||||||
|
var preIfToken = default(StringToken);
|
||||||
|
var postToken = default(StringToken);
|
||||||
|
var postEntrypointToken = default(StringToken);
|
||||||
|
var postIfToken = default(StringToken);
|
||||||
|
var steps = default(List<Pipelines.Step>);
|
||||||
|
|
||||||
|
foreach (var run in runsMapping)
|
||||||
|
{
|
||||||
|
var runsKey = run.Key.AssertString("runs key").Value;
|
||||||
|
switch (runsKey)
|
||||||
|
{
|
||||||
|
case "using":
|
||||||
|
usingToken = run.Value.AssertString("using");
|
||||||
|
break;
|
||||||
|
case "image":
|
||||||
|
imageToken = run.Value.AssertString("image");
|
||||||
|
break;
|
||||||
|
case "args":
|
||||||
|
argsToken = run.Value.AssertSequence("args");
|
||||||
|
break;
|
||||||
|
case "entrypoint":
|
||||||
|
entrypointToken = run.Value.AssertString("entrypoint");
|
||||||
|
break;
|
||||||
|
case "env":
|
||||||
|
envToken = run.Value.AssertMapping("env");
|
||||||
|
break;
|
||||||
|
case "main":
|
||||||
|
mainToken = run.Value.AssertString("main");
|
||||||
|
break;
|
||||||
|
case "plugin":
|
||||||
|
pluginToken = run.Value.AssertString("plugin");
|
||||||
|
break;
|
||||||
|
case "post":
|
||||||
|
postToken = run.Value.AssertString("post");
|
||||||
|
break;
|
||||||
|
case "post-entrypoint":
|
||||||
|
postEntrypointToken = run.Value.AssertString("post-entrypoint");
|
||||||
|
break;
|
||||||
|
case "post-if":
|
||||||
|
postIfToken = run.Value.AssertString("post-if");
|
||||||
|
break;
|
||||||
|
case "pre":
|
||||||
|
preToken = run.Value.AssertString("pre");
|
||||||
|
break;
|
||||||
|
case "pre-entrypoint":
|
||||||
|
preEntrypointToken = run.Value.AssertString("pre-entrypoint");
|
||||||
|
break;
|
||||||
|
case "pre-if":
|
||||||
|
preIfToken = run.Value.AssertString("pre-if");
|
||||||
|
break;
|
||||||
|
case "steps":
|
||||||
|
var stepsToken = run.Value.AssertSequence("steps");
|
||||||
|
steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
|
||||||
|
templateContext.Errors.Check();
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
Trace.Info($"Ignore run property {runsKey}.");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (usingToken != null)
|
||||||
|
{
|
||||||
|
if (string.Equals(usingToken.Value, "docker", StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
if (string.IsNullOrEmpty(imageToken?.Value))
|
||||||
|
{
|
||||||
|
throw new ArgumentNullException($"You are using a Container Action but an image is not provided in {fileRelativePath}.");
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
return new ContainerActionExecutionData()
|
||||||
|
{
|
||||||
|
Image = imageToken.Value,
|
||||||
|
Arguments = argsToken,
|
||||||
|
EntryPoint = entrypointToken?.Value,
|
||||||
|
Environment = envToken,
|
||||||
|
Pre = preEntrypointToken?.Value,
|
||||||
|
InitCondition = preIfToken?.Value ?? "always()",
|
||||||
|
Post = postEntrypointToken?.Value,
|
||||||
|
CleanupCondition = postIfToken?.Value ?? "always()"
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase) ||
|
||||||
|
string.Equals(usingToken.Value, "node16", StringComparison.OrdinalIgnoreCase) ||
|
||||||
|
string.Equals(usingToken.Value, "node20", StringComparison.OrdinalIgnoreCase) ||
|
||||||
|
string.Equals(usingToken.Value, "node24", StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
if (string.IsNullOrEmpty(mainToken?.Value))
|
||||||
|
{
|
||||||
|
throw new ArgumentNullException($"You are using a JavaScript Action but there is not an entry JavaScript file provided in {fileRelativePath}.");
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
return new NodeJSActionExecutionData()
|
||||||
|
{
|
||||||
|
NodeVersion = usingToken.Value,
|
||||||
|
Script = mainToken.Value,
|
||||||
|
Pre = preToken?.Value,
|
||||||
|
InitCondition = preIfToken?.Value ?? "always()",
|
||||||
|
Post = postToken?.Value,
|
||||||
|
CleanupCondition = postIfToken?.Value ?? "always()"
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
if (steps == null)
|
||||||
|
{
|
||||||
|
throw new ArgumentNullException($"You are using a composite action but there are no steps provided in {fileRelativePath}.");
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
return new CompositeActionExecutionData()
|
||||||
|
{
|
||||||
|
Steps = steps.Cast<Pipelines.ActionStep>().ToList(),
|
||||||
|
PreSteps = new List<Pipelines.ActionStep>(),
|
||||||
|
PostSteps = new Stack<Pipelines.ActionStep>(),
|
||||||
|
InitCondition = "always()",
|
||||||
|
CleanupCondition = "always()",
|
||||||
|
Outputs = outputs
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker', 'node12', 'node16', 'node20' or 'node24' instead.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (pluginToken != null)
|
||||||
|
{
|
||||||
|
return new PluginActionExecutionData()
|
||||||
|
{
|
||||||
|
Plugin = pluginToken.Value
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new NotSupportedException("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16', 'node20' or 'node24'.");
|
||||||
|
}
|
||||||
|
|
||||||
|
private void ConvertInputs(
|
||||||
|
TemplateToken inputsToken,
|
||||||
|
ActionDefinitionData actionDefinition)
|
||||||
|
{
|
||||||
|
actionDefinition.Inputs = new MappingToken(null, null, null);
|
||||||
|
var inputsMapping = inputsToken.AssertMapping("inputs");
|
||||||
|
foreach (var input in inputsMapping)
|
||||||
|
{
|
||||||
|
bool hasDefault = false;
|
||||||
|
var inputName = input.Key.AssertString("input name");
|
||||||
|
var inputMetadata = input.Value.AssertMapping("input metadata");
|
||||||
|
foreach (var metadata in inputMetadata)
|
||||||
|
{
|
||||||
|
var metadataName = metadata.Key.AssertString("input metadata").Value;
|
||||||
|
if (string.Equals(metadataName, "default", StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
hasDefault = true;
|
||||||
|
actionDefinition.Inputs.Add(inputName, metadata.Value);
|
||||||
|
}
|
||||||
|
else if (string.Equals(metadataName, "deprecationMessage", StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
if (actionDefinition.Deprecated == null)
|
||||||
|
{
|
||||||
|
actionDefinition.Deprecated = new Dictionary<String, String>();
|
||||||
|
}
|
||||||
|
var message = metadata.Value.AssertString("input deprecationMessage");
|
||||||
|
actionDefinition.Deprecated.Add(inputName.Value, message.Value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!hasDefault)
|
||||||
|
{
|
||||||
|
actionDefinition.Inputs.Add(inputName, new StringToken(null, null, null, string.Empty));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
701
src/Runner.Worker/ActionManifestManagerWrapper.cs
Normal file
701
src/Runner.Worker/ActionManifestManagerWrapper.cs
Normal file
@@ -0,0 +1,701 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using System.Linq;
|
||||||
|
using GitHub.Actions.WorkflowParser;
|
||||||
|
using GitHub.DistributedTask.Pipelines;
|
||||||
|
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||||
|
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||||
|
using GitHub.DistributedTask.WebApi;
|
||||||
|
using GitHub.Runner.Common;
|
||||||
|
using GitHub.Runner.Sdk;
|
||||||
|
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
|
||||||
|
|
||||||
|
namespace GitHub.Runner.Worker
|
||||||
|
{
|
||||||
|
[ServiceLocator(Default = typeof(ActionManifestManagerWrapper))]
|
||||||
|
public interface IActionManifestManagerWrapper : IRunnerService
|
||||||
|
{
|
||||||
|
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
|
||||||
|
|
||||||
|
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||||
|
|
||||||
|
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||||
|
|
||||||
|
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||||
|
|
||||||
|
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
|
||||||
|
}
|
||||||
|
|
||||||
|
public sealed class ActionManifestManagerWrapper : RunnerService, IActionManifestManagerWrapper
|
||||||
|
{
|
||||||
|
private IActionManifestManagerLegacy _legacyManager;
|
||||||
|
private IActionManifestManager _newManager;
|
||||||
|
|
||||||
|
public override void Initialize(IHostContext hostContext)
|
||||||
|
{
|
||||||
|
base.Initialize(hostContext);
|
||||||
|
_legacyManager = hostContext.GetService<IActionManifestManagerLegacy>();
|
||||||
|
_newManager = hostContext.GetService<IActionManifestManager>();
|
||||||
|
}
|
||||||
|
|
||||||
|
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
|
||||||
|
{
|
||||||
|
return EvaluateAndCompare(
|
||||||
|
executionContext,
|
||||||
|
"Load",
|
||||||
|
() => _legacyManager.Load(executionContext, manifestFile),
|
||||||
|
() => ConvertToLegacyActionDefinitionData(_newManager.Load(executionContext, manifestFile)),
|
||||||
|
(legacyResult, newResult) => CompareActionDefinition(legacyResult, newResult));
|
||||||
|
}
|
||||||
|
|
||||||
|
public DictionaryContextData EvaluateCompositeOutputs(
|
||||||
|
IExecutionContext executionContext,
|
||||||
|
TemplateToken token,
|
||||||
|
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||||
|
{
|
||||||
|
return EvaluateAndCompare(
|
||||||
|
executionContext,
|
||||||
|
"EvaluateCompositeOutputs",
|
||||||
|
() => _legacyManager.EvaluateCompositeOutputs(executionContext, token, extraExpressionValues),
|
||||||
|
() => ConvertToLegacyContextData<DictionaryContextData>(_newManager.EvaluateCompositeOutputs(executionContext, ConvertToNewToken(token), ConvertToNewExpressionValues(extraExpressionValues))),
|
||||||
|
(legacyResult, newResult) => CompareDictionaryContextData(legacyResult, newResult));
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<string> EvaluateContainerArguments(
|
||||||
|
IExecutionContext executionContext,
|
||||||
|
SequenceToken token,
|
||||||
|
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||||
|
{
|
||||||
|
return EvaluateAndCompare(
|
||||||
|
executionContext,
|
||||||
|
"EvaluateContainerArguments",
|
||||||
|
() => _legacyManager.EvaluateContainerArguments(executionContext, token, extraExpressionValues),
|
||||||
|
() => _newManager.EvaluateContainerArguments(executionContext, ConvertToNewToken(token) as GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.SequenceToken, ConvertToNewExpressionValues(extraExpressionValues)),
|
||||||
|
(legacyResult, newResult) => CompareLists(legacyResult, newResult, "ContainerArguments"));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Evaluates the container action environment via the legacy manager, optionally
// shadow-running the new implementation for comparison (see EvaluateAndCompare).
public Dictionary<string, string> EvaluateContainerEnvironment(
    IExecutionContext executionContext,
    MappingToken token,
    IDictionary<string, PipelineContextData> extraExpressionValues)
{
    return EvaluateAndCompare(
        executionContext,
        "EvaluateContainerEnvironment",
        () => _legacyManager.EvaluateContainerEnvironment(executionContext, token, extraExpressionValues),
        // NOTE(review): the `as` cast yields null if the converted token is not a
        // mapping token — presumably the new manager tolerates a null token; confirm.
        () => _newManager.EvaluateContainerEnvironment(executionContext, ConvertToNewToken(token) as GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.MappingToken, ConvertToNewExpressionValues(extraExpressionValues)),
        (legacyResult, newResult) => {
            var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
            return CompareDictionaries(trace, legacyResult, newResult, "ContainerEnvironment");
        });
}
|
||||||
|
|
||||||
|
// Evaluates a single default input value via the legacy manager, optionally
// shadow-running the new implementation for comparison (see EvaluateAndCompare).
public string EvaluateDefaultInput(
    IExecutionContext executionContext,
    string inputName,
    TemplateToken token)
{
    return EvaluateAndCompare(
        executionContext,
        "EvaluateDefaultInput",
        () => _legacyManager.EvaluateDefaultInput(executionContext, inputName, token),
        () => _newManager.EvaluateDefaultInput(executionContext, inputName, ConvertToNewToken(token)),
        // Results are plain strings; ordinal equality is sufficient.
        (legacyResult, newResult) => string.Equals(legacyResult, newResult, StringComparison.Ordinal));
}
|
||||||
|
|
||||||
|
// Conversion helper methods
|
||||||
|
// Conversion helper methods

// Converts the new parser's action definition into the legacy data type so the
// two implementations' results can be compared field-by-field. Returns null for
// null input.
private ActionDefinitionData ConvertToLegacyActionDefinitionData(ActionDefinitionDataNew newData)
{
    if (newData == null)
    {
        return null;
    }

    return new ActionDefinitionData
    {
        Name = newData.Name,
        Description = newData.Description,
        // Token types are converted via a JSON round-trip (see ConvertToLegacyToken).
        Inputs = ConvertToLegacyToken<MappingToken>(newData.Inputs),
        Deprecated = newData.Deprecated,
        Execution = ConvertToLegacyExecution(newData.Execution)
    };
}
|
||||||
|
|
||||||
|
// Converts a new-parser execution block into the equivalent legacy execution
// type. Only container and composite executions carry new-parser token types
// that need conversion; all other execution kinds are returned unchanged.
private ActionExecutionData ConvertToLegacyExecution(ActionExecutionData execution)
{
    if (execution == null)
    {
        return null;
    }

    // Handle different execution types
    if (execution is ContainerActionExecutionDataNew containerNew)
    {
        return new ContainerActionExecutionData
        {
            Image = containerNew.Image,
            EntryPoint = containerNew.EntryPoint,
            // Arguments/Environment are new-parser tokens; convert via JSON round-trip.
            Arguments = ConvertToLegacyToken<SequenceToken>(containerNew.Arguments),
            Environment = ConvertToLegacyToken<MappingToken>(containerNew.Environment),
            Pre = containerNew.Pre,
            Post = containerNew.Post,
            InitCondition = containerNew.InitCondition,
            CleanupCondition = containerNew.CleanupCondition
        };
    }
    else if (execution is CompositeActionExecutionDataNew compositeNew)
    {
        return new CompositeActionExecutionData
        {
            Steps = ConvertToLegacySteps(compositeNew.Steps),
            Outputs = ConvertToLegacyToken<MappingToken>(compositeNew.Outputs)
        };
    }
    else
    {
        // For NodeJS and Plugin execution, they don't use new token types, so just return as-is
        return execution;
    }
}
|
||||||
|
|
||||||
|
// Converts new-parser composite steps to legacy ActionStep instances by
// round-tripping through JSON (the two types are assumed to share a compatible
// serialized shape). Returns null for null input.
private List<GitHub.DistributedTask.Pipelines.ActionStep> ConvertToLegacySteps(List<GitHub.Actions.WorkflowParser.IStep> newSteps)
{
    if (newSteps == null)
    {
        return null;
    }

    // Serialize new steps and deserialize to old steps
    var json = StringUtil.ConvertToJson(newSteps, Newtonsoft.Json.Formatting.None);
    return StringUtil.ConvertFromJson<List<GitHub.DistributedTask.Pipelines.ActionStep>>(json);
}
|
||||||
|
|
||||||
|
// Converts a new-parser template token into the requested legacy token type by
// round-tripping through JSON; null in, null out.
private T ConvertToLegacyToken<T>(GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken newToken) where T : TemplateToken
{
    return newToken == null
        ? null
        : StringUtil.ConvertFromJson<T>(StringUtil.ConvertToJson(newToken, Newtonsoft.Json.Formatting.None));
}
|
||||||
|
|
||||||
|
// Converts a legacy template token into the new parser's token type by
// round-tripping through JSON; null in, null out.
private GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken ConvertToNewToken(TemplateToken legacyToken)
{
    return legacyToken == null
        ? null
        : StringUtil.ConvertFromJson<GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken>(
            StringUtil.ConvertToJson(legacyToken, Newtonsoft.Json.Formatting.None));
}
|
||||||
|
|
||||||
|
// Converts legacy pipeline context data into the new expression-data dictionary
// by round-tripping through JSON; null in, null out.
private IDictionary<string, GitHub.Actions.Expressions.Data.ExpressionData> ConvertToNewExpressionValues(IDictionary<string, PipelineContextData> legacyValues)
{
    return legacyValues == null
        ? null
        : StringUtil.ConvertFromJson<IDictionary<string, GitHub.Actions.Expressions.Data.ExpressionData>>(
            StringUtil.ConvertToJson(legacyValues, Newtonsoft.Json.Formatting.None));
}
|
||||||
|
|
||||||
|
// Converts new-parser expression data back into the requested legacy
// PipelineContextData type by round-tripping through JSON; null in, null out.
private T ConvertToLegacyContextData<T>(GitHub.Actions.Expressions.Data.ExpressionData newData) where T : PipelineContextData
{
    return newData == null
        ? null
        : StringUtil.ConvertFromJson<T>(StringUtil.ConvertToJson(newData, Newtonsoft.Json.Formatting.None));
}
|
||||||
|
|
||||||
|
// Comparison helper methods
|
||||||
|
// Comparison helper methods

/// <summary>
/// Runs the legacy evaluator and returns its result. When comparison is enabled
/// (the CompareWorkflowParser feature flag or the
/// ACTIONS_RUNNER_COMPARE_WORKFLOW_PARSER environment variable), also runs the
/// new evaluator and records telemetry when the results (or thrown exceptions)
/// differ. The new implementation never affects what the caller sees: the
/// legacy result is returned, and a legacy exception is re-thrown.
/// </summary>
private TLegacy EvaluateAndCompare<TLegacy, TNew>(
    IExecutionContext context,
    string methodName,
    Func<TLegacy> legacyEvaluator,
    Func<TNew> newEvaluator,
    Func<TLegacy, TNew, bool> resultComparer)
{
    // Comparison disabled - run legacy only.
    if (!((context.Global.Variables.GetBoolean(Constants.Runner.Features.CompareWorkflowParser) ?? false)
        || StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("ACTIONS_RUNNER_COMPARE_WORKFLOW_PARSER"))))
    {
        return legacyEvaluator();
    }

    var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));

    // Legacy evaluator - capture any exception so the comparison can still run
    // before it is re-thrown below.
    var legacyException = default(Exception);
    var legacyResult = default(TLegacy);
    try
    {
        legacyResult = legacyEvaluator();
    }
    catch (Exception ex)
    {
        legacyException = ex;
    }

    // Compare with new evaluator. Failures inside the comparison itself are
    // swallowed (and recorded as telemetry) so they can never break the job.
    try
    {
        ArgUtil.NotNull(context, nameof(context));
        trace.Info(methodName);

        // New evaluator
        var newException = default(Exception);
        var newResult = default(TNew);
        try
        {
            newResult = newEvaluator();
        }
        catch (Exception ex)
        {
            newException = ex;
        }

        // Compare results or exceptions
        if (legacyException != null || newException != null)
        {
            // Either one or both threw exceptions - compare them
            if (!CompareExceptions(trace, legacyException, newException))
            {
                trace.Info($"{methodName} exception mismatch");
                RecordMismatch(context, $"{methodName}");
            }
        }
        else
        {
            // Both succeeded - compare results.
            // Skip comparison if new implementation returns null (not yet implemented).
            if (newResult != null && !resultComparer(legacyResult, newResult))
            {
                trace.Info($"{methodName} mismatch");
                RecordMismatch(context, $"{methodName}");
            }
        }
    }
    catch (Exception ex)
    {
        trace.Info($"Comparison failed: {ex.Message}");
        RecordComparisonError(context, $"{methodName}: {ex.Message}");
    }

    // Re-throw the legacy exception if any. ExceptionDispatchInfo preserves the
    // original stack trace; a plain `throw legacyException;` would reset it to
    // this frame and hide where the legacy evaluator actually failed.
    if (legacyException != null)
    {
        System.Runtime.ExceptionServices.ExceptionDispatchInfo.Capture(legacyException).Throw();
    }

    return legacyResult;
}
|
||||||
|
|
||||||
|
// Records a result/exception mismatch between the legacy and new managers as
// job telemetry. Only the first mismatch per job is recorded; the
// HasActionManifestMismatch flag suppresses subsequent reports.
private void RecordMismatch(IExecutionContext context, string methodName)
{
    if (!context.Global.HasActionManifestMismatch)
    {
        context.Global.HasActionManifestMismatch = true;
        var telemetry = new JobTelemetry { Type = JobTelemetryType.General, Message = $"ActionManifestMismatch: {methodName}" };
        context.Global.JobTelemetry.Add(telemetry);
    }
}
|
||||||
|
|
||||||
|
// Records a failure of the comparison machinery itself (as opposed to a result
// mismatch) as job telemetry. Shares the HasActionManifestMismatch flag with
// RecordMismatch, so only the first event of either kind per job is reported.
private void RecordComparisonError(IExecutionContext context, string errorDetails)
{
    if (!context.Global.HasActionManifestMismatch)
    {
        context.Global.HasActionManifestMismatch = true;
        var telemetry = new JobTelemetry { Type = JobTelemetryType.General, Message = $"ActionManifestComparisonError: {errorDetails}" };
        context.Global.JobTelemetry.Add(telemetry);
    }
}
|
||||||
|
|
||||||
|
// Compares a legacy-parsed action definition with one produced by the new
// parser (already converted to the legacy type). Returns true when they match;
// logs the first difference found and returns false otherwise.
private bool CompareActionDefinition(ActionDefinitionData legacyResult, ActionDefinitionData newResult)
{
    var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
    if (legacyResult == null && newResult == null)
    {
        return true;
    }

    if (legacyResult == null || newResult == null)
    {
        trace.Info($"CompareActionDefinition mismatch - one result is null (legacy={legacyResult == null}, new={newResult == null})");
        return false;
    }

    if (!string.Equals(legacyResult.Name, newResult.Name, StringComparison.Ordinal))
    {
        trace.Info($"CompareActionDefinition mismatch - Name differs (legacy='{legacyResult.Name}', new='{newResult.Name}')");
        return false;
    }

    if (!string.Equals(legacyResult.Description, newResult.Description, StringComparison.Ordinal))
    {
        trace.Info($"CompareActionDefinition mismatch - Description differs (legacy='{legacyResult.Description}', new='{newResult.Description}')");
        return false;
    }

    // Compare Inputs token by serialized JSON form (tokens have no structural
    // equality of their own here).
    var legacyInputsJson = legacyResult.Inputs != null ? StringUtil.ConvertToJson(legacyResult.Inputs) : null;
    var newInputsJson = newResult.Inputs != null ? StringUtil.ConvertToJson(newResult.Inputs) : null;
    if (!string.Equals(legacyInputsJson, newInputsJson, StringComparison.Ordinal))
    {
        trace.Info($"CompareActionDefinition mismatch - Inputs differ");
        return false;
    }

    // Compare Deprecated
    if (!CompareDictionaries(trace, legacyResult.Deprecated, newResult.Deprecated, "Deprecated"))
    {
        return false;
    }

    // Compare Execution
    if (!CompareExecution(trace, legacyResult.Execution, newResult.Execution))
    {
        return false;
    }

    return true;
}
|
||||||
|
|
||||||
|
// Compares two execution blocks: both must be null, or both non-null with the
// same concrete type, in which case a type-specific comparison is dispatched.
// Unrecognized (but matching) execution types compare as equal.
private bool CompareExecution(Tracing trace, ActionExecutionData legacy, ActionExecutionData newExecution)
{
    if (legacy == null && newExecution == null)
    {
        return true;
    }

    if (legacy == null || newExecution == null)
    {
        trace.Info($"CompareExecution mismatch - one is null (legacy={legacy == null}, new={newExecution == null})");
        return false;
    }

    if (legacy.GetType() != newExecution.GetType())
    {
        trace.Info($"CompareExecution mismatch - different types (legacy={legacy.GetType().Name}, new={newExecution.GetType().Name})");
        return false;
    }

    // Compare based on type
    if (legacy is NodeJSActionExecutionData legacyNode && newExecution is NodeJSActionExecutionData newNode)
    {
        return CompareNodeJSExecution(trace, legacyNode, newNode);
    }
    else if (legacy is ContainerActionExecutionData legacyContainer && newExecution is ContainerActionExecutionData newContainer)
    {
        return CompareContainerExecution(trace, legacyContainer, newContainer);
    }
    else if (legacy is CompositeActionExecutionData legacyComposite && newExecution is CompositeActionExecutionData newComposite)
    {
        return CompareCompositeExecution(trace, legacyComposite, newComposite);
    }
    else if (legacy is PluginActionExecutionData legacyPlugin && newExecution is PluginActionExecutionData newPlugin)
    {
        return ComparePluginExecution(trace, legacyPlugin, newPlugin);
    }

    // Same type on both sides but not one of the known kinds - treat as equal.
    return true;
}
|
||||||
|
|
||||||
|
// Field-by-field ordinal comparison of two NodeJS execution blocks. Logs the
// first differing field and returns false; returns true when all fields match.
// Pre/Post and the conditions are intentionally logged without values.
private bool CompareNodeJSExecution(Tracing trace, NodeJSActionExecutionData legacy, NodeJSActionExecutionData newExecution)
{
    if (!string.Equals(legacy.NodeVersion, newExecution.NodeVersion, StringComparison.Ordinal))
    {
        trace.Info($"CompareNodeJSExecution mismatch - NodeVersion differs (legacy='{legacy.NodeVersion}', new='{newExecution.NodeVersion}')");
        return false;
    }

    if (!string.Equals(legacy.Script, newExecution.Script, StringComparison.Ordinal))
    {
        trace.Info($"CompareNodeJSExecution mismatch - Script differs (legacy='{legacy.Script}', new='{newExecution.Script}')");
        return false;
    }

    if (!string.Equals(legacy.Pre, newExecution.Pre, StringComparison.Ordinal))
    {
        trace.Info($"CompareNodeJSExecution mismatch - Pre differs");
        return false;
    }

    if (!string.Equals(legacy.Post, newExecution.Post, StringComparison.Ordinal))
    {
        trace.Info($"CompareNodeJSExecution mismatch - Post differs");
        return false;
    }

    if (!string.Equals(legacy.InitCondition, newExecution.InitCondition, StringComparison.Ordinal))
    {
        trace.Info($"CompareNodeJSExecution mismatch - InitCondition differs");
        return false;
    }

    if (!string.Equals(legacy.CleanupCondition, newExecution.CleanupCondition, StringComparison.Ordinal))
    {
        trace.Info($"CompareNodeJSExecution mismatch - CleanupCondition differs");
        return false;
    }

    return true;
}
|
||||||
|
|
||||||
|
// Field-by-field comparison of two container execution blocks. Logs the first
// differing field and returns false; returns true when all fields match.
// In addition to Image/EntryPoint/Arguments/Environment, this compares
// Pre/Post/InitCondition/CleanupCondition — those fields are populated by
// ConvertToLegacyActionDefinitionData's execution conversion but were
// previously never compared, so differences went undetected.
private bool CompareContainerExecution(Tracing trace, ContainerActionExecutionData legacy, ContainerActionExecutionData newExecution)
{
    if (!string.Equals(legacy.Image, newExecution.Image, StringComparison.Ordinal))
    {
        trace.Info($"CompareContainerExecution mismatch - Image differs");
        return false;
    }

    if (!string.Equals(legacy.EntryPoint, newExecution.EntryPoint, StringComparison.Ordinal))
    {
        trace.Info($"CompareContainerExecution mismatch - EntryPoint differs");
        return false;
    }

    // Compare Arguments token by serialized JSON form
    var legacyArgsJson = legacy.Arguments != null ? StringUtil.ConvertToJson(legacy.Arguments) : null;
    var newArgsJson = newExecution.Arguments != null ? StringUtil.ConvertToJson(newExecution.Arguments) : null;
    if (!string.Equals(legacyArgsJson, newArgsJson, StringComparison.Ordinal))
    {
        trace.Info($"CompareContainerExecution mismatch - Arguments differ");
        return false;
    }

    // Compare Environment token by serialized JSON form
    var legacyEnvJson = legacy.Environment != null ? StringUtil.ConvertToJson(legacy.Environment) : null;
    var newEnvJson = newExecution.Environment != null ? StringUtil.ConvertToJson(newExecution.Environment) : null;
    if (!string.Equals(legacyEnvJson, newEnvJson, StringComparison.Ordinal))
    {
        trace.Info($"CompareContainerExecution mismatch - Environment differs");
        return false;
    }

    // Compare pre/post entrypoints and their conditions (mirrors CompareNodeJSExecution)
    if (!string.Equals(legacy.Pre, newExecution.Pre, StringComparison.Ordinal))
    {
        trace.Info($"CompareContainerExecution mismatch - Pre differs");
        return false;
    }

    if (!string.Equals(legacy.Post, newExecution.Post, StringComparison.Ordinal))
    {
        trace.Info($"CompareContainerExecution mismatch - Post differs");
        return false;
    }

    if (!string.Equals(legacy.InitCondition, newExecution.InitCondition, StringComparison.Ordinal))
    {
        trace.Info($"CompareContainerExecution mismatch - InitCondition differs");
        return false;
    }

    if (!string.Equals(legacy.CleanupCondition, newExecution.CleanupCondition, StringComparison.Ordinal))
    {
        trace.Info($"CompareContainerExecution mismatch - CleanupCondition differs");
        return false;
    }

    return true;
}
|
||||||
|
|
||||||
|
// Compares two composite execution blocks. Steps are compared by count only
// (not content) — NOTE(review): presumably a deliberate simplification since
// step content round-trips through JSON in conversion; confirm this is enough.
private bool CompareCompositeExecution(Tracing trace, CompositeActionExecutionData legacy, CompositeActionExecutionData newExecution)
{
    // Compare Steps
    if (legacy.Steps?.Count != newExecution.Steps?.Count)
    {
        trace.Info($"CompareCompositeExecution mismatch - Steps.Count differs (legacy={legacy.Steps?.Count}, new={newExecution.Steps?.Count})");
        return false;
    }

    // Compare Outputs token by serialized JSON form
    var legacyOutputsJson = legacy.Outputs != null ? StringUtil.ConvertToJson(legacy.Outputs) : null;
    var newOutputsJson = newExecution.Outputs != null ? StringUtil.ConvertToJson(newExecution.Outputs) : null;
    if (!string.Equals(legacyOutputsJson, newOutputsJson, StringComparison.Ordinal))
    {
        trace.Info($"CompareCompositeExecution mismatch - Outputs differ");
        return false;
    }

    return true;
}
|
||||||
|
|
||||||
|
// Plugin executions carry only a plugin identifier; that is the sole field
// compared. Logs and returns false on a difference.
private bool ComparePluginExecution(Tracing trace, PluginActionExecutionData legacy, PluginActionExecutionData newExecution)
{
    if (string.Equals(legacy.Plugin, newExecution.Plugin, StringComparison.Ordinal))
    {
        return true;
    }

    trace.Info($"ComparePluginExecution mismatch - Plugin differs");
    return false;
}
|
||||||
|
|
||||||
|
// Compares two dictionary context-data values by their serialized JSON form.
// Note this makes the comparison sensitive to key ordering in serialization —
// NOTE(review): assumes both implementations serialize keys in the same order.
private bool CompareDictionaryContextData(DictionaryContextData legacy, DictionaryContextData newData)
{
    var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
    if (legacy == null && newData == null)
    {
        return true;
    }

    if (legacy == null || newData == null)
    {
        trace.Info($"CompareDictionaryContextData mismatch - one is null (legacy={legacy == null}, new={newData == null})");
        return false;
    }

    var legacyJson = StringUtil.ConvertToJson(legacy);
    var newJson = StringUtil.ConvertToJson(newData);

    if (!string.Equals(legacyJson, newJson, StringComparison.Ordinal))
    {
        trace.Info($"CompareDictionaryContextData mismatch");
        return false;
    }

    return true;
}
|
||||||
|
|
||||||
|
// Element-wise ordinal comparison of two string lists. Logs the first
// difference (null-ness, count, or element value) and returns false; returns
// true when the lists are equal.
private bool CompareLists(IList<string> legacyList, IList<string> newList, string fieldName)
{
    var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));

    if (legacyList == null || newList == null)
    {
        if (legacyList == null && newList == null)
        {
            return true;
        }

        trace.Info($"CompareLists mismatch - {fieldName} - one is null (legacy={legacyList == null}, new={newList == null})");
        return false;
    }

    if (legacyList.Count != newList.Count)
    {
        trace.Info($"CompareLists mismatch - {fieldName}.Count differs (legacy={legacyList.Count}, new={newList.Count})");
        return false;
    }

    var index = 0;
    foreach (var legacyItem in legacyList)
    {
        if (!string.Equals(legacyItem, newList[index], StringComparison.Ordinal))
        {
            trace.Info($"CompareLists mismatch - {fieldName}[{index}] differs (legacy='{legacyItem}', new='{newList[index]}')");
            return false;
        }

        index++;
    }

    return true;
}
|
||||||
|
|
||||||
|
// Key/value ordinal comparison of two string dictionaries. Also requires the
// two dictionaries (when both are concrete Dictionary instances) to use equal
// key comparers, since a case-insensitive vs. case-sensitive dictionary would
// behave differently downstream even with identical contents.
private bool CompareDictionaries(Tracing trace, IDictionary<string, string> legacyDict, IDictionary<string, string> newDict, string fieldName)
{
    if (legacyDict == null && newDict == null)
    {
        return true;
    }

    if (legacyDict == null || newDict == null)
    {
        trace.Info($"CompareDictionaries mismatch - {fieldName} - one is null (legacy={legacyDict == null}, new={newDict == null})");
        return false;
    }

    if (legacyDict is Dictionary<string, string> legacyTypedDict && newDict is Dictionary<string, string> newTypedDict)
    {
        if (!object.Equals(legacyTypedDict.Comparer, newTypedDict.Comparer))
        {
            trace.Info($"CompareDictionaries mismatch - {fieldName} - different comparers (legacy={legacyTypedDict.Comparer.GetType().Name}, new={newTypedDict.Comparer.GetType().Name})");
            return false;
        }
    }

    if (legacyDict.Count != newDict.Count)
    {
        trace.Info($"CompareDictionaries mismatch - {fieldName}.Count differs (legacy={legacyDict.Count}, new={newDict.Count})");
        return false;
    }

    // Counts match, so checking every legacy key exists in new (with an equal
    // value) is sufficient for equality.
    foreach (var kvp in legacyDict)
    {
        if (!newDict.TryGetValue(kvp.Key, out var newValue))
        {
            trace.Info($"CompareDictionaries mismatch - {fieldName} - key '{kvp.Key}' missing in new result");
            return false;
        }

        if (!string.Equals(kvp.Value, newValue, StringComparison.Ordinal))
        {
            trace.Info($"CompareDictionaries mismatch - {fieldName}['{kvp.Key}'] differs (legacy='{kvp.Value}', new='{newValue}')");
            return false;
        }
    }

    return true;
}
|
||||||
|
|
||||||
|
// Compares two exceptions by their flattened message chains (see
// GetExceptionMessages). Two null exceptions are equal; exactly one null is a
// mismatch; otherwise every message in the chain must match ordinally, in order.
private bool CompareExceptions(Tracing trace, Exception legacyException, Exception newException)
{
    if (legacyException == null && newException == null)
    {
        return true;
    }

    if (legacyException == null || newException == null)
    {
        trace.Info($"CompareExceptions mismatch - one exception is null (legacy={legacyException == null}, new={newException == null})");
        return false;
    }

    // Compare exception messages recursively (including inner exceptions)
    var legacyMessages = GetExceptionMessages(legacyException);
    var newMessages = GetExceptionMessages(newException);

    if (legacyMessages.Count != newMessages.Count)
    {
        trace.Info($"CompareExceptions mismatch - different number of exception messages (legacy={legacyMessages.Count}, new={newMessages.Count})");
        return false;
    }

    for (int i = 0; i < legacyMessages.Count; i++)
    {
        if (!string.Equals(legacyMessages[i], newMessages[i], StringComparison.Ordinal))
        {
            trace.Info($"CompareExceptions mismatch - exception messages differ at level {i} (legacy='{legacyMessages[i]}', new='{newMessages[i]}')");
            return false;
        }
    }

    return true;
}
|
||||||
|
|
||||||
|
// Flattens an exception chain into its messages, breadth-first. Regular inner
// exceptions contribute one child; AggregateException contributes all of its
// inner exceptions. Collection is capped at 50 messages as a failsafe against
// pathological or cyclic chains.
private IList<string> GetExceptionMessages(Exception ex)
{
    var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
    var messages = new List<string>();
    var toProcess = new Queue<Exception>();
    toProcess.Enqueue(ex);
    int count = 0;

    while (toProcess.Count > 0 && count < 50)
    {
        var current = toProcess.Dequeue();
        if (current == null) continue;

        messages.Add(current.Message);
        count++;

        // Special handling for AggregateException - enqueue all inner exceptions
        if (current is AggregateException aggregateEx)
        {
            foreach (var innerEx in aggregateEx.InnerExceptions)
            {
                if (innerEx != null && count < 50)
                {
                    toProcess.Enqueue(innerEx);
                }
            }
        }
        else if (current.InnerException != null)
        {
            toProcess.Enqueue(current.InnerException);
        }

        // Failsafe: if we have too many exceptions, stop and return what we have
        if (count >= 50)
        {
            trace.Info("CompareExceptions failsafe triggered - too many exceptions (50+)");
            break;
        }
    }

    return messages;
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -206,7 +206,7 @@ namespace GitHub.Runner.Worker
|
|||||||
// Merge the default inputs from the definition
|
// Merge the default inputs from the definition
|
||||||
if (definition.Data?.Inputs != null)
|
if (definition.Data?.Inputs != null)
|
||||||
{
|
{
|
||||||
var manifestManager = HostContext.GetService<IActionManifestManager>();
|
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
|
||||||
foreach (var input in definition.Data.Inputs)
|
foreach (var input in definition.Data.Inputs)
|
||||||
{
|
{
|
||||||
string key = input.Key.AssertString("action input name").Value;
|
string key = input.Key.AssertString("action input name").Value;
|
||||||
|
|||||||
1845
src/Runner.Worker/Dap/DapDebugSession.cs
Normal file
1845
src/Runner.Worker/Dap/DapDebugSession.cs
Normal file
File diff suppressed because it is too large
Load Diff
1125
src/Runner.Worker/Dap/DapMessages.cs
Normal file
1125
src/Runner.Worker/Dap/DapMessages.cs
Normal file
File diff suppressed because it is too large
Load Diff
480
src/Runner.Worker/Dap/DapServer.cs
Normal file
480
src/Runner.Worker/Dap/DapServer.cs
Normal file
@@ -0,0 +1,480 @@
|
|||||||
|
using System;
|
||||||
|
using System.IO;
|
||||||
|
using System.Net;
|
||||||
|
using System.Net.Sockets;
|
||||||
|
using System.Text;
|
||||||
|
using System.Threading;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using GitHub.Runner.Common;
|
||||||
|
using Newtonsoft.Json;
|
||||||
|
|
||||||
|
namespace GitHub.Runner.Worker.Dap
|
||||||
|
{
|
||||||
|
/// <summary>
/// DAP Server interface for handling Debug Adapter Protocol connections.
/// Lifecycle: StartAsync (listen) -> WaitForConnectionAsync (block until a
/// client attaches) -> SetSession/SendEvent during the session -> StopAsync.
/// </summary>
[ServiceLocator(Default = typeof(DapServer))]
public interface IDapServer : IRunnerService, IDisposable
{
    /// <summary>
    /// Starts the DAP TCP server on the specified port.
    /// </summary>
    /// <param name="port">The port to listen on (default: 4711)</param>
    /// <param name="cancellationToken">Cancellation token</param>
    Task StartAsync(int port, CancellationToken cancellationToken);

    /// <summary>
    /// Blocks until a debug client connects.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token</param>
    Task WaitForConnectionAsync(CancellationToken cancellationToken);

    /// <summary>
    /// Stops the DAP server and closes all connections.
    /// </summary>
    Task StopAsync();

    /// <summary>
    /// Sets the debug session that will handle DAP requests.
    /// </summary>
    /// <param name="session">The debug session</param>
    void SetSession(IDapDebugSession session);

    /// <summary>
    /// Sends an event to the connected debug client.
    /// </summary>
    /// <param name="evt">The event to send</param>
    void SendEvent(Event evt);

    /// <summary>
    /// Gets whether a debug client is currently connected.
    /// </summary>
    bool IsConnected { get; }
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// TCP server implementation of the Debug Adapter Protocol.
|
||||||
|
/// Handles message framing (Content-Length headers) and JSON serialization.
|
||||||
|
/// </summary>
|
||||||
|
public sealed class DapServer : RunnerService, IDapServer
|
||||||
|
{
|
||||||
|
// DAP wire-format framing constants: every message is preceded by a
// "Content-Length: <n>" header block terminated by a blank line.
private const string ContentLengthHeader = "Content-Length: ";
private const string HeaderTerminator = "\r\n\r\n";

private TcpListener _listener;                       // loopback listener created in StartAsync
private TcpClient _client;                           // the (single) connected debug client
private NetworkStream _stream;                       // client I/O stream
private IDapDebugSession _session;                   // request handler set via SetSession
private CancellationTokenSource _cts;                // linked to the caller's token in StartAsync
private Task _messageLoopTask;                       // background read loop, started on connect
private TaskCompletionSource<bool> _connectionTcs;   // completed once a client connects
private int _nextSeq = 1;                            // outgoing DAP message sequence number
private readonly object _sendLock = new object();    // serializes outbound writes
private bool _disposed = false;

// True while a TCP client is attached (null-safe before any connection).
public bool IsConnected => _client?.Connected == true;
|
||||||
|
|
||||||
|
// Standard runner-service initialization; wires up tracing via the base class.
public override void Initialize(IHostContext hostContext)
{
    base.Initialize(hostContext);
    Trace.Info("DapServer initialized");
}
|
||||||
|
|
||||||
|
// Installs the debug session that will handle incoming DAP requests.
// Not synchronized - expected to be called before messages start flowing.
public void SetSession(IDapDebugSession session)
{
    _session = session;
    Trace.Info("Debug session set");
}
|
||||||
|
|
||||||
|
// Starts listening for a DAP client on localhost:<port>. Returns as soon as
// the listener is up; the actual accept happens in a fire-and-forget background
// task (AcceptConnectionAsync), whose outcome is surfaced via _connectionTcs.
public async Task StartAsync(int port, CancellationToken cancellationToken)
{
    Trace.Info($"Starting DAP server on port {port}");

    _cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
    // RunContinuationsAsynchronously avoids running waiter continuations inline
    // on the accept thread.
    _connectionTcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);

    try
    {
        // Loopback only - the debug port is never exposed off-machine.
        _listener = new TcpListener(IPAddress.Loopback, port);
        _listener.Start();
        Trace.Info($"DAP server listening on 127.0.0.1:{port}");

        // Start accepting connections in the background
        _ = AcceptConnectionAsync(_cts.Token);
    }
    catch (Exception ex)
    {
        Trace.Error($"Failed to start DAP server: {ex.Message}");
        throw;
    }

    // Method is async only to satisfy the Task-returning interface; all work
    // above is synchronous.
    await Task.CompletedTask;
}
|
||||||
|
|
||||||
|
// Accepts a single debug client, signals _connectionTcs, and starts the message
// loop. Cancellation is propagated by stopping the listener, which makes the
// pending Accept throw (caught below as an expected outcome).
private async Task AcceptConnectionAsync(CancellationToken cancellationToken)
{
    try
    {
        Trace.Info("Waiting for debug client connection...");

        // Use cancellation-aware accept: AcceptTcpClientAsync() here takes no
        // token, so cancellation is implemented by stopping the listener.
        using (cancellationToken.Register(() => _listener?.Stop()))
        {
            _client = await _listener.AcceptTcpClientAsync();
        }

        if (cancellationToken.IsCancellationRequested)
        {
            return;
        }

        _stream = _client.GetStream();
        var remoteEndPoint = _client.Client.RemoteEndPoint;
        Trace.Info($"Debug client connected from {remoteEndPoint}");

        // Signal that connection is established
        _connectionTcs.TrySetResult(true);

        // Start processing messages
        _messageLoopTask = ProcessMessagesAsync(_cts.Token);
    }
    catch (ObjectDisposedException) when (cancellationToken.IsCancellationRequested)
    {
        // Expected when cancellation stops the listener
        Trace.Info("Connection accept cancelled");
        _connectionTcs.TrySetCanceled();
    }
    catch (SocketException ex) when (cancellationToken.IsCancellationRequested)
    {
        // Expected when cancellation stops the listener
        Trace.Info($"Connection accept cancelled: {ex.Message}");
        _connectionTcs.TrySetCanceled();
    }
    catch (Exception ex)
    {
        // Unexpected failure - propagate to anyone awaiting WaitForConnectionAsync.
        Trace.Error($"Error accepting connection: {ex.Message}");
        _connectionTcs.TrySetException(ex);
    }
}
|
||||||
|
|
||||||
|
public async Task WaitForConnectionAsync(CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
Trace.Info("Waiting for debug client to connect...");
|
||||||
|
|
||||||
|
using (cancellationToken.Register(() => _connectionTcs.TrySetCanceled()))
|
||||||
|
{
|
||||||
|
await _connectionTcs.Task;
|
||||||
|
}
|
||||||
|
|
||||||
|
Trace.Info("Debug client connected");
|
||||||
|
}
|
||||||
|
|
||||||
|
public async Task StopAsync()
|
||||||
|
{
|
||||||
|
Trace.Info("Stopping DAP server");
|
||||||
|
|
||||||
|
_cts?.Cancel();
|
||||||
|
|
||||||
|
// Wait for message loop to complete
|
||||||
|
if (_messageLoopTask != null)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
await _messageLoopTask;
|
||||||
|
}
|
||||||
|
catch (OperationCanceledException)
|
||||||
|
{
|
||||||
|
// Expected
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Warning($"Message loop ended with error: {ex.Message}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Clean up resources
|
||||||
|
_stream?.Close();
|
||||||
|
_client?.Close();
|
||||||
|
_listener?.Stop();
|
||||||
|
|
||||||
|
Trace.Info("DAP server stopped");
|
||||||
|
}
|
||||||
|
|
||||||
|
public void SendEvent(Event evt)
|
||||||
|
{
|
||||||
|
if (!IsConnected)
|
||||||
|
{
|
||||||
|
Trace.Warning($"Cannot send event '{evt.EventType}': no client connected");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
lock (_sendLock)
|
||||||
|
{
|
||||||
|
evt.Seq = _nextSeq++;
|
||||||
|
SendMessageInternal(evt);
|
||||||
|
}
|
||||||
|
Trace.Info($"Sent event: {evt.EventType}");
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Error($"Failed to send event '{evt.EventType}': {ex.Message}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task ProcessMessagesAsync(CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
Trace.Info("Starting DAP message processing loop");
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
while (!cancellationToken.IsCancellationRequested && IsConnected)
|
||||||
|
{
|
||||||
|
var json = await ReadMessageAsync(cancellationToken);
|
||||||
|
if (json == null)
|
||||||
|
{
|
||||||
|
Trace.Info("Client disconnected (end of stream)");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
await ProcessMessageAsync(json, cancellationToken);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
|
||||||
|
{
|
||||||
|
Trace.Info("Message processing cancelled");
|
||||||
|
}
|
||||||
|
catch (IOException ex)
|
||||||
|
{
|
||||||
|
Trace.Info($"Connection closed: {ex.Message}");
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Error($"Error in message loop: {ex}");
|
||||||
|
}
|
||||||
|
|
||||||
|
Trace.Info("DAP message processing loop ended");
|
||||||
|
}
|
||||||
|
|
||||||
|
private async Task ProcessMessageAsync(string json, CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
Request request = null;
|
||||||
|
try
|
||||||
|
{
|
||||||
|
// Parse the incoming message
|
||||||
|
request = JsonConvert.DeserializeObject<Request>(json);
|
||||||
|
if (request == null || request.Type != "request")
|
||||||
|
{
|
||||||
|
Trace.Warning($"Received non-request message: {json}");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
Trace.Info($"Received request: seq={request.Seq}, command={request.Command}");
|
||||||
|
|
||||||
|
// Dispatch to session for handling
|
||||||
|
if (_session == null)
|
||||||
|
{
|
||||||
|
Trace.Error("No debug session configured");
|
||||||
|
SendErrorResponse(request, "No debug session configured");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
var response = await _session.HandleRequestAsync(request);
|
||||||
|
response.RequestSeq = request.Seq;
|
||||||
|
response.Command = request.Command;
|
||||||
|
response.Type = "response";
|
||||||
|
|
||||||
|
lock (_sendLock)
|
||||||
|
{
|
||||||
|
response.Seq = _nextSeq++;
|
||||||
|
SendMessageInternal(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
Trace.Info($"Sent response: seq={response.Seq}, command={response.Command}, success={response.Success}");
|
||||||
|
}
|
||||||
|
catch (JsonException ex)
|
||||||
|
{
|
||||||
|
Trace.Error($"Failed to parse request: {ex.Message}");
|
||||||
|
Trace.Error($"JSON: {json}");
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
Trace.Error($"Error processing request: {ex}");
|
||||||
|
if (request != null)
|
||||||
|
{
|
||||||
|
SendErrorResponse(request, ex.Message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void SendErrorResponse(Request request, string message)
|
||||||
|
{
|
||||||
|
var response = new Response
|
||||||
|
{
|
||||||
|
Type = "response",
|
||||||
|
RequestSeq = request.Seq,
|
||||||
|
Command = request.Command,
|
||||||
|
Success = false,
|
||||||
|
Message = message,
|
||||||
|
Body = new ErrorResponseBody
|
||||||
|
{
|
||||||
|
Error = new Message
|
||||||
|
{
|
||||||
|
Id = 1,
|
||||||
|
Format = message,
|
||||||
|
ShowUser = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
lock (_sendLock)
|
||||||
|
{
|
||||||
|
response.Seq = _nextSeq++;
|
||||||
|
SendMessageInternal(response);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Reads a DAP message from the stream.
|
||||||
|
/// DAP uses HTTP-like message framing: Content-Length: N\r\n\r\n{json}
|
||||||
|
/// </summary>
|
||||||
|
private async Task<string> ReadMessageAsync(CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
// Read headers until we find Content-Length
|
||||||
|
var headerBuilder = new StringBuilder();
|
||||||
|
int contentLength = -1;
|
||||||
|
|
||||||
|
while (true)
|
||||||
|
{
|
||||||
|
var line = await ReadLineAsync(cancellationToken);
|
||||||
|
if (line == null)
|
||||||
|
{
|
||||||
|
// End of stream
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (line.Length == 0)
|
||||||
|
{
|
||||||
|
// Empty line marks end of headers
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
headerBuilder.AppendLine(line);
|
||||||
|
|
||||||
|
if (line.StartsWith(ContentLengthHeader, StringComparison.OrdinalIgnoreCase))
|
||||||
|
{
|
||||||
|
var lengthStr = line.Substring(ContentLengthHeader.Length).Trim();
|
||||||
|
if (!int.TryParse(lengthStr, out contentLength))
|
||||||
|
{
|
||||||
|
throw new InvalidDataException($"Invalid Content-Length: {lengthStr}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (contentLength < 0)
|
||||||
|
{
|
||||||
|
throw new InvalidDataException("Missing Content-Length header");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read the JSON body
|
||||||
|
var buffer = new byte[contentLength];
|
||||||
|
var totalRead = 0;
|
||||||
|
while (totalRead < contentLength)
|
||||||
|
{
|
||||||
|
var bytesRead = await _stream.ReadAsync(buffer, totalRead, contentLength - totalRead, cancellationToken);
|
||||||
|
if (bytesRead == 0)
|
||||||
|
{
|
||||||
|
throw new EndOfStreamException("Connection closed while reading message body");
|
||||||
|
}
|
||||||
|
totalRead += bytesRead;
|
||||||
|
}
|
||||||
|
|
||||||
|
var json = Encoding.UTF8.GetString(buffer);
|
||||||
|
Trace.Verbose($"Received: {json}");
|
||||||
|
return json;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Reads a line from the stream (terminated by \r\n).
|
||||||
|
/// </summary>
|
||||||
|
private async Task<string> ReadLineAsync(CancellationToken cancellationToken)
|
||||||
|
{
|
||||||
|
var lineBuilder = new StringBuilder();
|
||||||
|
var buffer = new byte[1];
|
||||||
|
var previousWasCr = false;
|
||||||
|
|
||||||
|
while (true)
|
||||||
|
{
|
||||||
|
var bytesRead = await _stream.ReadAsync(buffer, 0, 1, cancellationToken);
|
||||||
|
if (bytesRead == 0)
|
||||||
|
{
|
||||||
|
// End of stream
|
||||||
|
return lineBuilder.Length > 0 ? lineBuilder.ToString() : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
var c = (char)buffer[0];
|
||||||
|
|
||||||
|
if (c == '\n' && previousWasCr)
|
||||||
|
{
|
||||||
|
// Found \r\n, return the line (without the \r)
|
||||||
|
if (lineBuilder.Length > 0 && lineBuilder[lineBuilder.Length - 1] == '\r')
|
||||||
|
{
|
||||||
|
lineBuilder.Length--;
|
||||||
|
}
|
||||||
|
return lineBuilder.ToString();
|
||||||
|
}
|
||||||
|
|
||||||
|
previousWasCr = (c == '\r');
|
||||||
|
lineBuilder.Append(c);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// Sends a DAP message to the stream with Content-Length framing.
|
||||||
|
/// Must be called within the _sendLock.
|
||||||
|
/// </summary>
|
||||||
|
private void SendMessageInternal(ProtocolMessage message)
|
||||||
|
{
|
||||||
|
var json = JsonConvert.SerializeObject(message, new JsonSerializerSettings
|
||||||
|
{
|
||||||
|
NullValueHandling = NullValueHandling.Ignore
|
||||||
|
});
|
||||||
|
|
||||||
|
var bodyBytes = Encoding.UTF8.GetBytes(json);
|
||||||
|
var header = $"Content-Length: {bodyBytes.Length}\r\n\r\n";
|
||||||
|
var headerBytes = Encoding.UTF8.GetBytes(header);
|
||||||
|
|
||||||
|
_stream.Write(headerBytes, 0, headerBytes.Length);
|
||||||
|
_stream.Write(bodyBytes, 0, bodyBytes.Length);
|
||||||
|
_stream.Flush();
|
||||||
|
|
||||||
|
Trace.Verbose($"Sent: {json}");
|
||||||
|
}
|
||||||
|
|
||||||
|
public void Dispose()
|
||||||
|
{
|
||||||
|
Dispose(true);
|
||||||
|
GC.SuppressFinalize(this);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void Dispose(bool disposing)
|
||||||
|
{
|
||||||
|
if (_disposed)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (disposing)
|
||||||
|
{
|
||||||
|
_cts?.Cancel();
|
||||||
|
_stream?.Dispose();
|
||||||
|
_client?.Dispose();
|
||||||
|
_listener?.Stop();
|
||||||
|
_cts?.Dispose();
|
||||||
|
}
|
||||||
|
|
||||||
|
_disposed = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
293
src/Runner.Worker/Dap/DapVariableProvider.cs
Normal file
293
src/Runner.Worker/Dap/DapVariableProvider.cs
Normal file
@@ -0,0 +1,293 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||||
|
using GitHub.Runner.Common;
|
||||||
|
|
||||||
|
namespace GitHub.Runner.Worker.Dap
{
    /// <summary>
    /// Provides DAP variable information from the execution context.
    /// Maps workflow contexts (github, env, runner, job, steps, secrets) to DAP scopes and variables.
    /// </summary>
    public sealed class DapVariableProvider
    {
        // Well-known scope names that map to top-level contexts
        private static readonly string[] ScopeNames = { "github", "env", "runner", "job", "steps", "secrets", "inputs", "vars", "matrix", "needs" };

        // Reserved variable reference ranges for scopes (1-100)
        private const int ScopeReferenceBase = 1;
        private const int ScopeReferenceMax = 100;

        // Dynamic variable references start after scope range
        private const int DynamicReferenceBase = 101;

        private readonly IHostContext _hostContext;
        // reference id -> (container data, dotted expression path to it)
        private readonly Dictionary<int, (PipelineContextData Data, string Path)> _variableReferences = new();
        private int _nextVariableReference = DynamicReferenceBase;

        public DapVariableProvider(IHostContext hostContext)
        {
            _hostContext = hostContext;
        }

        /// <summary>
        /// Resets the variable reference state. Call this when the execution context changes.
        /// </summary>
        public void Reset()
        {
            _variableReferences.Clear();
            _nextVariableReference = DynamicReferenceBase;
        }

        /// <summary>
        /// Gets the list of scopes for a given execution context.
        /// Each scope represents a top-level context like 'github', 'env', etc.
        /// </summary>
        /// <param name="frameId">Unused today; all frames share the same scopes.</param>
        public List<Scope> GetScopes(IExecutionContext context, int frameId)
        {
            var scopes = new List<Scope>();

            if (context?.ExpressionValues == null)
            {
                return scopes;
            }

            for (int i = 0; i < ScopeNames.Length; i++)
            {
                var scopeName = ScopeNames[i];
                if (context.ExpressionValues.TryGetValue(scopeName, out var value) && value != null)
                {
                    // Scope references are fixed by position so they stay stable
                    // across requests without registration.
                    var variablesRef = ScopeReferenceBase + i;
                    var scope = new Scope
                    {
                        Name = scopeName,
                        VariablesReference = variablesRef,
                        Expensive = false,
                        // Secrets get a special presentation hint
                        PresentationHint = scopeName == "secrets" ? "registers" : null
                    };

                    // Count named variables if it's a dictionary
                    if (value is DictionaryContextData dict)
                    {
                        scope.NamedVariables = dict.Count;
                    }
                    else if (value is CaseSensitiveDictionaryContextData csDict)
                    {
                        scope.NamedVariables = csDict.Count;
                    }

                    scopes.Add(scope);
                }
            }

            return scopes;
        }

        /// <summary>
        /// Gets variables for a given variable reference.
        /// </summary>
        public List<Variable> GetVariables(IExecutionContext context, int variablesReference)
        {
            var variables = new List<Variable>();

            if (context?.ExpressionValues == null)
            {
                return variables;
            }

            PipelineContextData data = null;
            string basePath = null;
            bool isSecretsScope = false;

            // Check if this is a scope reference (1-100)
            if (variablesReference >= ScopeReferenceBase && variablesReference <= ScopeReferenceMax)
            {
                var scopeIndex = variablesReference - ScopeReferenceBase;
                if (scopeIndex < ScopeNames.Length)
                {
                    var scopeName = ScopeNames[scopeIndex];
                    isSecretsScope = scopeName == "secrets";
                    if (context.ExpressionValues.TryGetValue(scopeName, out data))
                    {
                        basePath = scopeName;
                    }
                }
            }
            // Check dynamic references
            else if (_variableReferences.TryGetValue(variablesReference, out var refData))
            {
                data = refData.Data;
                basePath = refData.Path;
                // Check if we're inside the secrets scope
                isSecretsScope = basePath?.StartsWith("secrets", StringComparison.OrdinalIgnoreCase) == true;
            }

            if (data == null)
            {
                return variables;
            }

            // Convert the data to variables
            ConvertToVariables(data, basePath, isSecretsScope, variables);

            return variables;
        }

        /// <summary>
        /// Converts PipelineContextData to DAP Variable objects.
        /// </summary>
        private void ConvertToVariables(PipelineContextData data, string basePath, bool isSecretsScope, List<Variable> variables)
        {
            switch (data)
            {
                case DictionaryContextData dict:
                    ConvertDictionaryToVariables(dict, basePath, isSecretsScope, variables);
                    break;

                case CaseSensitiveDictionaryContextData csDict:
                    ConvertCaseSensitiveDictionaryToVariables(csDict, basePath, isSecretsScope, variables);
                    break;

                case ArrayContextData array:
                    ConvertArrayToVariables(array, basePath, isSecretsScope, variables);
                    break;

                default:
                    // Scalar value - shouldn't typically get here for a container
                    break;
            }
        }

        private void ConvertDictionaryToVariables(DictionaryContextData dict, string basePath, bool isSecretsScope, List<Variable> variables)
        {
            foreach (var pair in dict)
            {
                var variable = CreateVariable(pair.Key, pair.Value, basePath, isSecretsScope);
                variables.Add(variable);
            }
        }

        private void ConvertCaseSensitiveDictionaryToVariables(CaseSensitiveDictionaryContextData dict, string basePath, bool isSecretsScope, List<Variable> variables)
        {
            foreach (var pair in dict)
            {
                var variable = CreateVariable(pair.Key, pair.Value, basePath, isSecretsScope);
                variables.Add(variable);
            }
        }

        private void ConvertArrayToVariables(ArrayContextData array, string basePath, bool isSecretsScope, List<Variable> variables)
        {
            for (int i = 0; i < array.Count; i++)
            {
                // CreateVariable already sets Name from the first argument;
                // the previous duplicate assignment was removed.
                var variable = CreateVariable($"[{i}]", array[i], basePath, isSecretsScope);
                variables.Add(variable);
            }
        }

        private Variable CreateVariable(string name, PipelineContextData value, string basePath, bool isSecretsScope)
        {
            // Array elements use index syntax (env[0]), not member syntax (env.[0]).
            string childPath;
            if (name.StartsWith("[", StringComparison.Ordinal))
            {
                childPath = $"{basePath}{name}";
            }
            else
            {
                childPath = string.IsNullOrEmpty(basePath) ? name : $"{basePath}.{name}";
            }

            var variable = new Variable
            {
                Name = name,
                EvaluateName = $"${{{{ {childPath} }}}}"
            };

            if (value == null)
            {
                variable.Value = "null";
                variable.Type = "null";
                variable.VariablesReference = 0;
                return variable;
            }

            switch (value)
            {
                case StringContextData str:
                    if (isSecretsScope)
                    {
                        // Always mask secrets regardless of value
                        variable.Value = "[REDACTED]";
                    }
                    else
                    {
                        // Mask any secret values that might be in non-secret contexts
                        variable.Value = MaskSecrets(str.Value);
                    }
                    variable.Type = "string";
                    variable.VariablesReference = 0;
                    break;

                case NumberContextData num:
                    variable.Value = num.ToString();
                    variable.Type = "number";
                    variable.VariablesReference = 0;
                    break;

                case BooleanContextData boolVal:
                    variable.Value = boolVal.Value ? "true" : "false";
                    variable.Type = "boolean";
                    variable.VariablesReference = 0;
                    break;

                case DictionaryContextData dict:
                    variable.Value = $"Object ({dict.Count} properties)";
                    variable.Type = "object";
                    variable.VariablesReference = RegisterVariableReference(dict, childPath);
                    variable.NamedVariables = dict.Count;
                    break;

                case CaseSensitiveDictionaryContextData csDict:
                    variable.Value = $"Object ({csDict.Count} properties)";
                    variable.Type = "object";
                    variable.VariablesReference = RegisterVariableReference(csDict, childPath);
                    variable.NamedVariables = csDict.Count;
                    break;

                case ArrayContextData array:
                    variable.Value = $"Array ({array.Count} items)";
                    variable.Type = "array";
                    variable.VariablesReference = RegisterVariableReference(array, childPath);
                    variable.IndexedVariables = array.Count;
                    break;

                default:
                    // Unknown type - convert to string representation
                    var rawValue = value.ToJToken()?.ToString() ?? "unknown";
                    variable.Value = MaskSecrets(rawValue);
                    variable.Type = value.GetType().Name;
                    variable.VariablesReference = 0;
                    break;
            }

            return variable;
        }

        /// <summary>
        /// Registers a nested variable reference and returns its ID.
        /// </summary>
        private int RegisterVariableReference(PipelineContextData data, string path)
        {
            var reference = _nextVariableReference++;
            _variableReferences[reference] = (data, path);
            return reference;
        }

        /// <summary>
        /// Masks any secret values in the string using the host context's secret masker.
        /// </summary>
        private string MaskSecrets(string value)
        {
            if (string.IsNullOrEmpty(value))
            {
                return value ?? string.Empty;
            }

            return _hostContext.SecretMasker.MaskSecrets(value);
        }
    }
}
|
||||||
87
src/Runner.Worker/Dap/StepCheckpoint.cs
Normal file
87
src/Runner.Worker/Dap/StepCheckpoint.cs
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
using System;
|
||||||
|
using System.Collections.Generic;
|
||||||
|
using GitHub.DistributedTask.WebApi;
|
||||||
|
using GitHub.Runner.Common;
|
||||||
|
|
||||||
|
namespace GitHub.Runner.Worker.Dap
{
    /// <summary>
    /// Represents a snapshot of job state captured just before a step executes.
    /// Created when user issues next/continue command, after any REPL modifications.
    /// Used for step-back (time-travel) debugging.
    /// </summary>
    public sealed class StepCheckpoint
    {
        /// <summary>
        /// Index of this checkpoint in the checkpoints list.
        /// Used when restoring to identify which checkpoint to restore to.
        /// </summary>
        public int CheckpointIndex { get; set; }

        /// <summary>
        /// Zero-based index of the step in the job.
        /// </summary>
        public int StepIndex { get; set; }

        /// <summary>
        /// Display name of the step this checkpoint was created for.
        /// </summary>
        public string StepDisplayName { get; set; }

        /// <summary>
        /// Snapshot of Global.EnvironmentVariables.
        /// </summary>
        public Dictionary<string, string> EnvironmentVariables { get; set; }

        /// <summary>
        /// Snapshot of ExpressionValues["env"] context data.
        /// </summary>
        public Dictionary<string, string> EnvContextData { get; set; }

        /// <summary>
        /// Snapshot of Global.PrependPath.
        /// </summary>
        public List<string> PrependPath { get; set; }

        /// <summary>
        /// Snapshot of job result.
        /// </summary>
        public TaskResult? JobResult { get; set; }

        /// <summary>
        /// Snapshot of job status.
        /// </summary>
        public ActionResult? JobStatus { get; set; }

        /// <summary>
        /// Snapshot of steps context (outputs, outcomes, conclusions).
        /// Key is "{scopeName}/{stepName}", value is the step's state.
        /// </summary>
        public Dictionary<string, StepStateSnapshot> StepsSnapshot { get; set; }

        /// <summary>
        /// The step that was about to execute (for re-running).
        /// </summary>
        public IStep CurrentStep { get; set; }

        /// <summary>
        /// Steps remaining in the queue after CurrentStep.
        /// </summary>
        public List<IStep> RemainingSteps { get; set; }

        /// <summary>
        /// When this checkpoint was created.
        /// </summary>
        public DateTime CreatedAt { get; set; }
    }

    /// <summary>
    /// Snapshot of a single step's state in the steps context.
    /// </summary>
    public sealed class StepStateSnapshot
    {
        // Step outcome before continue-on-error is applied.
        public ActionResult? Outcome { get; set; }
        // Final step conclusion after continue-on-error is applied.
        // NOTE(review): naming mirrors the steps context fields — confirm semantics against the steps-context producer.
        public ActionResult? Conclusion { get; set; }
        // Step output name/value pairs.
        public Dictionary<string, string> Outputs { get; set; }
    }
}
|
||||||
@@ -522,6 +522,10 @@ namespace GitHub.Runner.Worker
|
|||||||
if (annotation != null)
|
if (annotation != null)
|
||||||
{
|
{
|
||||||
stepResult.Annotations.Add(annotation.Value);
|
stepResult.Annotations.Add(annotation.Value);
|
||||||
|
if (annotation.Value.IsInfrastructureIssue && string.IsNullOrEmpty(Global.InfrastructureFailureCategory))
|
||||||
|
{
|
||||||
|
Global.InfrastructureFailureCategory = issue.Category;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -862,7 +866,21 @@ namespace GitHub.Runner.Worker
|
|||||||
|
|
||||||
ExpressionValues["secrets"] = Global.Variables.ToSecretsContext();
|
ExpressionValues["secrets"] = Global.Variables.ToSecretsContext();
|
||||||
ExpressionValues["runner"] = new RunnerContext();
|
ExpressionValues["runner"] = new RunnerContext();
|
||||||
ExpressionValues["job"] = new JobContext();
|
|
||||||
|
Trace.Info("Initializing Job context");
|
||||||
|
var jobContext = new JobContext();
|
||||||
|
if (Global.Variables.GetBoolean(Constants.Runner.Features.AddCheckRunIdToJobContext) ?? false)
|
||||||
|
{
|
||||||
|
ExpressionValues.TryGetValue("job", out var jobDictionary);
|
||||||
|
if (jobDictionary != null)
|
||||||
|
{
|
||||||
|
foreach (var pair in jobDictionary.AssertDictionary("job"))
|
||||||
|
{
|
||||||
|
jobContext[pair.Key] = pair.Value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ExpressionValues["job"] = jobContext;
|
||||||
|
|
||||||
Trace.Info("Initialize GitHub context");
|
Trace.Info("Initialize GitHub context");
|
||||||
var githubAccessToken = new StringContextData(Global.Variables.Get("system.github.token"));
|
var githubAccessToken = new StringContextData(Global.Variables.Get("system.github.token"));
|
||||||
@@ -1288,10 +1306,14 @@ namespace GitHub.Runner.Worker
|
|||||||
UpdateGlobalStepsContext();
|
UpdateGlobalStepsContext();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
internal IPipelineTemplateEvaluator ToPipelineTemplateEvaluatorInternal(ObjectTemplating.ITraceWriter traceWriter = null)
|
||||||
|
{
|
||||||
|
return new PipelineTemplateEvaluatorWrapper(HostContext, this, traceWriter);
|
||||||
|
}
|
||||||
|
|
||||||
private static void NoOp()
|
private static void NoOp()
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// The Error/Warning/etc methods are created as extension methods to simplify unit testing.
|
// The Error/Warning/etc methods are created as extension methods to simplify unit testing.
|
||||||
@@ -1321,9 +1343,9 @@ namespace GitHub.Runner.Worker
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Do not add a format string overload. See comment on ExecutionContext.Write().
|
// Do not add a format string overload. See comment on ExecutionContext.Write().
|
||||||
public static void InfrastructureError(this IExecutionContext context, string message)
|
public static void InfrastructureError(this IExecutionContext context, string message, string category = null)
|
||||||
{
|
{
|
||||||
var issue = new Issue() { Type = IssueType.Error, Message = message, IsInfrastructureIssue = true };
|
var issue = new Issue() { Type = IssueType.Error, Message = message, IsInfrastructureIssue = true, Category = category };
|
||||||
context.AddIssue(issue, ExecutionContextLogOptions.Default);
|
context.AddIssue(issue, ExecutionContextLogOptions.Default);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1372,8 +1394,15 @@ namespace GitHub.Runner.Worker
|
|||||||
return new[] { new KeyValuePair<string, object>(nameof(IExecutionContext), context) };
|
return new[] { new KeyValuePair<string, object>(nameof(IExecutionContext), context) };
|
||||||
}
|
}
|
||||||
|
|
||||||
public static PipelineTemplateEvaluator ToPipelineTemplateEvaluator(this IExecutionContext context, ObjectTemplating.ITraceWriter traceWriter = null)
|
public static IPipelineTemplateEvaluator ToPipelineTemplateEvaluator(this IExecutionContext context, ObjectTemplating.ITraceWriter traceWriter = null)
|
||||||
{
|
{
|
||||||
|
// Create wrapper?
|
||||||
|
if ((context.Global.Variables.GetBoolean(Constants.Runner.Features.CompareWorkflowParser) ?? false) || StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("ACTIONS_RUNNER_COMPARE_WORKFLOW_PARSER")))
|
||||||
|
{
|
||||||
|
return (context as ExecutionContext).ToPipelineTemplateEvaluatorInternal(traceWriter);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Legacy
|
||||||
if (traceWriter == null)
|
if (traceWriter == null)
|
||||||
{
|
{
|
||||||
traceWriter = context.ToTemplateTraceWriter();
|
traceWriter = context.ToTemplateTraceWriter();
|
||||||
|
|||||||
@@ -22,4 +22,13 @@ namespace GitHub.Runner.Worker.Expressions
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public sealed class NewAlwaysFunction : GitHub.Actions.Expressions.Sdk.Function
|
||||||
|
{
|
||||||
|
protected override Object EvaluateCore(GitHub.Actions.Expressions.Sdk.EvaluationContext context, out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
|
||||||
|
{
|
||||||
|
resultMemory = null;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -28,4 +28,18 @@ namespace GitHub.Runner.Worker.Expressions
|
|||||||
return jobStatus == ActionResult.Cancelled;
|
return jobStatus == ActionResult.Cancelled;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public sealed class NewCancelledFunction : GitHub.Actions.Expressions.Sdk.Function
|
||||||
|
{
|
||||||
|
protected sealed override object EvaluateCore(GitHub.Actions.Expressions.Sdk.EvaluationContext evaluationContext, out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
|
||||||
|
{
|
||||||
|
resultMemory = null;
|
||||||
|
var templateContext = evaluationContext.State as GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateContext;
|
||||||
|
ArgUtil.NotNull(templateContext, nameof(templateContext));
|
||||||
|
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
|
||||||
|
ArgUtil.NotNull(executionContext, nameof(executionContext));
|
||||||
|
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
|
||||||
|
return jobStatus == ActionResult.Cancelled;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -39,4 +39,29 @@ namespace GitHub.Runner.Worker.Expressions
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// <summary>
/// Expression function <c>failure()</c>: evaluates to true when the relevant
/// status is <see cref="ActionResult.Failure"/>. For composite MAIN steps the
/// relevant status is the 'action_status' github-context value; for pre, post
/// and job-level steps it is 'job.status'.
/// </summary>
public sealed class NewFailureFunction : GitHub.Actions.Expressions.Sdk.Function
{
    protected sealed override object EvaluateCore(GitHub.Actions.Expressions.Sdk.EvaluationContext evaluationContext, out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
    {
        resultMemory = null;

        // Recover the worker's execution context from the templating state.
        var templateContext = evaluationContext.State as GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateContext;
        ArgUtil.NotNull(templateContext, nameof(templateContext));
        var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
        ArgUtil.NotNull(executionContext, nameof(executionContext));

        // Composite MAIN steps consult 'action_status'; everything else
        // consults the job status. A missing status counts as Success.
        var isCompositeMainStep = executionContext.IsEmbedded && executionContext.Stage == ActionRunStage.Main;
        ActionResult status = isCompositeMainStep
            ? EnumUtil.TryParse<ActionResult>(executionContext.GetGitHubContext("action_status")) ?? ActionResult.Success
            : executionContext.JobContext.Status ?? ActionResult.Success;

        return status == ActionResult.Failure;
    }
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -143,4 +143,137 @@ namespace GitHub.Runner.Worker.Expressions
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// <summary>
/// Expression function <c>hashFiles(...)</c>: hashes files under the workspace
/// that match the given glob patterns by invoking the bundled node 'hashFiles'
/// script, and returns the combined hash as a string (empty when no marker
/// output is produced).
/// </summary>
public sealed class NewHashFilesFunction : GitHub.Actions.Expressions.Sdk.Function
{
    // Hard upper bound on how long the external hash process may run.
    private const int _hashFileTimeoutSeconds = 120;

    // The node script brackets its result line with this marker on stderr.
    private const string _outputMarker = "__OUTPUT__";

    protected sealed override Object EvaluateCore(
        GitHub.Actions.Expressions.Sdk.EvaluationContext context,
        out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
    {
        resultMemory = null;
        var templateContext = context.State as GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateContext;
        ArgUtil.NotNull(templateContext, nameof(templateContext));
        templateContext.ExpressionValues.TryGetValue(PipelineTemplateConstants.GitHub, out var githubContextData);
        ArgUtil.NotNull(githubContextData, nameof(githubContextData));
        var githubContext = githubContextData as GitHub.Actions.Expressions.Data.DictionaryExpressionData;
        ArgUtil.NotNull(githubContext, nameof(githubContext));

        // Prefer the host workspace (container scenarios) and fall back to
        // the plain workspace path.
        if (!githubContext.TryGetValue(PipelineTemplateConstants.HostWorkspace, out var workspace))
        {
            githubContext.TryGetValue(PipelineTemplateConstants.Workspace, out workspace);
        }
        ArgUtil.NotNull(workspace, nameof(workspace));

        var workspaceData = workspace as GitHub.Actions.Expressions.Data.StringExpressionData;
        ArgUtil.NotNull(workspaceData, nameof(workspaceData));

        string githubWorkspace = workspaceData.Value;

        // Only the FIRST parameter may be an option flag; every remaining
        // parameter is a glob pattern.
        bool followSymlink = false;
        List<string> patterns = new();
        var firstParameter = true;
        foreach (var parameter in Parameters)
        {
            var parameterString = parameter.Evaluate(context).ConvertToString();
            if (firstParameter)
            {
                firstParameter = false;
                if (parameterString.StartsWith("--", StringComparison.Ordinal))
                {
                    if (string.Equals(parameterString, "--follow-symbolic-links", StringComparison.OrdinalIgnoreCase))
                    {
                        followSymlink = true;
                        continue;
                    }
                    else
                    {
                        // Typo fixed ("avaliable" -> "available").
                        throw new ArgumentOutOfRangeException($"Invalid glob option {parameterString}, available option: '--follow-symbolic-links'.");
                    }
                }
            }

            patterns.Add(parameterString);
        }

        context.Trace.Info($"Search root directory: '{githubWorkspace}'");
        context.Trace.Info($"Search pattern: '{string.Join(", ", patterns)}'");

        // Locate the bundled node runtime and the hashFiles script relative
        // to the runner's bin directory.
        string binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
        string runnerRoot = new DirectoryInfo(binDir).Parent.FullName;

        string node = Path.Combine(runnerRoot, "externals", NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
        string hashFilesScript = Path.Combine(binDir, "hashFiles");
        var hashResult = string.Empty;
        var p = new ProcessInvoker(new NewHashFilesTrace(context.Trace));
        p.ErrorDataReceived += ((_, data) =>
        {
            // The script reports its result as __OUTPUT__<hash>__OUTPUT__ on
            // stderr; everything else is forwarded to the trace log.
            if (!string.IsNullOrEmpty(data.Data) &&
                data.Data.StartsWith(_outputMarker, StringComparison.Ordinal) &&
                data.Data.EndsWith(_outputMarker, StringComparison.Ordinal))
            {
                // Strip one marker from each end (was hard-coded 10/20).
                hashResult = data.Data.Substring(_outputMarker.Length, data.Data.Length - (2 * _outputMarker.Length));
                context.Trace.Info($"Hash result: '{hashResult}'");
            }
            else
            {
                context.Trace.Info(data.Data);
            }
        });

        p.OutputDataReceived += ((_, data) =>
        {
            context.Trace.Info(data.Data);
        });

        // Options are handed to the script through environment variables.
        var env = new Dictionary<string, string>();
        if (followSymlink)
        {
            env["followSymbolicLinks"] = "true";
        }
        env["patterns"] = string.Join(Environment.NewLine, patterns);

        using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(_hashFileTimeoutSeconds)))
        {
            try
            {
                int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
                                              fileName: node,
                                              arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
                                              environment: env,
                                              requireExitCodeZero: false,
                                              cancellationToken: tokenSource.Token).GetAwaiter().GetResult();

                if (exitCode != 0)
                {
                    throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
                }
            }
            catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
            {
                throw new TimeoutException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') couldn't finish within {_hashFileTimeoutSeconds} seconds.");
            }

            return hashResult;
        }
    }

    /// <summary>
    /// Adapts the expression SDK trace writer to the worker's ITraceWriter;
    /// Verbose is deliberately forwarded as Info.
    /// </summary>
    private sealed class NewHashFilesTrace : ITraceWriter
    {
        private GitHub.Actions.Expressions.ITraceWriter _trace;

        public NewHashFilesTrace(GitHub.Actions.Expressions.ITraceWriter trace)
        {
            _trace = trace;
        }

        public void Info(string message)
        {
            _trace.Info(message);
        }

        public void Verbose(string message)
        {
            _trace.Info(message);
        }
    }
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -39,4 +39,29 @@ namespace GitHub.Runner.Worker.Expressions
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// <summary>
/// Expression function <c>success()</c>: evaluates to true when the relevant
/// status is <see cref="ActionResult.Success"/>. For composite MAIN steps the
/// relevant status is the 'action_status' github-context value; for pre, post
/// and job-level steps it is 'job.status'.
/// </summary>
public sealed class NewSuccessFunction : GitHub.Actions.Expressions.Sdk.Function
{
    protected sealed override object EvaluateCore(GitHub.Actions.Expressions.Sdk.EvaluationContext evaluationContext, out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
    {
        resultMemory = null;

        // Recover the worker's execution context from the templating state.
        var templateContext = evaluationContext.State as GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateContext;
        ArgUtil.NotNull(templateContext, nameof(templateContext));
        var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
        ArgUtil.NotNull(executionContext, nameof(executionContext));

        // Composite MAIN steps consult 'action_status'; everything else
        // consults the job status. A missing status counts as Success.
        var isCompositeMainStep = executionContext.IsEmbedded && executionContext.Stage == ActionRunStage.Main;
        ActionResult status = isCompositeMainStep
            ? EnumUtil.TryParse<ActionResult>(executionContext.GetGitHubContext("action_status")) ?? ActionResult.Success
            : executionContext.JobContext.Status ?? ActionResult.Success;

        return status == ActionResult.Success;
    }
}
|
||||||
}
|
}
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user