Compare commits


4 Commits

| Author | SHA1 | Message | Date |
|--------|------|---------|------|
| copilot-swe-agent[bot] | 659915810c | Add documentation index link to main README (Co-authored-by: salmanmkc <32169182+salmanmkc@users.noreply.github.com>) | 2025-08-14 10:34:53 +00:00 |
| copilot-swe-agent[bot] | ea03aa2a6e | Enhance contribution guide, automation docs, and add documentation index (Co-authored-by: salmanmkc <32169182+salmanmkc@users.noreply.github.com>) | 2025-08-14 10:33:31 +00:00 |
| copilot-swe-agent[bot] | 691793a922 | Improve README and OS prerequisites documentation (Co-authored-by: salmanmkc <32169182+salmanmkc@users.noreply.github.com>) | 2025-08-14 10:30:04 +00:00 |
| copilot-swe-agent[bot] | f11013518c | Initial plan | 2025-08-14 10:14:18 +00:00 |
278 changed files with 1317 additions and 36605 deletions

View File

@@ -4,7 +4,7 @@
"features": {
"ghcr.io/devcontainers/features/docker-in-docker:1": {},
"ghcr.io/devcontainers/features/dotnet": {
"version": "8.0.416"
"version": "8.0.412"
},
"ghcr.io/devcontainers/features/node:1": {
"version": "20"

View File

@@ -1,8 +1,8 @@
blank_issues_enabled: false
contact_links:
- name: 🛑 Request a feature in the runner application
url: https://github.com/orgs/community/discussions/categories/actions
about: If you have feature requests for GitHub Actions, please use the Actions section on the GitHub Product Feedback page.
url: https://github.com/orgs/community/discussions/categories/actions-and-packages
about: If you have feature requests for GitHub Actions, please use the Actions and Packages section on the GitHub Product Feedback page.
- name: ✅ Support for GitHub Actions
url: https://github.community/c/code-to-cloud/52
about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum.

View File

@@ -14,9 +14,6 @@ on:
paths-ignore:
- '**.md'
permissions:
contents: read
jobs:
build:
strategy:
@@ -53,7 +50,7 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
# Build runner layout
- name: Build & Layout Release
@@ -78,53 +75,8 @@ jobs:
# Upload runner package tar.gz/zip as artifact
- name: Publish Artifact
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v4
with:
name: runner-package-${{ matrix.runtime }}
path: |
_package
docker:
strategy:
matrix:
os: [ ubuntu-latest, ubuntu-24.04-arm ]
include:
- os: ubuntu-latest
docker_platform: linux/amd64
- os: ubuntu-24.04-arm
docker_platform: linux/arm64
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v6
- name: Get latest runner version
id: latest_runner
uses: actions/github-script@v8
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const release = await github.rest.repos.getLatestRelease({
owner: 'actions',
repo: 'runner',
});
const version = release.data.tag_name.replace(/^v/, '');
core.setOutput('version', version);
- name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
- name: Build Docker image
uses: docker/build-push-action@v6
with:
context: ./images
load: true
platforms: ${{ matrix.docker_platform }}
tags: |
${{ github.sha }}:latest
build-args: |
RUNNER_VERSION=${{ steps.latest_runner.outputs.version }}
- name: Test Docker image
run: |
docker run --rm ${{ github.sha }}:latest ./run.sh --version

View File

@@ -7,7 +7,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v10
- uses: actions/stale@v9
with:
close-issue-message: "This issue does not seem to be a problem with the runner application, it concerns the GitHub actions platform more generally. Could you please post your feedback on the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃"
exempt-issue-labels: "keep"

View File

@@ -7,9 +7,9 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v10
- uses: actions/stale@v9
with:
close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions-and-packages) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
exempt-issue-labels: "keep"
stale-issue-label: "actions-feature"
only-labels: "actions-feature"

View File

@@ -23,11 +23,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v6
uses: actions/checkout@v5
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v4
uses: github/codeql-action/init@v3
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
@@ -38,4 +38,4 @@ jobs:
working-directory: src
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v4
uses: github/codeql-action/analyze@v3

View File

@@ -1,211 +0,0 @@
name: Dependency Status Check
on:
workflow_dispatch:
inputs:
check_type:
description: "Type of dependency check"
required: false
default: "all"
type: choice
options:
- all
- node
- dotnet
- docker
- npm
schedule:
- cron: "0 11 * * 1" # Weekly on Monday at 11 AM
jobs:
dependency-status:
runs-on: ubuntu-latest
outputs:
node20-status: ${{ steps.check-versions.outputs.node20-status }}
node24-status: ${{ steps.check-versions.outputs.node24-status }}
dotnet-status: ${{ steps.check-versions.outputs.dotnet-status }}
docker-status: ${{ steps.check-versions.outputs.docker-status }}
buildx-status: ${{ steps.check-versions.outputs.buildx-status }}
npm-vulnerabilities: ${{ steps.check-versions.outputs.npm-vulnerabilities }}
open-dependency-prs: ${{ steps.check-prs.outputs.open-dependency-prs }}
steps:
- uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: "20"
- name: Check dependency versions
id: check-versions
run: |
echo "## Dependency Status Report" >> $GITHUB_STEP_SUMMARY
echo "Generated on: $(date)" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Check Node versions
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "node" ]]; then
echo "### Node.js Versions" >> $GITHUB_STEP_SUMMARY
VERSIONS_JSON=$(curl -s https://raw.githubusercontent.com/actions/node-versions/main/versions-manifest.json)
LATEST_NODE20=$(echo "$VERSIONS_JSON" | jq -r '.[] | select(.version | startswith("20.")) | .version' | head -1)
LATEST_NODE24=$(echo "$VERSIONS_JSON" | jq -r '.[] | select(.version | startswith("24.")) | .version' | head -1)
CURRENT_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
CURRENT_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
NODE20_STATUS="✅ up-to-date"
NODE24_STATUS="✅ up-to-date"
if [ "$CURRENT_NODE20" != "$LATEST_NODE20" ]; then
NODE20_STATUS="⚠️ outdated"
fi
if [ "$CURRENT_NODE24" != "$LATEST_NODE24" ]; then
NODE24_STATUS="⚠️ outdated"
fi
echo "| Version | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
echo "|---------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
echo "| Node 20 | $CURRENT_NODE20 | $LATEST_NODE20 | $NODE20_STATUS |" >> $GITHUB_STEP_SUMMARY
echo "| Node 24 | $CURRENT_NODE24 | $LATEST_NODE24 | $NODE24_STATUS |" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "node20-status=$NODE20_STATUS" >> $GITHUB_OUTPUT
echo "node24-status=$NODE24_STATUS" >> $GITHUB_OUTPUT
fi
# Check .NET version
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "dotnet" ]]; then
echo "### .NET SDK Version" >> $GITHUB_STEP_SUMMARY
current_dotnet_version=$(jq -r .sdk.version ./src/global.json)
current_major_minor=$(echo "$current_dotnet_version" | cut -d '.' -f 1,2)
latest_dotnet_version=$(curl -sb -H "Accept: application/json" "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/$current_major_minor/latest.version")
DOTNET_STATUS="✅ up-to-date"
if [ "$current_dotnet_version" != "$latest_dotnet_version" ]; then
DOTNET_STATUS="⚠️ outdated"
fi
echo "| Component | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
echo "|-----------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
echo "| .NET SDK | $current_dotnet_version | $latest_dotnet_version | $DOTNET_STATUS |" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "dotnet-status=$DOTNET_STATUS" >> $GITHUB_OUTPUT
fi
# Check Docker versions
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "docker" ]]; then
echo "### Docker Versions" >> $GITHUB_STEP_SUMMARY
current_docker=$(grep "ARG DOCKER_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
current_buildx=$(grep "ARG BUILDX_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
latest_docker=$(curl -s https://download.docker.com/linux/static/stable/x86_64/ | grep -o 'docker-[0-9]*\.[0-9]*\.[0-9]*\.tgz' | sort -V | tail -n 1 | sed 's/docker-\(.*\)\.tgz/\1/')
latest_buildx=$(curl -s https://api.github.com/repos/docker/buildx/releases/latest | jq -r '.tag_name' | sed 's/^v//')
DOCKER_STATUS="✅ up-to-date"
BUILDX_STATUS="✅ up-to-date"
if [ "$current_docker" != "$latest_docker" ]; then
DOCKER_STATUS="⚠️ outdated"
fi
if [ "$current_buildx" != "$latest_buildx" ]; then
BUILDX_STATUS="⚠️ outdated"
fi
echo "| Component | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
echo "|-----------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
echo "| Docker | $current_docker | $latest_docker | $DOCKER_STATUS |" >> $GITHUB_STEP_SUMMARY
echo "| Docker Buildx | $current_buildx | $latest_buildx | $BUILDX_STATUS |" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "docker-status=$DOCKER_STATUS" >> $GITHUB_OUTPUT
echo "buildx-status=$BUILDX_STATUS" >> $GITHUB_OUTPUT
fi
# Check npm vulnerabilities
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "npm" ]]; then
echo "### NPM Security Audit" >> $GITHUB_STEP_SUMMARY
cd src/Misc/expressionFunc/hashFiles
npm install --silent
AUDIT_OUTPUT=""
AUDIT_EXIT_CODE=0
# Run npm audit and capture output and exit code
if ! AUDIT_OUTPUT=$(npm audit --json 2>&1); then
AUDIT_EXIT_CODE=$?
fi
# Check if output is valid JSON
if echo "$AUDIT_OUTPUT" | jq . >/dev/null 2>&1; then
VULN_COUNT=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.total // 0')
# Ensure VULN_COUNT is a number
VULN_COUNT=$(echo "$VULN_COUNT" | grep -o '[0-9]*' | head -1)
VULN_COUNT=${VULN_COUNT:-0}
NPM_STATUS="✅ no vulnerabilities"
if [ "$VULN_COUNT" -gt 0 ] 2>/dev/null; then
NPM_STATUS="⚠️ $VULN_COUNT vulnerabilities found"
# Get vulnerability details
HIGH_VULNS=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.high // 0')
CRITICAL_VULNS=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.critical // 0')
echo "| Severity | Count |" >> $GITHUB_STEP_SUMMARY
echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
echo "| Critical | $CRITICAL_VULNS |" >> $GITHUB_STEP_SUMMARY
echo "| High | $HIGH_VULNS |" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
else
echo "No npm vulnerabilities found ✅" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
else
NPM_STATUS="❌ npm audit failed"
echo "npm audit failed to run or returned invalid JSON ❌" >> $GITHUB_STEP_SUMMARY
echo "Exit code: $AUDIT_EXIT_CODE" >> $GITHUB_STEP_SUMMARY
echo "Output: $AUDIT_OUTPUT" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
echo "npm-vulnerabilities=$NPM_STATUS" >> $GITHUB_OUTPUT
fi
- name: Check for open dependency PRs
id: check-prs
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "### Open Dependency PRs" >> $GITHUB_STEP_SUMMARY
# Get open PRs with dependency label
OPEN_PRS=$(gh pr list --label "dependencies" --state open --json number,title,url)
PR_COUNT=$(echo "$OPEN_PRS" | jq '. | length')
if [ "$PR_COUNT" -gt 0 ]; then
echo "Found $PR_COUNT open dependency PR(s):" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "$OPEN_PRS" | jq -r '.[] | "- [#\(.number)](\(.url)) \(.title)"' >> $GITHUB_STEP_SUMMARY
else
echo "No open dependency PRs found ✅" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
echo "open-dependency-prs=$PR_COUNT" >> $GITHUB_OUTPUT
- name: Summary
run: |
echo "### Summary" >> $GITHUB_STEP_SUMMARY
echo "- Check for open PRs with the \`dependency\` label before releases" >> $GITHUB_STEP_SUMMARY
echo "- Review and merge dependency updates regularly" >> $GITHUB_STEP_SUMMARY
echo "- Critical vulnerabilities should be addressed immediately" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Automated workflows run weekly to check for updates:**" >> $GITHUB_STEP_SUMMARY
echo "- Node.js versions (Mondays at 6 AM)" >> $GITHUB_STEP_SUMMARY
echo "- NPM audit fix (Mondays at 7 AM)" >> $GITHUB_STEP_SUMMARY
echo "- .NET SDK updates (Mondays at midnight)" >> $GITHUB_STEP_SUMMARY
echo "- Docker/Buildx updates (Mondays at midnight)" >> $GITHUB_STEP_SUMMARY

View File

@@ -2,8 +2,8 @@ name: "Docker/Buildx Version Upgrade"
on:
schedule:
- cron: "0 0 * * 1" # Run every Monday at midnight
workflow_dispatch: # Allow manual triggering
- cron: '0 0 * * 1' # Run every Monday at midnight
workflow_dispatch: # Allow manual triggering
jobs:
check-versions:
@@ -17,7 +17,7 @@ jobs:
BUILDX_CURRENT_VERSION: ${{ steps.check_buildx_version.outputs.CURRENT_VERSION }}
steps:
- name: Checkout repository
uses: actions/checkout@v6
uses: actions/checkout@v5
- name: Check Docker version
id: check_docker_version
@@ -35,7 +35,7 @@ jobs:
echo "Failed to retrieve a valid Docker version"
exit 1
fi
should_update=0
[ "$current_version" != "$latest_version" ] && should_update=1
@@ -64,17 +64,17 @@ jobs:
run: |
docker_should_update="${{ steps.check_docker_version.outputs.SHOULD_UPDATE }}"
buildx_should_update="${{ steps.check_buildx_version.outputs.SHOULD_UPDATE }}"
# Show annotation if only Docker needs update
if [[ "$docker_should_update" == "1" && "$buildx_should_update" == "0" ]]; then
echo "::warning ::Docker version (${{ steps.check_docker_version.outputs.LATEST_VERSION }}) needs update but Buildx is current. Only updating when both need updates."
fi
# Show annotation if only Buildx needs update
if [[ "$docker_should_update" == "0" && "$buildx_should_update" == "1" ]]; then
echo "::warning ::Buildx version (${{ steps.check_buildx_version.outputs.LATEST_VERSION }}) needs update but Docker is current. Only updating when both need updates."
fi
# Show annotation when both are current
if [[ "$docker_should_update" == "0" && "$buildx_should_update" == "0" ]]; then
echo "::warning ::Latest Docker version is ${{ steps.check_docker_version.outputs.LATEST_VERSION }} and Buildx version is ${{ steps.check_buildx_version.outputs.LATEST_VERSION }}. No updates needed."
@@ -89,26 +89,26 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v6
uses: actions/checkout@v5
- name: Update Docker version
shell: bash
run: |
latest_version="${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }}"
current_version="${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }}"
# Update version in Dockerfile
sed -i "s/ARG DOCKER_VERSION=$current_version/ARG DOCKER_VERSION=$latest_version/g" ./images/Dockerfile
- name: Update Buildx version
shell: bash
run: |
latest_version="${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
current_version="${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }}"
# Update version in Dockerfile
sed -i "s/ARG BUILDX_VERSION=$current_version/ARG BUILDX_VERSION=$latest_version/g" ./images/Dockerfile
- name: Commit changes and create Pull Request
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -117,7 +117,7 @@ jobs:
branch_name="feature/docker-buildx-upgrade"
commit_message="Upgrade Docker to v${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Buildx to v${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
pr_title="Update Docker to v${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Buildx to v${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
# Configure git
git config --global user.name "github-actions[bot]"
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
@@ -129,38 +129,16 @@ jobs:
else
git checkout -b "$branch_name"
fi
# Commit and push changes
git commit -a -m "$commit_message"
git push --force origin "$branch_name"
# Create PR body using here-doc for proper formatting
cat > pr_body.txt << 'EOF'
Automated Docker and Buildx version update:
- Docker: ${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }} → ${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }}
- Buildx: ${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }} → ${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}
This update ensures we're using the latest stable Docker and Buildx versions for security and performance improvements.
**Release notes:** https://docs.docker.com/engine/release-notes/
**Next steps:**
- Review the version changes
- Verify container builds work as expected
- Test multi-platform builds if applicable
- Merge when ready
---
Autogenerated by [Docker/Buildx Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/docker-buildx-upgrade.yml)
EOF
# Create PR
pr_body="Upgrades Docker version from ${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }} to ${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Docker Buildx version from ${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }} to ${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}.\n\n"
pr_body+="Release notes: https://docs.docker.com/engine/release-notes/\n\n"
pr_body+="---\n\nAutogenerated by [Docker/Buildx Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/docker-buildx-upgrade.yml)"
gh pr create -B main -H "$branch_name" \
--title "$pr_title" \
--label "dependencies" \
--label "dependencies-weekly-check" \
--label "dependencies-not-dependabot" \
--label "docker" \
--body-file pr_body.txt
--body "$pr_body"

View File

@@ -1,75 +0,0 @@
name: Publish DockerImage from Release Branch
on:
workflow_dispatch:
inputs:
releaseBranch:
description: 'Release Branch (releases/mXXX)'
required: true
jobs:
publish-image:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
attestations: write
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
steps:
- name: Checkout repository
uses: actions/checkout@v6
with:
ref: ${{ github.event.inputs.releaseBranch }}
- name: Compute image version
id: image
uses: actions/github-script@v8
with:
script: |
const fs = require('fs');
const runnerVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '');
console.log(`Using runner version ${runnerVersion}`);
if (!/^\d+\.\d+\.\d+$/.test(runnerVersion)) {
throw new Error(`Invalid runner version: ${runnerVersion}`);
}
core.setOutput('version', runnerVersion);
- name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
- name: Log into registry ${{ env.REGISTRY }}
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v6
with:
context: ./images
platforms: |
linux/amd64
linux/arm64
tags: |
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
build-args: |
RUNNER_VERSION=${{ steps.image.outputs.version }}
push: true
labels: |
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
org.opencontainers.image.licenses=MIT
annotations: |
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
- name: Generate attestation
uses: actions/attest-build-provenance@v3
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
subject-digest: ${{ steps.build-and-push.outputs.digest }}
push-to-registry: true

View File

@@ -2,20 +2,20 @@ name: "DotNet SDK Upgrade"
on:
schedule:
- cron: "0 8 * * 1" # Weekly on Monday at 8 AM UTC (independent of Node.js/NPM)
- cron: '0 0 * * 1'
workflow_dispatch:
jobs:
dotnet-update:
runs-on: ubuntu-latest
outputs:
outputs:
SHOULD_UPDATE: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE }}
BRANCH_EXISTS: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS }}
DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
steps:
- name: Checkout repository
uses: actions/checkout@v6
uses: actions/checkout@v5
- name: Get current major minor version
id: fetch_current_version
shell: bash
@@ -37,7 +37,7 @@ jobs:
# check if git branch already exists for the upgrade
branch_already_exists=0
if git ls-remote --heads --exit-code origin refs/heads/feature/dotnetsdk-upgrade/${latest_patch_version};
then
branch_already_exists=1
@@ -89,17 +89,17 @@ jobs:
if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
with:
ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
- name: Create Pull Request
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --label "dependencies" --label "dependencies-weekly-check" --label "dependencies-not-dependabot" --label "dotnet" --body "
https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
- uses: actions/checkout@v5
with:
ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
- name: Create Pull Request
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
---
Autogenerated by [DotNet SDK Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/dotnet-upgrade.yml)"
---
Autogenerated by [DotNet SDK Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/dotnet-upgrade.yml)"

View File

@@ -1,194 +0,0 @@
name: Auto Update Node Version
on:
schedule:
- cron: "0 6 * * 1" # Weekly, every Monday
workflow_dispatch:
jobs:
update-node:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- name: Get latest Node versions
id: node-versions
run: |
# Get latest Node.js releases from official GitHub releases
echo "Fetching latest Node.js releases..."
# Get latest v20.x release
LATEST_NODE20=$(curl -s https://api.github.com/repos/nodejs/node/releases | \
jq -r '.[] | select(.tag_name | startswith("v20.")) | .tag_name' | \
head -1 | sed 's/^v//')
# Get latest v24.x release
LATEST_NODE24=$(curl -s https://api.github.com/repos/nodejs/node/releases | \
jq -r '.[] | select(.tag_name | startswith("v24.")) | .tag_name' | \
head -1 | sed 's/^v//')
echo "Found Node.js releases: 20=$LATEST_NODE20, 24=$LATEST_NODE24"
# Verify these versions are available in alpine_nodejs releases
echo "Verifying availability in alpine_nodejs..."
ALPINE_RELEASES=$(curl -s https://api.github.com/repos/actions/alpine_nodejs/releases | jq -r '.[].tag_name')
if ! echo "$ALPINE_RELEASES" | grep -q "^v$LATEST_NODE20$"; then
echo "::warning title=Node 20 Fallback::Node 20 version $LATEST_NODE20 not found in alpine_nodejs releases, using fallback"
# Fall back to latest available alpine_nodejs v20 release
LATEST_NODE20=$(echo "$ALPINE_RELEASES" | grep "^v20\." | head -1 | sed 's/^v//')
echo "Using latest available alpine_nodejs Node 20: $LATEST_NODE20"
fi
if ! echo "$ALPINE_RELEASES" | grep -q "^v$LATEST_NODE24$"; then
echo "::warning title=Node 24 Fallback::Node 24 version $LATEST_NODE24 not found in alpine_nodejs releases, using fallback"
# Fall back to latest available alpine_nodejs v24 release
LATEST_NODE24=$(echo "$ALPINE_RELEASES" | grep "^v24\." | head -1 | sed 's/^v//')
echo "Using latest available alpine_nodejs Node 24: $LATEST_NODE24"
fi
# Validate that we have non-empty version numbers
if [ -z "$LATEST_NODE20" ] || [ "$LATEST_NODE20" = "" ]; then
echo "::error title=Invalid Node 20 Version::Failed to determine valid Node 20 version. Got: '$LATEST_NODE20'"
echo "Available alpine_nodejs releases:"
echo "$ALPINE_RELEASES" | head -10
exit 1
fi
if [ -z "$LATEST_NODE24" ] || [ "$LATEST_NODE24" = "" ]; then
echo "::error title=Invalid Node 24 Version::Failed to determine valid Node 24 version. Got: '$LATEST_NODE24'"
echo "Available alpine_nodejs releases:"
echo "$ALPINE_RELEASES" | head -10
exit 1
fi
# Additional validation: ensure versions match expected format (x.y.z)
if ! echo "$LATEST_NODE20" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
echo "::error title=Invalid Node 20 Format::Node 20 version '$LATEST_NODE20' does not match expected format (x.y.z)"
exit 1
fi
if ! echo "$LATEST_NODE24" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
echo "::error title=Invalid Node 24 Format::Node 24 version '$LATEST_NODE24' does not match expected format (x.y.z)"
exit 1
fi
echo "✅ Validated Node versions: 20=$LATEST_NODE20, 24=$LATEST_NODE24"
echo "latest_node20=$LATEST_NODE20" >> $GITHUB_OUTPUT
echo "latest_node24=$LATEST_NODE24" >> $GITHUB_OUTPUT
# Check current versions in externals.sh
CURRENT_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
CURRENT_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
echo "current_node20=$CURRENT_NODE20" >> $GITHUB_OUTPUT
echo "current_node24=$CURRENT_NODE24" >> $GITHUB_OUTPUT
# Determine if updates are needed
NEEDS_UPDATE20="false"
NEEDS_UPDATE24="false"
if [ "$CURRENT_NODE20" != "$LATEST_NODE20" ]; then
NEEDS_UPDATE20="true"
echo "::notice title=Node 20 Update Available::Current: $CURRENT_NODE20 → Latest: $LATEST_NODE20"
fi
if [ "$CURRENT_NODE24" != "$LATEST_NODE24" ]; then
NEEDS_UPDATE24="true"
echo "::notice title=Node 24 Update Available::Current: $CURRENT_NODE24 → Latest: $LATEST_NODE24"
fi
if [ "$NEEDS_UPDATE20" == "false" ] && [ "$NEEDS_UPDATE24" == "false" ]; then
echo "::notice title=No Updates Needed::All Node.js versions are up to date"
fi
echo "needs_update20=$NEEDS_UPDATE20" >> $GITHUB_OUTPUT
echo "needs_update24=$NEEDS_UPDATE24" >> $GITHUB_OUTPUT
- name: Update externals.sh and create PR
if: steps.node-versions.outputs.needs_update20 == 'true' || steps.node-versions.outputs.needs_update24 == 'true'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Final validation before making changes
NODE20_VERSION="${{ steps.node-versions.outputs.latest_node20 }}"
NODE24_VERSION="${{ steps.node-versions.outputs.latest_node24 }}"
echo "Final validation of versions before PR creation:"
echo "Node 20: '$NODE20_VERSION'"
echo "Node 24: '$NODE24_VERSION'"
# Validate versions are not empty and match expected format
if [ -z "$NODE20_VERSION" ] || ! echo "$NODE20_VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
echo "::error title=Invalid Node 20 Version::Refusing to create PR with invalid Node 20 version: '$NODE20_VERSION'"
exit 1
fi
if [ -z "$NODE24_VERSION" ] || ! echo "$NODE24_VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
echo "::error title=Invalid Node 24 Version::Refusing to create PR with invalid Node 24 version: '$NODE24_VERSION'"
exit 1
fi
echo "✅ All versions validated successfully"
# Update the files
if [ "${{ steps.node-versions.outputs.needs_update20 }}" == "true" ]; then
sed -i 's/NODE20_VERSION="[^"]*"/NODE20_VERSION="'"$NODE20_VERSION"'"/' src/Misc/externals.sh
fi
if [ "${{ steps.node-versions.outputs.needs_update24 }}" == "true" ]; then
sed -i 's/NODE24_VERSION="[^"]*"/NODE24_VERSION="'"$NODE24_VERSION"'"/' src/Misc/externals.sh
fi
# Verify the changes were applied correctly
echo "Verifying changes in externals.sh:"
grep "NODE20_VERSION=" src/Misc/externals.sh
grep "NODE24_VERSION=" src/Misc/externals.sh
# Ensure we actually have valid versions in the file
UPDATED_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
UPDATED_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
if [ -z "$UPDATED_NODE20" ] || [ -z "$UPDATED_NODE24" ]; then
echo "::error title=Update Failed::Failed to properly update externals.sh"
echo "Updated Node 20: '$UPDATED_NODE20'"
echo "Updated Node 24: '$UPDATED_NODE24'"
exit 1
fi
# Configure git
git config --global user.name "github-actions[bot]"
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
# Create branch and commit changes
branch_name="chore/update-node"
git checkout -b "$branch_name"
git commit -a -m "chore: update Node versions (20: $NODE20_VERSION, 24: $NODE24_VERSION)"
git push --force origin "$branch_name"
# Create PR body using here-doc for proper formatting
cat > pr_body.txt << EOF
Automated Node.js version update:
- Node 20: ${{ steps.node-versions.outputs.current_node20 }} → $NODE20_VERSION
- Node 24: ${{ steps.node-versions.outputs.current_node24 }} → $NODE24_VERSION
This update ensures we're using the latest stable Node.js versions for security and performance improvements.
**Note**: When updating Node versions, remember to also create a new release of alpine_nodejs at the updated version following the instructions at: https://github.com/actions/alpine_nodejs
---
Autogenerated by [Node Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/node-upgrade.yml)
EOF
# Create PR
gh pr create -B main -H "$branch_name" \
--title "chore: update Node versions" \
--label "dependencies" \
--label "dependencies-weekly-check" \
--label "dependencies-not-dependabot" \
--label "node" \
--label "javascript" \
--body-file pr_body.txt
echo "::notice title=PR Created::Successfully created Node.js version update PR on branch $branch_name"

View File

@@ -1,235 +0,0 @@
name: NPM Audit Fix with TypeScript Auto-Fix
on:
workflow_dispatch:
jobs:
npm-audit-with-ts-fix:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: "20"
- name: NPM install and audit fix with TypeScript auto-repair
working-directory: src/Misc/expressionFunc/hashFiles
run: |
npm install
# Check for vulnerabilities first
echo "Checking for npm vulnerabilities..."
if npm audit --audit-level=moderate; then
echo "✅ No moderate or higher vulnerabilities found"
exit 0
fi
echo "⚠️ Vulnerabilities found, attempting npm audit fix..."
# Attempt audit fix and capture the result
if npm audit fix; then
echo "✅ npm audit fix completed successfully"
AUDIT_FIX_STATUS="success"
else
echo "⚠️ npm audit fix failed or had issues"
AUDIT_FIX_STATUS="failed"
# Try audit fix with --force as a last resort for critical/high vulns only
echo "Checking if critical/high vulnerabilities remain..."
if ! npm audit --audit-level=high; then
echo "🚨 Critical/high vulnerabilities remain, attempting --force fix..."
if npm audit fix --force; then
echo "⚠️ npm audit fix --force completed (may have breaking changes)"
AUDIT_FIX_STATUS="force-fixed"
else
echo "❌ npm audit fix --force also failed"
AUDIT_FIX_STATUS="force-failed"
fi
else
echo "✅ Only moderate/low vulnerabilities remain after failed fix"
AUDIT_FIX_STATUS="partial-success"
fi
fi
echo "AUDIT_FIX_STATUS=$AUDIT_FIX_STATUS" >> $GITHUB_ENV
# Try to fix TypeScript issues automatically
echo "Attempting to fix TypeScript compatibility issues..."
# Check if build fails
if ! npm run build 2>/dev/null; then
echo "Build failed, attempting automated fixes..."
# Common fix 1: Update @types/node to latest compatible version
echo "Trying to update @types/node to latest version..."
npm update @types/node
# Common fix 2: If that doesn't work, try installing a specific known-good version
if ! npm run build 2>/dev/null; then
echo "Trying specific @types/node version..."
# Try Node 20 compatible version
npm install --save-dev @types/node@^20.0.0
fi
# Common fix 3: Clear node_modules and reinstall if still failing
if ! npm run build 2>/dev/null; then
echo "Clearing node_modules and reinstalling..."
rm -rf node_modules package-lock.json
npm install
# Re-run audit fix after clean install if it was successful before
if [[ "$AUDIT_FIX_STATUS" == "success" || "$AUDIT_FIX_STATUS" == "force-fixed" ]]; then
echo "Re-running npm audit fix after clean install..."
npm audit fix || echo "Audit fix failed on second attempt"
fi
fi
# Common fix 4: Try updating TypeScript itself
if ! npm run build 2>/dev/null; then
echo "Trying to update TypeScript..."
npm update typescript
fi
# Final check
if npm run build 2>/dev/null; then
echo "✅ Successfully fixed TypeScript issues automatically"
else
echo "⚠️ Could not automatically fix TypeScript issues"
fi
else
echo "✅ Build passes after audit fix"
fi
- name: Create PR if changes exist
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
HUSKY: 0 # Disable husky hooks for automated commits
run: |
# Check if there are any changes
if [ -n "$(git status --porcelain)" ]; then
# Configure git
git config --global user.name "github-actions[bot]"
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
# Create branch and commit changes
branch_name="chore/npm-audit-fix-with-ts-repair"
git checkout -b "$branch_name"
# Commit with --no-verify to skip husky hooks
git commit -a -m "chore: npm audit fix with automated TypeScript compatibility fixes" --no-verify
git push --force origin "$branch_name"
# Check final build status and gather info about what was changed
build_status="✅ Build passes"
fixes_applied=""
cd src/Misc/expressionFunc/hashFiles
# Check what packages were updated
if git diff HEAD~1 package.json | grep -q "@types/node"; then
fixes_applied+="\n- Updated @types/node version for TypeScript compatibility"
fi
if git diff HEAD~1 package.json | grep -q "typescript"; then
fixes_applied+="\n- Updated TypeScript version"
fi
if git diff HEAD~1 package-lock.json | grep -q "resolved"; then
fixes_applied+="\n- Updated package dependencies via npm audit fix"
fi
if ! npm run build 2>/dev/null; then
build_status="⚠️ Build fails - manual review required"
fi
cd - > /dev/null
# Create enhanced PR body using here-doc for proper formatting
audit_status_msg=""
case "$AUDIT_FIX_STATUS" in
"success")
audit_status_msg="✅ **Audit Fix**: Completed successfully"
;;
"partial-success")
audit_status_msg="⚠️ **Audit Fix**: Partial success (only moderate/low vulnerabilities remain)"
;;
"force-fixed")
audit_status_msg="⚠️ **Audit Fix**: Completed with --force (may have breaking changes)"
;;
"failed"|"force-failed")
audit_status_msg="❌ **Audit Fix**: Failed to resolve vulnerabilities"
;;
*)
audit_status_msg="❓ **Audit Fix**: Status unknown"
;;
esac
if [[ "$build_status" == *"fails"* ]]; then
cat > pr_body.txt << EOF
Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.
**Build Status**: ⚠️ Build fails - manual review required
$audit_status_msg
This workflow attempts to automatically fix TypeScript compatibility issues that may arise from npm audit fixes.
⚠️ **Manual Review Required**: The build is currently failing after automated fixes were attempted.
Common issues and solutions:
- Check for TypeScript version compatibility with Node.js types
- Review breaking changes in updated dependencies
- Consider pinning problematic dependency versions temporarily
- Review tsconfig.json for compatibility settings
**Automated Fix Strategy**:
1. Run npm audit fix with proper error handling
2. Update @types/node to latest compatible version
3. Try Node 20 specific @types/node version if needed
4. Clean reinstall dependencies if conflicts persist
5. Update TypeScript compiler if necessary
---
Autogenerated by [NPM Audit Fix with TypeScript Auto-Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit-ts-fix.yml)
EOF
else
cat > pr_body.txt << EOF
Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.
**Build Status**: ✅ Build passes
$audit_status_msg
This workflow attempts to automatically fix TypeScript compatibility issues that may arise from npm audit fixes.
✅ **Ready to Merge**: All automated fixes were successful and the build passes.
**Automated Fix Strategy**:
1. Run npm audit fix with proper error handling
2. Update @types/node to latest compatible version
3. Try Node 20 specific @types/node version if needed
4. Clean reinstall dependencies if conflicts persist
5. Update TypeScript compiler if necessary
---
Autogenerated by [NPM Audit Fix with TypeScript Auto-Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit-ts-fix.yml)
EOF
fi
if [ -n "$fixes_applied" ]; then
# Add the fixes applied section to the file
sed -i "/This workflow attempts/a\\
\\
**Automated Fixes Applied**:$fixes_applied" pr_body.txt
fi
# Create PR with appropriate labels
labels="dependencies,dependencies-not-dependabot,typescript,npm,security"
if [[ "$build_status" == *"fails"* ]]; then
labels="dependencies,dependencies-not-dependabot,typescript,npm,security,needs-manual-review"
fi
# Create PR
gh pr create -B main -H "$branch_name" \
--title "chore: npm audit fix with TypeScript auto-repair" \
--label "$labels" \
--body-file pr_body.txt
else
echo "No changes to commit"
fi

View File

@@ -1,137 +0,0 @@
name: NPM Audit Fix
on:
schedule:
- cron: "0 7 * * 1" # Weekly on Monday at 7 AM UTC
workflow_dispatch:
jobs:
npm-audit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: "20"
- name: NPM install and audit fix
working-directory: src/Misc/expressionFunc/hashFiles
run: |
npm install
# Check what vulnerabilities exist
echo "=== Checking current vulnerabilities ==="
npm audit || true
# Apply audit fix --force to get security updates
echo "=== Applying npm audit fix --force ==="
npm audit fix --force
# Test if build still works and set status
echo "=== Testing build compatibility ==="
if npm run all; then
echo "✅ Build successful after audit fix"
echo "AUDIT_FIX_STATUS=success" >> $GITHUB_ENV
else
echo "❌ Build failed after audit fix - will create PR with fix instructions"
echo "AUDIT_FIX_STATUS=build_failed" >> $GITHUB_ENV
fi
- name: Create PR if changes exist
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Check if there are any changes
if [ -n "$(git status --porcelain)" ]; then
# Configure git
git config --global user.name "github-actions[bot]"
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
# Create branch and commit changes
branch_name="chore/npm-audit-fix-$(date +%Y%m%d)"
git checkout -b "$branch_name"
git add .
git commit -m "chore: npm audit fix for hashFiles dependencies" --no-verify
git push origin "$branch_name"
# Create PR body based on what actually happened
if [ "$AUDIT_FIX_STATUS" = "success" ]; then
cat > pr_body.txt << 'EOF'
Automated npm audit fix for security vulnerabilities in hashFiles dependencies.
**✅ Full Fix Applied Successfully**
This update addresses npm security advisories and ensures dependencies are secure and up-to-date.
**Changes made:**
- Applied `npm audit fix --force` to resolve security vulnerabilities
- Updated package-lock.json with security patches
- Verified build compatibility with `npm run all`
**Next steps:**
- Review the dependency changes
- Verify the hashFiles functionality still works as expected
- Merge when ready
---
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
EOF
elif [ "$AUDIT_FIX_STATUS" = "build_failed" ]; then
cat > pr_body.txt << 'EOF'
Automated npm audit fix for security vulnerabilities in hashFiles dependencies.
**⚠️ Security Fixes Applied - Build Issues Need Manual Resolution**
This update applies important security patches but causes build failures that require manual fixes.
**Changes made:**
- Applied `npm audit fix --force` to resolve security vulnerabilities
- Updated package-lock.json with security patches
**⚠️ Build Issues Detected:**
The build fails after applying security fixes, likely due to TypeScript compatibility issues with updated `@types/node`.
**Required Manual Fixes:**
1. Review TypeScript compilation errors in the build output
2. Update TypeScript configuration if needed
3. Consider pinning `@types/node` to a compatible version
4. Run `npm run all` locally to verify fixes
**Next steps:**
- **DO NOT merge until build issues are resolved**
- Apply manual fixes for TypeScript compatibility
- Test the hashFiles functionality still works as expected
- Merge when build passes
---
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
EOF
else
# Fallback case
cat > pr_body.txt << 'EOF'
Automated npm audit attempted for security vulnerabilities in hashFiles dependencies.
**No Changes Applied**
No security vulnerabilities were found or no changes were needed.
---
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
EOF
fi
# Create PR
gh pr create -B main -H "$branch_name" \
--title "chore: npm audit fix for hashFiles dependencies" \
--label "dependencies" \
--label "dependencies-weekly-check" \
--label "dependencies-not-dependabot" \
--label "npm" \
--label "typescript" \
--label "security" \
--body-file pr_body.txt
else
echo "✅ No changes to commit - npm audit fix did not modify any files"
fi

View File

@@ -11,12 +11,12 @@ jobs:
if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
# Make sure ./releaseVersion match ./src/runnerversion
# Query GitHub release ensure version is not used
- name: Check version
uses: actions/github-script@v8
uses: actions/github-script@v7.0.1
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
@@ -86,7 +86,7 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
# Build runner layout
- name: Build & Layout Release
@@ -118,7 +118,7 @@ jobs:
# Upload runner package tar.gz/zip as artifact.
- name: Publish Artifact
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v4
with:
name: runner-packages-${{ matrix.runtime }}
path: |
@@ -129,41 +129,41 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
# Download runner package tar.gz/zip produced by 'build' job
- name: Download Artifact (win-x64)
uses: actions/download-artifact@v7
uses: actions/download-artifact@v5
with:
name: runner-packages-win-x64
path: ./
- name: Download Artifact (win-arm64)
uses: actions/download-artifact@v7
uses: actions/download-artifact@v5
with:
name: runner-packages-win-arm64
path: ./
- name: Download Artifact (osx-x64)
uses: actions/download-artifact@v7
uses: actions/download-artifact@v5
with:
name: runner-packages-osx-x64
path: ./
- name: Download Artifact (osx-arm64)
uses: actions/download-artifact@v7
uses: actions/download-artifact@v5
with:
name: runner-packages-osx-arm64
path: ./
- name: Download Artifact (linux-x64)
uses: actions/download-artifact@v7
uses: actions/download-artifact@v5
with:
name: runner-packages-linux-x64
path: ./
- name: Download Artifact (linux-arm)
uses: actions/download-artifact@v7
uses: actions/download-artifact@v5
with:
name: runner-packages-linux-arm
path: ./
- name: Download Artifact (linux-arm64)
uses: actions/download-artifact@v7
uses: actions/download-artifact@v5
with:
name: runner-packages-linux-arm64
path: ./
@@ -171,7 +171,7 @@ jobs:
# Create ReleaseNote file
- name: Create ReleaseNote
id: releaseNote
uses: actions/github-script@v8
uses: actions/github-script@v7.0.1
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
@@ -296,11 +296,11 @@ jobs:
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
steps:
- name: Checkout repository
uses: actions/checkout@v6
uses: actions/checkout@v5
- name: Compute image version
id: image
uses: actions/github-script@v8
uses: actions/github-script@v7.0.1
with:
script: |
const fs = require('fs');
@@ -334,12 +334,11 @@ jobs:
push: true
labels: |
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
org.opencontainers.image.licenses=MIT
annotations: |
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
org.opencontainers.image.licenses=MIT
- name: Generate attestation
uses: actions/attest-build-provenance@v3
uses: actions/attest-build-provenance@v2
with:
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
subject-digest: ${{ steps.build-and-push.outputs.digest }}

View File

@@ -7,7 +7,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v10
- uses: actions/stale@v9
with:
stale-issue-message: "This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days."
close-issue-message: "This issue was closed because it has been stalled for 15 days with no activity."

View File

@@ -1 +1,6 @@
cd src/Misc/expressionFunc/hashFiles && npx lint-staged
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
cd src/Misc/expressionFunc/hashFiles
npx lint-staged

View File

@@ -1,299 +0,0 @@
# DAP Debugging - Bug Fixes and Enhancements
**Status:** Planned
**Date:** January 2026
**Related:** [dap-debugging.md](./dap-debugging.md)
## Overview
This document tracks bug fixes and enhancements for the DAP debugging implementation after the initial phases were completed.
## Issues
### Bug 1: Double Output in REPL Shell Commands
**Symptom:** Running commands in the REPL shell produces double output - the first one unmasked, the second one with secrets masked.
**Root Cause:** In `DapDebugSession.ExecuteShellCommandAsync()` (lines 670-773), output is sent to the debugger twice:
1. **Real-time streaming (unmasked):** Lines 678-712 stream output via DAP `output` events as data arrives from the process - but this output is NOT masked
2. **Final result (masked):** Lines 765-769 return the combined output as `EvaluateResponseBody.Result` with secrets masked
The DAP client displays both the streamed events AND the evaluate response result, causing duplication.
**Fix:**
1. Mask secrets in the real-time streaming output (add `HostContext.SecretMasker.MaskSecrets()` to lines ~690 and ~708)
2. Change the final `Result` to show only an exit-code summary instead of the full output
---
### Bug 2: Expressions Interpreted as Shell Commands
**Symptom:** Evaluating expressions like `${{github.event_name}} == 'push'` in the Watch/Expressions pane results in them being executed as shell commands instead of being evaluated as GitHub Actions expressions.
**Root Cause:** In `DapDebugSession.HandleEvaluateAsync()` (line 514), the condition to detect shell commands is too broad:
```csharp
if (evalContext == "repl" || expression.StartsWith("!") || expression.StartsWith("$"))
```
Since `${{github.event_name}}` starts with `$`, it gets routed to shell execution instead of expression evaluation.
**Fix:**
1. Check for `${{` prefix first - these are always GitHub Actions expressions
2. Remove the `expression.StartsWith("$")` condition entirely (ambiguous and unnecessary since REPL context handles shell commands)
3. Keep `expression.StartsWith("!")` for explicit shell override in non-REPL contexts
---
### Enhancement: Expression Interpolation in REPL Commands
**Request:** When running REPL commands like `echo ${{github.event_name}}`, the `${{ }}` expressions should be expanded before shell execution, similar to how `run:` steps work.
**Approach:** Add a helper method that uses the existing `PipelineTemplateEvaluator` infrastructure to expand expressions in the command string before passing it to the shell.
---
## Implementation Details
### File: `src/Runner.Worker/Dap/DapDebugSession.cs`
#### Change 1: Mask Real-Time Streaming Output
**Location:** Lines ~678-712 (OutputDataReceived and ErrorDataReceived handlers)
**Before:**
```csharp
processInvoker.OutputDataReceived += (sender, args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
output.AppendLine(args.Data);
_server?.SendEvent(new Event
{
EventType = "output",
Body = new OutputEventBody
{
Category = "stdout",
Output = args.Data + "\n" // NOT MASKED
}
});
}
};
```
**After:**
```csharp
processInvoker.OutputDataReceived += (sender, args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
output.AppendLine(args.Data);
var maskedData = HostContext.SecretMasker.MaskSecrets(args.Data);
_server?.SendEvent(new Event
{
EventType = "output",
Body = new OutputEventBody
{
Category = "stdout",
Output = maskedData + "\n"
}
});
}
};
```
Apply the same change to `ErrorDataReceived` handler (~lines 696-712).
---
#### Change 2: Return Only Exit Code in Result
**Location:** Lines ~767-772 (return statement in ExecuteShellCommandAsync)
**Before:**
```csharp
return new EvaluateResponseBody
{
Result = result.TrimEnd('\r', '\n'),
Type = exitCode == 0 ? "string" : "error",
VariablesReference = 0
};
```
**After:**
```csharp
return new EvaluateResponseBody
{
Result = $"(exit code: {exitCode})",
Type = exitCode == 0 ? "string" : "error",
VariablesReference = 0
};
```
Also remove the result combination logic (lines ~747-762) since we no longer need to build the full result string for the response.
---
#### Change 3: Fix Expression vs Shell Routing
**Location:** Lines ~511-536 (HandleEvaluateAsync method)
**Before:**
```csharp
try
{
// Check if this is a REPL/shell command (context: "repl") or starts with shell prefix
if (evalContext == "repl" || expression.StartsWith("!") || expression.StartsWith("$"))
{
// Shell execution mode
var command = expression.TrimStart('!', '$').Trim();
// ...
}
else
{
// Expression evaluation mode
var result = EvaluateExpression(expression, executionContext);
return CreateSuccessResponse(result);
}
}
```
**After:**
```csharp
try
{
// GitHub Actions expressions start with "${{" - always evaluate as expressions
if (expression.StartsWith("${{"))
{
var result = EvaluateExpression(expression, executionContext);
return CreateSuccessResponse(result);
}
// Check if this is a REPL/shell command:
// - context is "repl" (from Debug Console pane)
// - expression starts with "!" (explicit shell prefix for Watch pane)
if (evalContext == "repl" || expression.StartsWith("!"))
{
// Shell execution mode
var command = expression.TrimStart('!').Trim();
if (string.IsNullOrEmpty(command))
{
return CreateSuccessResponse(new EvaluateResponseBody
{
Result = "(empty command)",
Type = "string",
VariablesReference = 0
});
}
var result = await ExecuteShellCommandAsync(command, executionContext);
return CreateSuccessResponse(result);
}
else
{
// Expression evaluation mode (Watch pane, hover, etc.)
var result = EvaluateExpression(expression, executionContext);
return CreateSuccessResponse(result);
}
}
```
---
#### Change 4: Add Expression Expansion Helper Method
**Location:** Add new method before `ExecuteShellCommandAsync` (~line 667)
```csharp
/// <summary>
/// Expands ${{ }} expressions within a command string.
/// For example: "echo ${{github.event_name}}" -> "echo push"
/// </summary>
private string ExpandExpressionsInCommand(string command, IExecutionContext context)
{
if (string.IsNullOrEmpty(command) || !command.Contains("${{"))
{
return command;
}
try
{
// Create a StringToken with the command
var token = new StringToken(null, null, null, command);
// Use the template evaluator to expand expressions
var templateEvaluator = context.ToPipelineTemplateEvaluator();
var result = templateEvaluator.EvaluateStepDisplayName(
token,
context.ExpressionValues,
context.ExpressionFunctions);
// Mask secrets in the expanded command
result = HostContext.SecretMasker.MaskSecrets(result ?? command);
Trace.Info($"Expanded command: {result}");
return result;
}
catch (Exception ex)
{
Trace.Info($"Expression expansion failed, using original command: {ex.Message}");
return command;
}
}
```
**Required import:** Add `using GitHub.DistributedTask.ObjectTemplating.Tokens;` at the top of the file if not already present.
---
#### Change 5: Use Expression Expansion in Shell Execution
**Location:** Beginning of `ExecuteShellCommandAsync` method (~line 670)
**Before:**
```csharp
private async Task<EvaluateResponseBody> ExecuteShellCommandAsync(string command, IExecutionContext context)
{
Trace.Info($"Executing shell command: {command}");
// ...
}
```
**After:**
```csharp
private async Task<EvaluateResponseBody> ExecuteShellCommandAsync(string command, IExecutionContext context)
{
// Expand ${{ }} expressions in the command first
command = ExpandExpressionsInCommand(command, context);
Trace.Info($"Executing shell command: {command}");
// ...
}
```
---
## DAP Context Reference
For future reference, these are the DAP evaluate context values:
| DAP Context | Source UI | Behavior |
|-------------|-----------|----------|
| `"repl"` | Debug Console / REPL pane | Shell execution (with expression expansion) |
| `"watch"` | Watch / Expressions pane | Expression evaluation |
| `"hover"` | Editor hover (default) | Expression evaluation |
| `"variables"` | Variables pane | Expression evaluation |
| `"clipboard"` | Copy to clipboard | Expression evaluation |
---
## Testing Checklist
- [ ] REPL command output is masked and appears only once
- [ ] REPL command shows exit code in result field
- [ ] Expression `${{github.event_name}}` evaluates correctly in Watch pane
- [ ] Expression `${{github.event_name}} == 'push'` evaluates correctly
- [ ] REPL command `echo ${{github.event_name}}` expands and executes correctly
- [ ] REPL command `!ls -la` from Watch pane works (explicit shell prefix)
- [ ] Secrets are masked in all outputs (streaming and expanded commands)

View File

@@ -1,536 +0,0 @@
# DAP-Based Debugging for GitHub Actions Runner
**Status:** Draft
**Author:** GitHub Actions Team
**Date:** January 2026
## Progress Checklist
- [x] **Phase 1:** DAP Protocol Infrastructure (DapMessages.cs, DapServer.cs, basic DapDebugSession.cs)
- [x] **Phase 2:** Debug Session Logic (DapVariableProvider.cs, variable inspection, step history tracking)
- [x] **Phase 3:** StepsRunner Integration (pause hooks before/after step execution)
- [x] **Phase 4:** Expression Evaluation & Shell (REPL)
- [x] **Phase 5:** Startup Integration (JobRunner.cs modifications)
## Overview
This document describes the implementation of Debug Adapter Protocol (DAP) support in the GitHub Actions runner, enabling rich debugging of workflow jobs from any DAP-compatible editor (nvim-dap, VS Code, etc.).
## Goals
- **Primary:** Create a working demo to demonstrate the feasibility of DAP-based workflow debugging
- **Non-goal:** A production-ready, polished implementation (this is a proof of concept)
## User Experience
1. User re-runs a failed job with "Enable debug logging" checked in GitHub UI
2. Runner (running locally) detects debug mode and starts DAP server on port 4711
3. Runner prints "Waiting for debugger on port 4711..." and pauses
4. User opens editor (nvim with nvim-dap), connects to debugger
5. Job execution begins, pausing before the first step
6. User can:
- **Inspect variables:** View `github`, `env`, `inputs`, `steps`, `secrets` (redacted), `runner`, `job` contexts
- **Evaluate expressions:** `${{ github.event.pull_request.title }}`
- **Execute shell commands:** Run arbitrary commands in the job's environment (REPL)
- **Step through job:** `next` moves to next step, `continue` runs to end
- **Pause after steps:** Inspect step outputs before continuing
## Activation
DAP debugging activates automatically when the job is in debug mode:
- User enables "Enable debug logging" when re-running a job in GitHub UI
- Server sends `ACTIONS_STEP_DEBUG=true` in job variables
- Runner sets `Global.WriteDebug = true` and `runner.debug = "1"`
- DAP server starts on port 4711
**No additional configuration required.**
### Optional Configuration
| Environment Variable | Default | Description |
|---------------------|---------|-------------|
| `ACTIONS_DAP_PORT` | `4711` | TCP port for DAP server (optional override) |
## Architecture
```
┌─────────────────────┐ ┌─────────────────────────────────────────┐
│ nvim-dap │ │ Runner.Worker │
│ (DAP Client) │◄───TCP:4711───────►│ ┌─────────────────────────────────┐ │
│ │ │ │ DapServer │ │
└─────────────────────┘ │ │ - TCP listener │ │
│ │ - DAP JSON protocol │ │
│ └──────────────┬──────────────────┘ │
│ │ │
│ ┌──────────────▼──────────────────┐ │
│ │ DapDebugSession │ │
│ │ - Debug state management │ │
│ │ - Step coordination │ │
│ │ - Variable exposure │ │
│ │ - Expression evaluation │ │
│ │ - Shell execution (REPL) │ │
│ └──────────────┬──────────────────┘ │
│ │ │
│ ┌──────────────▼──────────────────┐ │
│ │ StepsRunner (modified) │ │
│ │ - Pause before/after steps │ │
│ │ - Notify debug session │ │
│ └─────────────────────────────────┘ │
└─────────────────────────────────────────┘
```
## DAP Concept Mapping
| DAP Concept | Actions Runner Equivalent |
|-------------|---------------------------|
| Thread | Single job execution |
| Stack Frame | Current step + completed steps (step history) |
| Scope | Context category: `github`, `env`, `inputs`, `steps`, `secrets`, `runner`, `job` |
| Variable | Individual context values |
| Breakpoint | Pause before specific step (future enhancement) |
| Step Over (Next) | Execute current step, pause before next |
| Continue | Run until job end |
| Evaluate | Evaluate `${{ }}` expressions OR execute shell commands (REPL) |
## File Structure
```
src/Runner.Worker/
├── Dap/
│ ├── DapServer.cs # TCP listener, JSON protocol handling
│ ├── DapDebugSession.cs # Debug state, step coordination
│ ├── DapMessages.cs # DAP protocol message types
│ └── DapVariableProvider.cs # Converts ExecutionContext to DAP variables
```
## Implementation Phases
### Phase 1: DAP Protocol Infrastructure
#### 1.1 Protocol Messages (`Dap/DapMessages.cs`)
Base message types following DAP spec:
```csharp
public abstract class ProtocolMessage
{
public int seq { get; set; }
public string type { get; set; } // "request", "response", "event"
}
public class Request : ProtocolMessage
{
public string command { get; set; }
public object arguments { get; set; }
}
public class Response : ProtocolMessage
{
public int request_seq { get; set; }
public bool success { get; set; }
public string command { get; set; }
public string message { get; set; }
public object body { get; set; }
}
public class Event : ProtocolMessage
{
public string @event { get; set; }
public object body { get; set; }
}
```
Message framing: `Content-Length: N\r\n\r\n{json}`
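As an illustration, a minimal framing reader (a sketch; `ReadMessageAsync` is not part of the DAP spec or the runner API):
```csharp
// Reads one "Content-Length: N\r\n\r\n{json}" frame and returns the JSON
// payload, or null when the client disconnects.
private static async Task<string> ReadMessageAsync(Stream stream)
{
    var header = new StringBuilder();
    var one = new byte[1];
    // Read byte-by-byte until the blank line that terminates the header.
    while (!header.ToString().EndsWith("\r\n\r\n"))
    {
        if (await stream.ReadAsync(one, 0, 1) == 0) return null;
        header.Append((char)one[0]);
    }
    var lengthLine = header.ToString().Split("\r\n")[0];
    var contentLength = int.Parse(lengthLine.Substring("Content-Length:".Length).Trim());
    // Read exactly contentLength bytes of JSON body.
    var body = new byte[contentLength];
    for (int read = 0; read < contentLength; )
    {
        var n = await stream.ReadAsync(body, read, contentLength - read);
        if (n == 0) return null;
        read += n;
    }
    return Encoding.UTF8.GetString(body);
}
```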
#### 1.2 DAP Server (`Dap/DapServer.cs`)
```csharp
[ServiceLocator(Default = typeof(DapServer))]
public interface IDapServer : IRunnerService
{
Task StartAsync(int port);
Task WaitForConnectionAsync();
Task StopAsync();
void SendEvent(Event evt);
}
public sealed class DapServer : RunnerService, IDapServer
{
private TcpListener _listener;
private TcpClient _client;
private IDapDebugSession _session;
// TCP listener on configurable port
// Single-client connection
// Async read/write loop
// Dispatch requests to DapDebugSession
}
```
### Phase 2: Debug Session Logic
#### 2.1 Debug Session (`Dap/DapDebugSession.cs`)
```csharp
public enum DapCommand { Continue, Next, Pause, Disconnect }
public enum PauseReason { Entry, Step, Breakpoint, Pause }
[ServiceLocator(Default = typeof(DapDebugSession))]
public interface IDapDebugSession : IRunnerService
{
bool IsActive { get; }
// Called by DapServer
void Initialize(InitializeRequestArguments args);
void Attach(AttachRequestArguments args);
void ConfigurationDone();
Task<DapCommand> WaitForCommandAsync();
// Called by StepsRunner
Task OnStepStartingAsync(IStep step, IExecutionContext jobContext, PauseReason reason);
void OnStepCompleted(IStep step);
// DAP requests
ThreadsResponse GetThreads();
StackTraceResponse GetStackTrace(int threadId);
ScopesResponse GetScopes(int frameId);
VariablesResponse GetVariables(int variablesReference);
EvaluateResponse Evaluate(string expression, string context);
}
public sealed class DapDebugSession : RunnerService, IDapDebugSession
{
private IExecutionContext _jobContext;
private IStep _currentStep;
private readonly List<IStep> _completedSteps = new();
private TaskCompletionSource<DapCommand> _commandTcs;
private bool _pauseAfterStep = false;
// Object reference management for nested variables
private int _nextVariableReference = 1;
private readonly Dictionary<int, object> _variableReferences = new();
}
```
Core state machine:
1. **Waiting for client:** Server started, no client connected
2. **Initializing:** Client connected, exchanging capabilities
3. **Ready:** `configurationDone` received, waiting to start
4. **Paused (before step):** Stopped before step execution, waiting for command
5. **Running:** Executing a step
6. **Paused (after step):** Stopped after step execution, waiting for command
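One way to encode these states in `DapDebugSession` (illustrative only):
```csharp
// Illustrative encoding of the session states listed above.
private enum DebugState
{
    WaitingForClient,  // 1. server started, no client connected
    Initializing,      // 2. client connected, exchanging capabilities
    Ready,             // 3. configurationDone received, waiting to start
    PausedBeforeStep,  // 4. stopped before step execution, awaiting command
    Running,           // 5. executing a step
    PausedAfterStep    // 6. stopped after step execution, awaiting command
}
```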
#### 2.2 Variable Provider (`Dap/DapVariableProvider.cs`)
Maps `ExecutionContext.ExpressionValues` to DAP scopes and variables:
| Scope | Source | Notes |
|-------|--------|-------|
| `github` | `ExpressionValues["github"]` | Full github context |
| `env` | `ExpressionValues["env"]` | Environment variables |
| `inputs` | `ExpressionValues["inputs"]` | Step inputs (when available) |
| `steps` | `Global.StepsContext.GetScope()` | Completed step outputs |
| `secrets` | `ExpressionValues["secrets"]` | Keys shown, values = `[REDACTED]` |
| `runner` | `ExpressionValues["runner"]` | Runner context |
| `job` | `ExpressionValues["job"]` | Job status |
Nested objects (e.g., `github.event.pull_request`) become expandable variables with child references.
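A sketch of how expandable values could be registered against the reference table declared in `DapDebugSession` (the helper name is illustrative):
```csharp
// Containers (objects/arrays) get a variablesReference > 0 so the client can
// fetch their children with a later "variables" request; scalars are inline.
private int RegisterVariableReference(object value)
{
    if (value is DictionaryContextData || value is ArrayContextData)
    {
        var reference = _nextVariableReference++;
        _variableReferences[reference] = value;
        return reference;
    }
    return 0; // not expandable
}
```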
### Phase 3: StepsRunner Integration
#### 3.1 Modify `StepsRunner.cs`
Add debug hooks at step boundaries:
```csharp
public async Task RunAsync(IExecutionContext jobContext)
{
// Get debug session if available
var debugSession = HostContext.TryGetService<IDapDebugSession>();
bool isFirstStep = true;
while (jobContext.JobSteps.Count > 0 || !checkPostJobActions)
{
// ... existing dequeue logic ...
var step = jobContext.JobSteps.Dequeue();
// Pause BEFORE step execution
if (debugSession?.IsActive == true)
{
var reason = isFirstStep ? PauseReason.Entry : PauseReason.Step;
await debugSession.OnStepStartingAsync(step, jobContext, reason);
isFirstStep = false;
}
// ... existing step execution (condition eval, RunStepAsync, etc.) ...
// Pause AFTER step execution (if requested)
if (debugSession?.IsActive == true)
{
debugSession.OnStepCompleted(step);
// Session may pause here to let user inspect outputs
}
}
}
```
### Phase 4: Expression Evaluation & Shell (REPL)
#### 4.1 Expression Evaluation
Reuse existing `PipelineTemplateEvaluator`:
```csharp
private EvaluateResponseBody EvaluateExpression(string expression, IExecutionContext context)
{
// Strip ${{ }} wrapper if present
var expr = expression.Trim();
if (expr.StartsWith("${{") && expr.EndsWith("}}"))
{
expr = expr.Substring(3, expr.Length - 5).Trim();
}
var expressionToken = new BasicExpressionToken(fileId: null, line: null, column: null, expression: expr);
var templateEvaluator = context.ToPipelineTemplateEvaluator();
var result = templateEvaluator.EvaluateStepDisplayName(
expressionToken,
context.ExpressionValues,
context.ExpressionFunctions
);
// Mask secrets and determine type
result = HostContext.SecretMasker.MaskSecrets(result ?? "null");
return new EvaluateResponseBody
{
Result = result,
Type = DetermineResultType(result),
VariablesReference = 0
};
}
```
**Supported expression formats:**
- Plain expression: `github.ref`, `steps.build.outputs.result`
- Wrapped expression: `${{ github.event.pull_request.title }}`
#### 4.2 Shell Execution (REPL)
Shell execution is triggered when:
1. The evaluate request has `context: "repl"`, OR
2. The expression starts with `!` (e.g., `!ls -la`), OR
3. The expression starts with `$` followed by a shell command (e.g., `$env`)
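A minimal dispatch check matching these rules (a sketch; the helper name is an assumption):
```csharp
// Returns true when an evaluate request should run as a shell command.
private static bool IsShellCommand(string expression, string dapContext)
{
    return string.Equals(dapContext, "repl", StringComparison.OrdinalIgnoreCase)
        || expression.StartsWith("!")
        || (expression.StartsWith("$") && !expression.StartsWith("${{"));
}
```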
**Usage examples in debug console:**
```
!ls -la # List files in workspace
!env | grep GITHUB # Show GitHub environment variables
!cat $GITHUB_EVENT_PATH # View the event payload
!echo ${{ github.ref }} # Mix shell and expression (evaluated first)
```
**Implementation:**
```csharp
private async Task<EvaluateResponseBody> ExecuteShellCommandAsync(string command, IExecutionContext context)
{
var processInvoker = HostContext.CreateService<IProcessInvoker>();
var output = new StringBuilder();
processInvoker.OutputDataReceived += (sender, args) =>
{
output.AppendLine(args.Data);
// Stream to client in real-time via DAP output event
_server?.SendEvent(new Event
{
@event = "output",
body = new OutputEventBody { Category = "stdout", Output = args.Data + "\n" }
});
};
processInvoker.ErrorDataReceived += (sender, args) =>
{
_server?.SendEvent(new Event
{
@event = "output",
body = new OutputEventBody { Category = "stderr", Output = args.Data + "\n" }
});
};
// Build environment from job context (includes GITHUB_*, env context, prepend path)
var env = BuildShellEnvironment(context);
var workDir = GetWorkingDirectory(context); // Uses github.workspace
var (shell, shellArgs) = GetDefaultShell(); // Platform-specific detection
int exitCode = await processInvoker.ExecuteAsync(
workingDirectory: workDir,
fileName: shell,
arguments: string.Format(shellArgs, command),
environment: env,
requireExitCodeZero: false,
cancellationToken: CancellationToken.None
);
return new EvaluateResponseBody
{
Result = HostContext.SecretMasker.MaskSecrets(output.ToString()),
Type = exitCode == 0 ? "string" : "error",
VariablesReference = 0
};
}
```
**Shell detection by platform:**
| Platform | Priority | Shell | Arguments |
|----------|----------|-------|-----------|
| Windows | 1 | `pwsh` | `-NoProfile -NonInteractive -Command "{0}"` |
| Windows | 2 | `powershell` | `-NoProfile -NonInteractive -Command "{0}"` |
| Windows | 3 | `cmd.exe` | `/C "{0}"` |
| Unix | 1 | `bash` | `-c "{0}"` |
| Unix | 2 | `sh` | `-c "{0}"` |
**Environment built for shell commands:**
- Current system environment variables
- GitHub Actions context variables (from `IEnvironmentContextData.GetRuntimeEnvironmentVariables()`)
- Prepend path from job context added to `PATH`
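A possible shape for the shell detection described above, using the runner's existing `WhichUtil.Which` lookup (everything else in this sketch is assumed):
```csharp
// Walks the platform priority table and returns (shell path, argument format).
private (string shell, string argsFormat) GetDefaultShell()
{
#if OS_WINDOWS
    foreach (var candidate in new[] { "pwsh", "powershell" })
    {
        var path = WhichUtil.Which(candidate, require: false, trace: Trace);
        if (!string.IsNullOrEmpty(path))
        {
            return (path, "-NoProfile -NonInteractive -Command \"{0}\"");
        }
    }
    return ("cmd.exe", "/C \"{0}\"");
#else
    var bash = WhichUtil.Which("bash", require: false, trace: Trace);
    return string.IsNullOrEmpty(bash) ? ("/bin/sh", "-c \"{0}\"") : (bash, "-c \"{0}\"");
#endif
}
```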
### Phase 5: Startup Integration
#### 5.1 Modify `JobRunner.cs`
Add DAP server startup after debug mode is detected (around line 159):
```csharp
if (jobContext.Global.WriteDebug)
{
jobContext.SetRunnerContext("debug", "1");
// Start DAP server for interactive debugging
var dapServer = HostContext.GetService<IDapServer>();
var port = int.Parse(
Environment.GetEnvironmentVariable("ACTIONS_DAP_PORT") ?? "4711");
await dapServer.StartAsync(port);
Trace.Info($"DAP server listening on port {port}");
jobContext.Output($"DAP debugger waiting for connection on port {port}...");
// Block until debugger connects
await dapServer.WaitForConnectionAsync();
Trace.Info("DAP client connected, continuing job execution");
}
```
## DAP Capabilities
Capabilities to advertise in `InitializeResponse`:
```json
{
"supportsConfigurationDoneRequest": true,
"supportsEvaluateForHovers": true,
"supportsTerminateDebuggee": true,
"supportsStepBack": false,
"supportsSetVariable": false,
"supportsRestartFrame": false,
"supportsGotoTargetsRequest": false,
"supportsStepInTargetsRequest": false,
"supportsCompletionsRequest": false,
"supportsModulesRequest": false,
"supportsExceptionOptions": false,
"supportsValueFormattingOptions": false,
"supportsExceptionInfoRequest": false,
"supportsDelayedStackTraceLoading": false,
"supportsLoadedSourcesRequest": false,
"supportsProgressReporting": false,
"supportsRunInTerminalRequest": false
}
```
## Client Configuration (nvim-dap)
Example configuration for nvim-dap:
```lua
local dap = require('dap')
dap.adapters.actions = {
type = 'server',
host = '127.0.0.1',
port = 4711,
}
dap.configurations.yaml = {
{
type = 'actions',
request = 'attach',
name = 'Attach to Actions Runner',
}
}
```
## Demo Flow
1. Trigger job re-run with "Enable debug logging" checked in GitHub UI
2. Runner starts, detects debug mode (`Global.WriteDebug == true`)
3. DAP server starts, console shows: `DAP debugger waiting for connection on port 4711...`
4. In nvim: `:lua require('dap').continue()`
5. Connection established, capabilities exchanged
6. Job begins, pauses before first step
7. nvim shows "stopped" state, variables panel shows contexts
8. User explores variables, evaluates expressions, runs shell commands
9. User presses `n` (next) to advance to next step
10. After step completes, user can inspect outputs before continuing
11. Repeat until job completes
## Testing Strategy
1. **Unit tests:** DAP protocol serialization, variable provider mapping
2. **Integration tests:** Mock DAP client verifying request/response sequences
3. **Manual testing:** Real job with nvim-dap attached
## Future Enhancements (Out of Scope for Demo)
- Composite action step-in (expand into sub-steps)
- Breakpoints on specific step names
- Watch expressions
- Conditional breakpoints
- Remote debugging (runner not on localhost)
- VS Code extension
## Estimated Effort
| Phase | Effort |
|-------|--------|
| Phase 1: Protocol Infrastructure | 4-6 hours |
| Phase 2: Debug Session Logic | 4-6 hours |
| Phase 3: StepsRunner Integration | 2-3 hours |
| Phase 4: Expression & Shell | 3-4 hours |
| Phase 5: Startup & Polish | 2-3 hours |
| **Total** | **~2-3 days** |
## Key Files to Modify
| File | Changes |
|------|---------|
| `src/Runner.Worker/JobRunner.cs` | Start DAP server when debug mode enabled |
| `src/Runner.Worker/StepsRunner.cs` | Add pause hooks before/after step execution |
| `src/Runner.Worker/Runner.Worker.csproj` | Add new Dap/ folder files |
## Key Files to Create
| File | Purpose |
|------|---------|
| `src/Runner.Worker/Dap/DapServer.cs` | TCP server, protocol framing |
| `src/Runner.Worker/Dap/DapDebugSession.cs` | Debug state machine, command handling |
| `src/Runner.Worker/Dap/DapMessages.cs` | Protocol message types |
| `src/Runner.Worker/Dap/DapVariableProvider.cs` | Context → DAP variable conversion |
## Reference Links
- [DAP Overview](https://microsoft.github.io/debug-adapter-protocol/overview)
- [DAP Specification](https://microsoft.github.io/debug-adapter-protocol/specification)
- [Enable Debug Logging (GitHub Docs)](https://docs.github.com/en/actions/how-tos/monitor-workflows/enable-debug-logging)

View File

@@ -12,28 +12,33 @@ The runner is the application that runs a job from a GitHub Actions workflow. It
For more information about installing and using self-hosted runners, see [Adding self-hosted runners](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/adding-self-hosted-runners) and [Using self-hosted runners in a workflow](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/using-self-hosted-runners-in-a-workflow)
Runner releases:
## Download and Install
![win](docs/res/win_sm.png) [Pre-reqs](docs/start/envwin.md) | [Download](https://github.com/actions/runner/releases)
![win](docs/res/win_sm.png) **Windows**: [Prerequisites](docs/start/envwin.md) | [Download](https://github.com/actions/runner/releases)
![macOS](docs/res/apple_sm.png) [Pre-reqs](docs/start/envosx.md) | [Download](https://github.com/actions/runner/releases)
![macOS](docs/res/apple_sm.png) **macOS**: [Prerequisites](docs/start/envosx.md) | [Download](https://github.com/actions/runner/releases)
![linux](docs/res/linux_sm.png) [Pre-reqs](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases)
![linux](docs/res/linux_sm.png) **Linux**: [Prerequisites](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases)
### Note
## Documentation and Resources
Thank you for your interest in this GitHub repo, however, right now we are not taking contributions.
- 📚 **[Complete Documentation Index](docs/README.md)** - Comprehensive guide to all documentation
- 📖 **[Contributing Guide](docs/contribute.md)** - Development setup, building, and testing
- 🔧 **[Automation Scripts](docs/automate.md)** - Automate runner setup and configuration
- 🛠️ **[Troubleshooting Guides](docs/checks/README.md)** - Common issues and solutions
- 🏗️ **[Architecture Decision Records](docs/adrs/README.md)** - Important architectural decisions
- ⚙️ **Platform Prerequisites:** [Linux](docs/start/envlinux.md) | [Windows](docs/start/envwin.md) | [macOS](docs/start/envosx.md)
We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we're working on and what stage they're in.
## Support and Community
We are taking the following steps to better direct requests related to GitHub Actions, including:
Thank you for your interest in this repository. Please note our current contribution and support guidelines:
1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions)
**Bug Reports:** You are welcome to report bugs in this repository through Issues.
2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report.
**Feature Requests:** Please submit feature and enhancement requests on the [GitHub Community Discussions](https://github.com/orgs/community/discussions/categories/actions) page.
3. Security Issues should be handled as per our [security.md](security.md)
**Support Questions:** For help and support, please use our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions).
We will still provide security updates for this project and fix major breaking changes during this time.
**Security Issues:** Please report security vulnerabilities following our [security policy](security.md).
You are welcome to still raise bugs in this repo.
**High Priority Issues:** Critical bugs can be reported through Community Discussions or our [support team](https://support.github.com/contact/bug-report).

docs/README.md Normal file
View File

@@ -0,0 +1,61 @@
# GitHub Actions Runner Documentation
Welcome to the GitHub Actions Runner documentation. This guide will help you get started with self-hosted runners, contribute to the project, and troubleshoot common issues.
## 🚀 Getting Started
### Installation and Setup
- **[Linux Prerequisites](start/envlinux.md)** - Complete setup guide for Linux systems
- **[Windows Prerequisites](start/envwin.md)** - Complete setup guide for Windows systems
- **[macOS Prerequisites](start/envosx.md)** - Complete setup guide for macOS systems
### Quick Start
1. Download the [latest runner release](https://github.com/actions/runner/releases)
2. Follow the platform-specific prerequisites guide above
3. Configure your runner with `./config.sh` (Linux/macOS) or `.\config.cmd` (Windows)
4. Start the runner with `./run.sh` (Linux/macOS) or `.\run.cmd` (Windows)
## 🔧 Administration and Automation
- **[Automation Scripts](automate.md)** - Automate runner deployment and management
- **[Troubleshooting Guides](checks/)** - Common issues and solutions
## 🏗️ Development and Contributing
- **[Contributing Guide](contribute.md)** - Development setup, building, and testing
- **[Architecture Decision Records](adrs/README.md)** - Important architectural decisions and design patterns
## 📋 Reference Materials
### System Checks and Troubleshooting
- **[Network Connectivity](checks/network.md)** - Troubleshoot network issues
- **[Git Configuration](checks/git.md)** - Git-related problems
- **[Actions Troubleshooting](checks/actions.md)** - Action-specific issues
- **[SSL Certificate Issues](checks/sslcert.md)** - Certificate and TLS problems
- **[Node.js Issues](checks/nodejs.md)** - Node.js runtime problems
- **[Internet Connectivity](checks/internet.md)** - General connectivity issues
### Development Resources
- **[Visual Studio Code Setup](contribute/vscode.md)** - IDE configuration for development
- **[Design Documentation](design/)** - Technical design documents
## 🆘 Getting Help
### Community Support
- **[GitHub Community Discussions](https://github.com/orgs/community/discussions/categories/actions)** - Ask questions and get help from the community
- **[GitHub Support](https://support.github.com/contact/bug-report)** - Report critical bugs or get professional support
### Reporting Issues
- **Bug Reports**: Open an issue in this repository
- **Feature Requests**: Use [GitHub Community Discussions](https://github.com/orgs/community/discussions/categories/actions-and-packages)
- **Security Issues**: Follow our [security policy](../security.md)
## 📖 Additional Resources
- **[GitHub Actions Documentation](https://docs.github.com/en/actions)** - Official GitHub Actions documentation
- **[Self-hosted Runners Guide](https://docs.github.com/en/actions/hosting-your-own-runners)** - GitHub's official self-hosted runner documentation
- **[Runner Releases](https://github.com/actions/runner/releases)** - Download the latest runner versions
---
> **Note**: This documentation is maintained by the GitHub Actions team and the community. If you find any issues or have suggestions for improvement, please open an issue or contribute a pull request.

View File

@@ -76,3 +76,76 @@ Repo level one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just
```bash
curl -s https://raw.githubusercontent.com/actions/runner/main/scripts/delete.sh | bash -s yourorg/yourrepo runnername
```
## Troubleshooting
### Common Issues
#### Permission Denied
```bash
# Ensure scripts have execute permissions
chmod +x ./config.sh ./run.sh
```
#### PAT Token Issues
```bash
# Verify your PAT has the correct scopes:
# - repo (for repository-level runners)
# - admin:org (for organization-level runners)
export RUNNER_CFG_PAT=your_token_here
echo $RUNNER_CFG_PAT # Verify it's set
```
#### Network Connectivity
```bash
# Test GitHub connectivity
curl -H "Authorization: token $RUNNER_CFG_PAT" https://api.github.com/user
# For GitHub Enterprise Server
curl -H "Authorization: token $RUNNER_CFG_PAT" https://your-github-enterprise/api/v3/user
```
#### Service Installation Fails
```bash
# Check if running as appropriate user
whoami
# For Linux - ensure systemd is available
systemctl --version
# For macOS - ensure launchd is available
launchctl version
```
#### Runner Registration Fails
```bash
# Check if runner already exists
curl -H "Authorization: token $RUNNER_CFG_PAT" \
"https://api.github.com/repos/OWNER/REPO/actions/runners"
# Remove existing runner if needed
./config.sh remove --token $RUNNER_CFG_PAT
```
### Getting Help
- **Configuration Issues**: Check the [Prerequisites](start/envlinux.md) for your platform
- **Network Problems**: Review [network troubleshooting guide](checks/network.md)
- **General Support**: Visit [GitHub Community Discussions](https://github.com/orgs/community/discussions/categories/actions)
### Advanced Examples
#### Organization-level Runner with Custom Labels
```bash
export RUNNER_CFG_PAT=your_org_pat
curl -s https://raw.githubusercontent.com/actions/runner/main/scripts/create-latest-svc.sh | \
bash -s -- -s myorg -n prod-runner-1 -l production,linux,docker
```
#### Repository-level Runner for GitHub Enterprise
```bash
export RUNNER_CFG_PAT=your_ghe_pat
curl -s https://raw.githubusercontent.com/actions/runner/main/scripts/create-latest-svc.sh | \
bash -s -- -s myorg/myrepo -g github.company.com -n build-server -u builder
```

docs/checks/README.md Normal file
View File

@@ -0,0 +1,66 @@
# Troubleshooting Guides
This directory contains troubleshooting guides for common issues you might encounter when setting up or running GitHub Actions self-hosted runners.
## Quick Reference
| Issue Type | Guide | Description |
|------------|-------|-------------|
| 🌐 **Network** | [network.md](network.md) | Connection issues, proxy, firewall problems |
| 🔒 **SSL/TLS** | [sslcert.md](sslcert.md) | Certificate and TLS handshake issues |
| 📦 **Git** | [git.md](git.md) | Git configuration and repository access |
| ⚡ **Actions** | [actions.md](actions.md) | Action-specific runtime issues |
| 🟢 **Node.js** | [nodejs.md](nodejs.md) | Node.js runtime and npm issues |
| 🌍 **Internet** | [internet.md](internet.md) | General internet connectivity |
## Common First Steps
Before diving into specific guides, try these general troubleshooting steps:
### 1. Check Basic Connectivity
```bash
# Test GitHub API access
curl -I https://api.github.com/
# For GitHub Enterprise Server
curl -I https://your-github-enterprise.com/api/v3/
```
### 2. Verify Runner Status
```bash
# Check if runner service is running
./svc.sh status
# View recent logs
tail -f _diag/Runner_*.log
```
### 3. Test Runner Configuration
```bash
# Re-run configuration
./config.sh
# Test connection without running
./run.sh --check
```
## Getting Additional Help
If these guides don't resolve your issue:
1. **Search existing issues** in the [runner repository](https://github.com/actions/runner/issues)
2. **Check GitHub Status** at [githubstatus.com](https://githubstatus.com)
3. **Ask the community** in [GitHub Community Discussions](https://github.com/orgs/community/discussions/categories/actions)
4. **Contact support** for critical issues via [GitHub Support](https://support.github.com/contact)
## Contributing
Found a solution to a common problem not covered here? Consider contributing:
1. Create a new `.md` file for the issue type
2. Follow the format of existing guides
3. Submit a pull request with your improvements
---
💡 **Tip**: Always check the `_diag/` directory for detailed log files when troubleshooting issues.

View File

@@ -1,5 +1,24 @@
# Contributions
## Table of Contents
- [Getting Started](#getting-started)
- [Issues](#issues)
- [Enhancements and Feature Requests](#enhancements-and-feature-requests)
- [Required Dev Dependencies](#required-dev-dependencies)
- [Quickstart: Run a Job from a Real Repository](#quickstart-run-a-job-from-a-real-repository)
- [Development Life Cycle](#development-life-cycle)
- [Clone Repository](#clone-repository)
- [Build Layout](#build-layout)
- [Test Layout](#test-layout)
- [Configure Runner](#configure-runner)
- [Run Runner](#run-runner)
- [View Logs](#view-logs)
- [Editors](#editors)
- [Styling](#styling)
## Getting Started
We welcome contributions in the form of issues and pull requests, and we treat the contribution process the same for GitHub and external contributors. Please note the runner typically requires changes across the entire system, and we aim for issues in the runner to be entirely self-contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo; please create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page.
> IMPORTANT: Building your own runner is critical for the dev inner-loop process when contributing changes. However, only runners built and distributed by GitHub (releases) are supported in production. Be aware that workflows and orchestrations run service-side, with the runner being a remote process that runs steps. For that reason, the service can pull the runner forward, so customizations can be lost.
@@ -124,8 +143,8 @@ cd runner/_layout
./config.(sh/cmd) # configure your custom runner
```
You will need your the name of your repository and a runner registration token.
Check [Quickstart](##Quickstart:-Run-a-job-from-a-real-repository) if you don't know how to get this token.
You will need the name of your repository and a runner registration token.
Check the [Quickstart section](#quickstart-run-a-job-from-a-real-repository) if you don't know how to get this token.
These can also be passed down as arguments to `config.(sh/cmd)`:
```bash

View File

@@ -1,217 +0,0 @@
# Runner Dependency Management Process
## Overview
This document outlines the automated dependency management process for the GitHub Actions Runner, designed to ensure we maintain up-to-date and secure dependencies while providing predictable release cycles.
## Release Schedule
- **Monthly Runner Releases**: New runner versions are released monthly
- **Weekly Dependency Checks**: Automated workflows check for dependency updates every Monday
- **Security Patches**: Critical security vulnerabilities are addressed immediately outside the regular schedule
## Automated Workflows
**Note**: These workflows are implemented across separate PRs for easier review and independent deployment. Each workflow includes comprehensive error handling and security-focused vulnerability detection.
### 1. Foundation Labels
- **Workflow**: `.github/workflows/setup-labels.yml` (PR #4024)
- **Purpose**: Creates consistent dependency labels for all automation workflows
- **Labels**: `dependencies`, `security`, `typescript`, `needs-manual-review`
- **Prerequisite**: Must be merged before other workflows for proper labeling
### 2. Node.js Version Updates
- **Workflow**: `.github/workflows/node-upgrade.yml`
- **Schedule**: Mondays at 6:00 AM UTC
- **Purpose**: Updates Node.js 20 and 24 versions in `src/Misc/externals.sh`
- **Source**: [nodejs.org](https://nodejs.org) and [actions/alpine_nodejs](https://github.com/actions/alpine_nodejs)
- **Priority**: First (NPM depends on current Node.js versions)
### 3. NPM Security Audit
- **Primary Workflow**: `.github/workflows/npm-audit.yml` ("NPM Audit Fix")
- **Schedule**: Mondays at 7:00 AM UTC
- **Purpose**: Automated security vulnerability detection and basic fixes
- **Location**: `src/Misc/expressionFunc/hashFiles/`
- **Features**: npm audit, security patch application, PR creation
- **Dependency**: Runs after Node.js updates for optimal compatibility
- **Fallback Workflow**: `.github/workflows/npm-audit-typescript.yml` ("NPM Audit Fix with TypeScript Auto-Fix")
- **Trigger**: Manual dispatch only
- **Purpose**: Manual security audit with TypeScript compatibility fixes
- **Use Case**: When scheduled workflow fails or needs custom intervention
- **Features**: Enhanced TypeScript auto-repair, graduated security response
- **How to Use**:
1. If the scheduled "NPM Audit Fix" workflow fails, go to Actions tab
2. Select "NPM Audit Fix with TypeScript Auto-Fix" workflow
3. Click "Run workflow" and optionally specify fix level (auto/manual)
4. Review the generated PR for TypeScript compatibility issues
### 4. .NET SDK Updates
- **Workflow**: `.github/workflows/dotnet-upgrade.yml`
- **Schedule**: Mondays at midnight UTC
- **Purpose**: Updates .NET SDK and package versions with build validation
- **Features**: Global.json updates, NuGet package management, compatibility checking
- **Independence**: Runs independently of Node.js/NPM updates
### 5. Docker/Buildx Updates
- **Workflow**: `.github/workflows/docker-buildx-upgrade.yml` ("Docker/Buildx Version Upgrade")
- **Schedule**: Mondays at midnight UTC
- **Purpose**: Updates Docker and Docker Buildx versions with multi-platform validation
- **Features**: Container security scanning, multi-architecture build testing
- **Independence**: Runs independently of other dependency updates
### 6. Dependency Monitoring
- **Workflow**: `.github/workflows/dependency-check.yml` ("Dependency Status Check")
- **Schedule**: Mondays at 11:00 AM UTC
- **Purpose**: Comprehensive status report of all dependencies with security audit
- **Features**: Multi-dependency checking, npm audit status, build validation, choice of specific component checks
- **Summary**: Runs last to capture results from all morning dependency updates
## Release Process Integration
### Pre-Release Checklist
Before each monthly runner release:
1. **Check Dependency PRs**:
```bash
# List all open dependency PRs
gh pr list --label "dependencies" --state open
# List only automated weekly dependency updates
gh pr list --label "dependencies-weekly-check" --state open
# List only custom dependency automation (not dependabot)
gh pr list --label "dependencies-not-dependabot" --state open
```
2. **Run Manual Dependency Check**:
- Go to Actions tab → "Dependency Status Check" → "Run workflow"
- Review the summary for any outdated dependencies
3. **Review and Merge Updates**:
- Prioritize security-related updates
- Test dependency updates in development environment
- Merge approved dependency PRs
### Vulnerability Response
#### Critical Security Vulnerabilities
- **Response Time**: Within 24 hours
- **Process**:
1. Assess impact on runner security
2. Create hotfix branch if runner data security is affected
3. Expedite patch release if necessary
4. Document in security advisory if applicable
#### Non-Critical Vulnerabilities
- **Response Time**: Next monthly release
- **Process**:
1. Evaluate if vulnerability affects runner functionality
2. Include fix in regular dependency update cycle
3. Document in release notes
## Monitoring and Alerts
### GitHub Actions Workflow Status
- All dependency workflows create PRs with the `dependencies` label
- Failed workflows should be investigated immediately
- Weekly dependency status reports are generated automatically
### Manual Checks
You can manually trigger dependency checks:
- **Full Status**: Run "Dependency Status Check" workflow
- **Specific Component**: Use the dropdown to check individual dependencies
## Dependency Labels
All automated dependency PRs are tagged with labels for easy filtering and management:
### Primary Labels
- **`dependencies`**: All automated dependency-related PRs
- **`dependencies-weekly-check`**: Automated weekly dependency updates from scheduled workflows
- **`dependencies-not-dependabot`**: Custom dependency automation (not created by dependabot)
- **`security`**: Security vulnerability fixes and patches
- **`typescript`**: TypeScript compatibility and type definition updates
- **`needs-manual-review`**: Complex updates requiring human verification
### Technology-Specific Labels
- **`node`**: Node.js version updates
- **`javascript`**: JavaScript runtime and tooling updates
- **`npm`**: NPM package and security updates
- **`dotnet`**: .NET SDK and NuGet package updates
- **`docker`**: Docker and container tooling updates
### Workflow-Specific Branches
- **Node.js updates**: `chore/update-node` branch
- **NPM security fixes**: `chore/npm-audit-fix-YYYYMMDD` and `chore/npm-audit-fix-with-ts-repair` branches
- **NuGet/.NET updates**: `feature/dotnetsdk-upgrade/{version}` branches
- **Docker updates**: `feature/docker-buildx-upgrade` branch
## Special Considerations
### Node.js Updates
When updating Node.js versions, remember to:
1. Create a corresponding release in [actions/alpine_nodejs](https://github.com/actions/alpine_nodejs)
2. Follow the alpine_nodejs getting started guide
3. Test container builds with new Node versions
### .NET SDK Updates
- Only patch versions are auto-updated within the same major.minor version
- Major/minor version updates require manual review and testing
### Docker Updates
- Updates include both Docker Engine and Docker Buildx
- Verify compatibility with runner container workflows
## Troubleshooting
### Common Issues
1. **NPM Audit Workflow Fails**:
- Check if `package.json` exists in `src/Misc/expressionFunc/hashFiles/`
- Verify Node.js setup step succeeded
2. **Version Detection Fails**:
- Check if upstream APIs are available
- Verify parsing logic for version extraction
3. **PR Creation Fails**:
- Ensure `GITHUB_TOKEN` has sufficient permissions
- Check if branch already exists
### Contact
For questions about the dependency management process:
- Create an issue with the `dependencies` label
- Review existing dependency management workflows
- Consult the runner team for security-related concerns
## Metrics and KPIs
Track these metrics to measure dependency management effectiveness:
- Number of open dependency PRs at release time
- Time to merge dependency updates
- Number of security vulnerabilities by severity
- Release cycle adherence (monthly target)

View File

@@ -6,10 +6,21 @@
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#linux)."
## Install .Net Core 3.x Linux Dependencies
## Quick Setup
The `./config.sh` script will automatically check and guide you through installing .NET dependencies:
```bash
./config.sh
# If dependencies are missing, run:
./bin/installdependencies.sh
```
## Install .NET Core Linux Dependencies
The `./config.sh` will check .NET Core dependencies during runner configuration.
You might see something like this which indicates a dependency is missing:
The `./config.sh` will check .Net Core 3.x dependencies during runner configuration.
You might see something like this which indicate a dependency's missing.
```bash
./config.sh
libunwind.so.8 => not found
@@ -17,34 +28,87 @@ You might see something like this which indicate a dependency's missing.
Dependencies is missing for Dotnet Core 6.0
Execute ./bin/installdependencies.sh to install any missing Dotnet Core 6.0 dependencies.
```
You can easily correct the problem by executing `./bin/installdependencies.sh`.
The `installdependencies.sh` script should install all required dependencies on all supported Linux versions
> Note: The `installdependencies.sh` script will try to use the default package management mechanism on your Linux flavor (ex. `yum`/`apt-get`/`apt`).
### Full dependencies list
> **Note:** The `installdependencies.sh` script will try to use the default package management mechanism on your Linux flavor (ex. `yum`/`apt-get`/`apt`).
Debian based OS (Debian, Ubuntu, Linux Mint)
## Manual Dependency Installation
If the automatic installation doesn't work, you can manually install dependencies using your package manager:
### Debian based OS (Debian, Ubuntu, Linux Mint)
```bash
sudo apt-get update
sudo apt-get install -y liblttng-ust1 libkrb5-3 zlib1g libssl1.1 libicu66
```
**Required packages:**
- liblttng-ust1 or liblttng-ust0
- libkrb5-3
- zlib1g
- libssl1.1, libssl1.0.2 or libssl1.0.0
- libicu63, libicu60, libicu57 or libicu55
Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7)
### Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7)
```bash
sudo yum install -y lttng-ust openssl-libs krb5-libs zlib libicu
# Or for newer systems:
sudo dnf install -y lttng-ust openssl-libs krb5-libs zlib libicu
```
**Required packages:**
- lttng-ust
- openssl-libs
- krb5-libs
- zlib
- libicu
SUSE based OS (OpenSUSE, SUSE Enterprise)
### SUSE based OS (OpenSUSE, SUSE Enterprise)
```bash
sudo zypper install -y lttng-ust libopenssl1_1 krb5 zlib libicu60_2
```
**Required packages:**
- lttng-ust
- libopenssl1_1
- krb5
- zlib
- libicu60_2
## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/linux-prerequisites?tabs=netcore2x)
## Troubleshooting
### Common Issues
**Permission denied errors:**
```bash
sudo chmod +x ./config.sh ./run.sh
```
**Missing dependencies after installation:**
```bash
# Check what's missing
ldd ./bin/Runner.Listener
# Reinstall dependencies
./bin/installdependencies.sh
```
**SSL/TLS errors:**
```bash
# Update certificates
sudo apt-get update && sudo apt-get install ca-certificates
# Or for RHEL/CentOS:
sudo yum update ca-certificates
```
### Getting Help
- Check our [troubleshooting guide](../checks/README.md)
- Search [GitHub Community Discussions](https://github.com/orgs/community/discussions/categories/actions)
- Review [common network issues](../checks/network.md)
## [More .NET Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/linux-prerequisites?tabs=netcore2x)

View File

@@ -1,9 +1,136 @@
# ![osx](../res/apple_med.png) macOS/OS X System Prerequisites
# ![osx](../res/apple_med.png) macOS System Prerequisites
## Supported Versions
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#macos)."
## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore30)
## Quick Setup
1. **Download** the latest runner from [releases](https://github.com/actions/runner/releases)
2. **Extract** the downloaded archive: `tar xzf actions-runner-osx-x64-*.tar.gz`
3. **Run** `./config.sh` to configure the runner
4. **Install** as a service: `sudo ./svc.sh install` and `sudo ./svc.sh start`
## System Requirements
### macOS Version
- macOS 10.15 (Catalina) or later
- Both Intel (x64) and Apple Silicon (ARM64) are supported
### Required Tools
#### Homebrew (Recommended)
Install Homebrew for easy package management:
```bash
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
```
#### Development Tools
```bash
# Install Xcode Command Line Tools
xcode-select --install
# Install essential development tools via Homebrew
brew install git curl wget
```
### .NET Runtime
- .NET 6.0 runtime (automatically included with the runner)
## Setup Steps
### 1. Download and Extract
```bash
# Create runner directory
mkdir ~/actions-runner && cd ~/actions-runner
# Download latest release (replace with actual version)
curl -O -L https://github.com/actions/runner/releases/download/v2.xyz.z/actions-runner-osx-x64-2.xyz.z.tar.gz
# Extract
tar xzf ./actions-runner-osx-x64-2.xyz.z.tar.gz
```
### 2. Configure
```bash
./config.sh --url https://github.com/YOUR_ORG/YOUR_REPO --token YOUR_TOKEN
```
### 3. Run as Service (macOS)
```bash
# Install as launchd service
sudo ./svc.sh install
# Start the service
sudo ./svc.sh start
# Check status
sudo ./svc.sh status
```
### 4. Run Interactively (Alternative)
```bash
./run.sh
```
## macOS-Specific Considerations
### Security & Privacy
- Allow the runner executable through macOS Gatekeeper
- Grant necessary permissions in System Preferences > Security & Privacy
### Code Signing
For repositories that build macOS applications:
```bash
# Install certificates for code signing
security import developer_certificates.p12 -k ~/Library/Keychains/login.keychain
```
### Xcode Integration
If building iOS/macOS apps:
```bash
# Install Xcode from App Store or developer portal
# Set Xcode path
sudo xcode-select -s /Applications/Xcode.app/Contents/Developer
```
## Troubleshooting
### Common Issues
**Permission Denied:**
```bash
chmod +x ./config.sh ./run.sh ./svc.sh
```
**Gatekeeper Issues:**
```bash
# Allow unsigned binary to run
sudo spctl --master-disable
# Or specifically allow the runner
sudo spctl --add ./bin/Runner.Listener
```
**Service Not Starting:**
```bash
# Check system logs
sudo ./svc.sh status
tail -f ~/Library/Logs/Runner_*.log
```
**Path Issues:**
```bash
# Ensure proper PATH in your shell profile
echo 'export PATH="/usr/local/bin:$PATH"' >> ~/.zshrc
source ~/.zshrc
```
### Getting Help
- Check our [troubleshooting guide](../checks/README.md)
- Review [common network issues](../checks/network.md)
- Search [GitHub Community Discussions](https://github.com/orgs/community/discussions/categories/actions)
## [More .NET Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore30)

View File

@@ -4,4 +4,92 @@
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#windows)."
## [More .NET Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30)
## Quick Setup
1. **Download** the latest runner from [releases](https://github.com/actions/runner/releases)
2. **Extract** the downloaded archive to your desired directory
3. **Run** `config.cmd` as Administrator to configure the runner
4. **Install** as a service (optional): choose the service option during `config.cmd`, or pass `--runasservice`
## System Requirements
### .NET Runtime
- .NET 6.0 runtime (automatically installed with the runner)
- Windows PowerShell 5.1 or PowerShell Core 6.0+
### Windows Features
The runner itself does not require any optional Windows features. If your workflows use the Windows Subsystem for Linux, you can enable it:
```powershell
# Optional: enable WSL only if your workflows need it (run as Administrator)
Enable-WindowsOptionalFeature -Online -FeatureName Microsoft-Windows-Subsystem-Linux
```
### Visual Studio Build Tools (For builds requiring compilation)
For repositories that need to compile code, install:
- **Visual Studio 2017 or newer** [Install here](https://visualstudio.microsoft.com)
- **Visual Studio 2022 17.3 Preview or later** (for ARM64) [Install here](https://docs.microsoft.com/en-us/visualstudio/releases/2022/release-notes-preview)
### Git for Windows
- **Git for Windows** [Install here](https://git-scm.com/downloads) (required for repository operations)
## Common Setup Steps
### 1. Create Runner Directory
```cmd
mkdir C:\actions-runner
cd C:\actions-runner
```
### 2. Download and Extract
```powershell
# Download latest release
Invoke-WebRequest -Uri "https://github.com/actions/runner/releases/download/v2.xyz.z/actions-runner-win-x64-2.xyz.z.zip" -OutFile "actions-runner.zip"
# Extract
Expand-Archive -Path "actions-runner.zip" -DestinationPath "."
```
### 3. Configure
```cmd
.\config.cmd --url https://github.com/YOUR_ORG/YOUR_REPO --token YOUR_TOKEN
```
### 4. Run as Service
On Windows the runner is installed as a service during configuration rather than via `svc.sh`:
```cmd
:: Configure the runner to run as a Windows service
.\config.cmd --url https://github.com/YOUR_ORG/YOUR_REPO --token YOUR_TOKEN --runasservice
```
## Troubleshooting
### Common Issues
**PowerShell Execution Policy:**
```powershell
Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
```
**Windows Defender/Antivirus:**
- Add runner directory to antivirus exclusions
- Exclude `Runner.Listener.exe` and `Runner.Worker.exe`
**Firewall Issues:**
The runner only makes outbound HTTPS connections, so no inbound firewall rules are needed. Verify outbound access to GitHub:
```powershell
# Check outbound HTTPS connectivity to GitHub
Test-NetConnection github.com -Port 443
```
**Permission Issues:**
- Run `config.cmd` as Administrator
- Ensure the runner user has "Log on as a service" rights
### Getting Help
- Check our [troubleshooting guide](../checks/README.md)
- Review [common issues](../checks/actions.md)
- Search [GitHub Community Discussions](https://github.com/orgs/community/discussions/categories/actions)
## [More .NET Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30)

View File

@@ -1,12 +1,12 @@
# Source: https://github.com/dotnet/dotnet-docker
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-noble AS build
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-jammy AS build
ARG TARGETOS
ARG TARGETARCH
ARG RUNNER_VERSION
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.7.0
ARG DOCKER_VERSION=29.0.2
ARG BUILDX_VERSION=0.30.1
ARG DOCKER_VERSION=28.3.2
ARG BUILDX_VERSION=0.26.1
RUN apt update -y && apt install curl unzip -y
@@ -21,10 +21,6 @@ RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-c
&& unzip ./runner-container-hooks.zip -d ./k8s \
&& rm runner-container-hooks.zip
RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-container-hooks/releases/download/v0.8.0/actions-runner-hooks-k8s-0.8.0.zip \
&& unzip ./runner-container-hooks.zip -d ./k8s-novolume \
&& rm runner-container-hooks.zip
RUN export RUNNER_ARCH=${TARGETARCH} \
&& if [ "$RUNNER_ARCH" = "amd64" ]; then export DOCKER_ARCH=x86_64 ; fi \
&& if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
@@ -33,15 +29,15 @@ RUN export RUNNER_ARCH=${TARGETARCH} \
&& rm -rf docker.tgz \
&& mkdir -p /usr/local/lib/docker/cli-plugins \
&& curl -fLo /usr/local/lib/docker/cli-plugins/docker-buildx \
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
&& chmod +x /usr/local/lib/docker/cli-plugins/docker-buildx
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-noble
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-jammy
ENV DEBIAN_FRONTEND=noninteractive
ENV RUNNER_MANUALLY_TRAP_SIG=1
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
ENV ImageOS=ubuntu24
ENV ImageOS=ubuntu22
# 'gpg-agent' and 'software-properties-common' are needed for the 'add-apt-repository' command that follows
RUN apt update -y \
@@ -59,8 +55,7 @@ RUN adduser --disabled-password --gecos "" --uid 1001 runner \
&& usermod -aG sudo runner \
&& usermod -aG docker runner \
&& echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers \
&& chmod 777 /home/runner
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers
WORKDIR /home/runner

View File

@@ -1,27 +1,20 @@
## What's Changed
* Fix owner of /home/runner directory by @nikola-jokic in https://github.com/actions/runner/pull/4132
* Update Docker to v29.0.2 and Buildx to v0.30.1 by @github-actions[bot] in https://github.com/actions/runner/pull/4135
* Update workflow around runner docker image. by @TingluoHuang in https://github.com/actions/runner/pull/4133
* Fix regex for validating runner version format by @TingluoHuang in https://github.com/actions/runner/pull/4136
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4144
* Ensure safe_sleep tries alternative approaches by @TingluoHuang in https://github.com/actions/runner/pull/4146
* Bump actions/github-script from 7 to 8 by @dependabot[bot] in https://github.com/actions/runner/pull/4137
* Bump actions/checkout from 5 to 6 by @dependabot[bot] in https://github.com/actions/runner/pull/4130
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4149
* Bump docker image to use ubuntu 24.04 by @TingluoHuang in https://github.com/actions/runner/pull/4018
* Add support for case function by @AllanGuigou in https://github.com/actions/runner/pull/4147
* Cleanup feature flag actions_container_action_runner_temp by @ericsciple in https://github.com/actions/runner/pull/4163
* Bump actions/download-artifact from 6 to 7 by @dependabot[bot] in https://github.com/actions/runner/pull/4155
* Bump actions/upload-artifact from 5 to 6 by @dependabot[bot] in https://github.com/actions/runner/pull/4157
* Set ACTIONS_ORCHESTRATION_ID as env to actions. by @TingluoHuang in https://github.com/actions/runner/pull/4178
* Allow hosted VM report job telemetry via .setup_info file. by @TingluoHuang in https://github.com/actions/runner/pull/4186
* Bump typescript from 5.9.2 to 5.9.3 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/4184
* Bump Azure.Storage.Blobs from 12.26.0 to 12.27.0 by @dependabot[bot] in https://github.com/actions/runner/pull/4189
* Update Docker to v28.3.2 and Buildx to v0.26.1 by @github-actions[bot] in https://github.com/actions/runner/pull/3953
* Fix if statement structure in update script and variable reference by @salmanmkc in https://github.com/actions/runner/pull/3956
* Add V2 flow for runner deletion by @Samirat in https://github.com/actions/runner/pull/3954
* Node 20 -> Node 24 migration feature flagging, opt-in and opt-out environment variables by @salmanmkc in https://github.com/actions/runner/pull/3948
* Update Node20 and Node24 to latest by @djs-intel in https://github.com/actions/runner/pull/3972
* Redirect supported OS doc section to current public Docs location by @corycalahan in https://github.com/actions/runner/pull/3979
* Bump Microsoft.NET.Test.Sdk from 17.13.0 to 17.14.1 by @dependabot[bot] in https://github.com/actions/runner/pull/3975
* Bump Azure.Storage.Blobs from 12.24.0 to 12.25.0 by @dependabot[bot] in https://github.com/actions/runner/pull/3974
* Bump actions/download-artifact from 4 to 5 by @dependabot[bot] in https://github.com/actions/runner/pull/3973
* Bump actions/checkout from 4 to 5 by @dependabot[bot] in https://github.com/actions/runner/pull/3982
## New Contributors
* @AllanGuigou made their first contribution in https://github.com/actions/runner/pull/4147
* @Samirat made their first contribution in https://github.com/actions/runner/pull/3954
* @djs-intel made their first contribution in https://github.com/actions/runner/pull/3972
**Full Changelog**: https://github.com/actions/runner/compare/v2.330.0...v2.331.0
**Full Changelog**: https://github.com/actions/runner/compare/v2.327.1...v2.328.0
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.

View File

@@ -1,5 +1,5 @@
{
"plugins": ["@typescript-eslint", "@stylistic"],
"plugins": ["@typescript-eslint"],
"extends": ["plugin:github/recommended"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
@@ -26,7 +26,7 @@
],
"camelcase": "off",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@stylistic/func-call-spacing": ["error", "never"],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
@@ -47,8 +47,8 @@
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"@stylistic/semi": ["error", "never"],
"@stylistic/type-annotation-spacing": "error",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error",
"filenames/match-regex" : "off",
"github/no-then" : 1, // warning

File diff suppressed because it is too large

View File

@@ -10,7 +10,7 @@
"lint": "eslint src/**/*.ts",
"pack": "ncc build -o ../../layoutbin/hashFiles",
"all": "npm run format && npm run lint && npm run build && npm run pack",
"prepare": "cd ../../../../ && husky"
"prepare": "cd ../../../../ && husky install"
},
"repository": {
"type": "git",
@@ -35,17 +35,16 @@
"@actions/glob": "^0.4.0"
},
"devDependencies": {
"@stylistic/eslint-plugin": "^3.1.0",
"@types/node": "^22.0.0",
"@typescript-eslint/eslint-plugin": "^8.0.0",
"@typescript-eslint/parser": "^8.0.0",
"@vercel/ncc": "^0.38.3",
"@types/node": "^20.6.2",
"@typescript-eslint/eslint-plugin": "^6.7.2",
"@typescript-eslint/parser": "^6.7.2",
"@vercel/ncc": "^0.38.0",
"eslint": "^8.47.0",
"eslint-plugin-github": "^4.10.2",
"eslint-plugin-github": "^4.10.0",
"eslint-plugin-prettier": "^5.0.0",
"husky": "^9.1.7",
"husky": "^8.0.3",
"lint-staged": "^15.5.0",
"prettier": "^3.0.3",
"typescript": "^5.9.3"
"typescript": "^5.2.2"
}
}

View File

@@ -6,8 +6,8 @@ NODE_URL=https://nodejs.org/dist
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
# When you update Node versions you must also create a new release of alpine_nodejs at that updated version.
# Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started
NODE20_VERSION="20.19.6"
NODE24_VERSION="24.12.0"
NODE20_VERSION="20.19.4"
NODE24_VERSION="24.5.0"
get_abs_path() {
# exploits the fact that pwd will print abs path when no args

View File

@@ -1,6 +1,6 @@
[Unit]
Description={{Description}}
After=network-online.target
After=network.target
[Service]
ExecStart={{RunnerRoot}}/runsvc.sh

View File

@@ -1,7 +1,7 @@
/******/ (() => { // webpackBootstrap
/******/ var __webpack_modules__ = ({
/***/ 4711:
/***/ 2627:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -22,23 +22,13 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
@@ -56,15 +46,15 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const crypto = __importStar(__nccwpck_require__(6982));
const fs = __importStar(__nccwpck_require__(9896));
const glob = __importStar(__nccwpck_require__(7206));
const path = __importStar(__nccwpck_require__(6928));
const stream = __importStar(__nccwpck_require__(2203));
const util = __importStar(__nccwpck_require__(9023));
const crypto = __importStar(__nccwpck_require__(6113));
const fs = __importStar(__nccwpck_require__(7147));
const glob = __importStar(__nccwpck_require__(8090));
const path = __importStar(__nccwpck_require__(1017));
const stream = __importStar(__nccwpck_require__(2781));
const util = __importStar(__nccwpck_require__(3837));
function run() {
var _a, e_1, _b, _c;
return __awaiter(this, void 0, void 0, function* () {
var _a, e_1, _b, _c;
// arg0 -> node
// arg1 -> hashFiles.js
// env[followSymbolicLinks] = true/null
@@ -138,7 +128,7 @@ function run() {
/***/ }),
/***/ 4914:
/***/ 7351:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -164,8 +154,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.issue = exports.issueCommand = void 0;
const os = __importStar(__nccwpck_require__(857));
const utils_1 = __nccwpck_require__(302);
const os = __importStar(__nccwpck_require__(2037));
const utils_1 = __nccwpck_require__(5278);
/**
* Commands
*
@@ -237,7 +227,7 @@ function escapeProperty(s) {
/***/ }),
/***/ 7484:
/***/ 2186:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -272,12 +262,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
const command_1 = __nccwpck_require__(4914);
const file_command_1 = __nccwpck_require__(4753);
const utils_1 = __nccwpck_require__(302);
const os = __importStar(__nccwpck_require__(857));
const path = __importStar(__nccwpck_require__(6928));
const oidc_utils_1 = __nccwpck_require__(5306);
const command_1 = __nccwpck_require__(7351);
const file_command_1 = __nccwpck_require__(717);
const utils_1 = __nccwpck_require__(5278);
const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017));
const oidc_utils_1 = __nccwpck_require__(8041);
/**
* The code to exit an action
*/
@@ -562,17 +552,17 @@ exports.getIDToken = getIDToken;
/**
* Summary exports
*/
var summary_1 = __nccwpck_require__(1847);
var summary_1 = __nccwpck_require__(1327);
Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
/**
* @deprecated use core.summary
*/
var summary_2 = __nccwpck_require__(1847);
var summary_2 = __nccwpck_require__(1327);
Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
/**
* Path exports
*/
var path_utils_1 = __nccwpck_require__(1976);
var path_utils_1 = __nccwpck_require__(2981);
Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
@@ -580,7 +570,7 @@ Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: funct
/***/ }),
/***/ 4753:
/***/ 717:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -609,10 +599,10 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__nccwpck_require__(9896));
const os = __importStar(__nccwpck_require__(857));
const uuid_1 = __nccwpck_require__(2048);
const utils_1 = __nccwpck_require__(302);
const fs = __importStar(__nccwpck_require__(7147));
const os = __importStar(__nccwpck_require__(2037));
const uuid_1 = __nccwpck_require__(5840);
const utils_1 = __nccwpck_require__(5278);
function issueFileCommand(command, message) {
const filePath = process.env[`GITHUB_${command}`];
if (!filePath) {
@@ -645,7 +635,7 @@ exports.prepareKeyValueMessage = prepareKeyValueMessage;
/***/ }),
/***/ 5306:
/***/ 8041:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -661,9 +651,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.OidcClient = void 0;
const http_client_1 = __nccwpck_require__(4844);
const auth_1 = __nccwpck_require__(4552);
const core_1 = __nccwpck_require__(7484);
const http_client_1 = __nccwpck_require__(6255);
const auth_1 = __nccwpck_require__(5526);
const core_1 = __nccwpck_require__(2186);
class OidcClient {
static createHttpClient(allowRetry = true, maxRetry = 10) {
const requestOptions = {
@@ -729,7 +719,7 @@ exports.OidcClient = OidcClient;
/***/ }),
/***/ 1976:
/***/ 2981:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -755,7 +745,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
const path = __importStar(__nccwpck_require__(6928));
const path = __importStar(__nccwpck_require__(1017));
/**
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
* replaced with /.
@@ -794,7 +784,7 @@ exports.toPlatformPath = toPlatformPath;
/***/ }),
/***/ 1847:
/***/ 1327:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -810,8 +800,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
const os_1 = __nccwpck_require__(857);
const fs_1 = __nccwpck_require__(9896);
const os_1 = __nccwpck_require__(2037);
const fs_1 = __nccwpck_require__(7147);
const { access, appendFile, writeFile } = fs_1.promises;
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
@@ -1084,7 +1074,7 @@ exports.summary = _summary;
/***/ }),
/***/ 302:
/***/ 5278:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -1131,7 +1121,7 @@ exports.toCommandProperties = toCommandProperties;
/***/ }),
/***/ 7206:
/***/ 8090:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1147,8 +1137,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.hashFiles = exports.create = void 0;
const internal_globber_1 = __nccwpck_require__(103);
const internal_hash_files_1 = __nccwpck_require__(3608);
const internal_globber_1 = __nccwpck_require__(8298);
const internal_hash_files_1 = __nccwpck_require__(2448);
/**
* Constructs a globber
*
@@ -1184,7 +1174,7 @@ exports.hashFiles = hashFiles;
/***/ }),
/***/ 8164:
/***/ 1026:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1210,7 +1200,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getOptions = void 0;
const core = __importStar(__nccwpck_require__(7484));
const core = __importStar(__nccwpck_require__(2186));
/**
* Returns a copy with defaults filled in.
*/
@@ -1246,7 +1236,7 @@ exports.getOptions = getOptions;
/***/ }),
/***/ 103:
/***/ 8298:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1300,14 +1290,14 @@ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _ar
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DefaultGlobber = void 0;
const core = __importStar(__nccwpck_require__(7484));
const fs = __importStar(__nccwpck_require__(9896));
const globOptionsHelper = __importStar(__nccwpck_require__(8164));
const path = __importStar(__nccwpck_require__(6928));
const patternHelper = __importStar(__nccwpck_require__(8891));
const internal_match_kind_1 = __nccwpck_require__(2644);
const internal_pattern_1 = __nccwpck_require__(5370);
const internal_search_state_1 = __nccwpck_require__(9890);
const core = __importStar(__nccwpck_require__(2186));
const fs = __importStar(__nccwpck_require__(7147));
const globOptionsHelper = __importStar(__nccwpck_require__(1026));
const path = __importStar(__nccwpck_require__(1017));
const patternHelper = __importStar(__nccwpck_require__(9005));
const internal_match_kind_1 = __nccwpck_require__(1063);
const internal_pattern_1 = __nccwpck_require__(4536);
const internal_search_state_1 = __nccwpck_require__(9117);
const IS_WINDOWS = process.platform === 'win32';
class DefaultGlobber {
constructor(options) {
@@ -1488,7 +1478,7 @@ exports.DefaultGlobber = DefaultGlobber;
/***/ }),
/***/ 3608:
/***/ 2448:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1530,12 +1520,12 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.hashFiles = void 0;
const crypto = __importStar(__nccwpck_require__(6982));
const core = __importStar(__nccwpck_require__(7484));
const fs = __importStar(__nccwpck_require__(9896));
const stream = __importStar(__nccwpck_require__(2203));
const util = __importStar(__nccwpck_require__(9023));
const path = __importStar(__nccwpck_require__(6928));
const crypto = __importStar(__nccwpck_require__(6113));
const core = __importStar(__nccwpck_require__(2186));
const fs = __importStar(__nccwpck_require__(7147));
const stream = __importStar(__nccwpck_require__(2781));
const util = __importStar(__nccwpck_require__(3837));
const path = __importStar(__nccwpck_require__(1017));
function hashFiles(globber, currentWorkspace, verbose = false) {
var e_1, _a;
var _b;
@@ -1592,7 +1582,7 @@ exports.hashFiles = hashFiles;
/***/ }),
/***/ 2644:
/***/ 1063:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -1617,7 +1607,7 @@ var MatchKind;
/***/ }),
/***/ 4138:
/***/ 1849:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1646,8 +1636,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
const path = __importStar(__nccwpck_require__(6928));
const assert_1 = __importDefault(__nccwpck_require__(2613));
const path = __importStar(__nccwpck_require__(1017));
const assert_1 = __importDefault(__nccwpck_require__(9491));
const IS_WINDOWS = process.platform === 'win32';
/**
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
@@ -1822,7 +1812,7 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
/***/ }),
/***/ 6617:
/***/ 6836:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1851,9 +1841,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Path = void 0;
const path = __importStar(__nccwpck_require__(6928));
const pathHelper = __importStar(__nccwpck_require__(4138));
const assert_1 = __importDefault(__nccwpck_require__(2613));
const path = __importStar(__nccwpck_require__(1017));
const pathHelper = __importStar(__nccwpck_require__(1849));
const assert_1 = __importDefault(__nccwpck_require__(9491));
const IS_WINDOWS = process.platform === 'win32';
/**
* Helper class for parsing paths into segments
@@ -1942,7 +1932,7 @@ exports.Path = Path;
/***/ }),
/***/ 8891:
/***/ 9005:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1968,8 +1958,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
const pathHelper = __importStar(__nccwpck_require__(4138));
const internal_match_kind_1 = __nccwpck_require__(2644);
const pathHelper = __importStar(__nccwpck_require__(1849));
const internal_match_kind_1 = __nccwpck_require__(1063);
const IS_WINDOWS = process.platform === 'win32';
/**
* Given an array of patterns, returns an array of paths to search.
@@ -2043,7 +2033,7 @@ exports.partialMatch = partialMatch;
/***/ }),
/***/ 5370:
/***/ 4536:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -2072,13 +2062,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Pattern = void 0;
const os = __importStar(__nccwpck_require__(857));
const path = __importStar(__nccwpck_require__(6928));
const pathHelper = __importStar(__nccwpck_require__(4138));
const assert_1 = __importDefault(__nccwpck_require__(2613));
const minimatch_1 = __nccwpck_require__(3772);
const internal_match_kind_1 = __nccwpck_require__(2644);
const internal_path_1 = __nccwpck_require__(6617);
const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017));
const pathHelper = __importStar(__nccwpck_require__(1849));
const assert_1 = __importDefault(__nccwpck_require__(9491));
const minimatch_1 = __nccwpck_require__(3973);
const internal_match_kind_1 = __nccwpck_require__(1063);
const internal_path_1 = __nccwpck_require__(6836);
const IS_WINDOWS = process.platform === 'win32';
class Pattern {
constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
@@ -2305,7 +2295,7 @@ exports.Pattern = Pattern;
/***/ }),
/***/ 9890:
/***/ 9117:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -2323,7 +2313,7 @@ exports.SearchState = SearchState;
/***/ }),
/***/ 4552:
/***/ 5526:
/***/ (function(__unused_webpack_module, exports) {
"use strict";
@@ -2411,7 +2401,7 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand
/***/ }),
/***/ 4844:
/***/ 6255:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -2447,10 +2437,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
const http = __importStar(__nccwpck_require__(8611));
const https = __importStar(__nccwpck_require__(5692));
const pm = __importStar(__nccwpck_require__(4988));
const tunnel = __importStar(__nccwpck_require__(770));
const http = __importStar(__nccwpck_require__(3685));
const https = __importStar(__nccwpck_require__(5687));
const pm = __importStar(__nccwpck_require__(9835));
const tunnel = __importStar(__nccwpck_require__(4294));
var HttpCodes;
(function (HttpCodes) {
HttpCodes[HttpCodes["OK"] = 200] = "OK";
@@ -3036,7 +3026,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa
/***/ }),
/***/ 4988:
/***/ 9835:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -3125,7 +3115,7 @@ function isLoopbackAddress(host) {
/***/ }),
/***/ 9380:
/***/ 9417:
/***/ ((module) => {
"use strict";
@@ -3195,11 +3185,11 @@ function range(a, b, str) {
/***/ }),
/***/ 4691:
/***/ 3717:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
var concatMap = __nccwpck_require__(7087);
var balanced = __nccwpck_require__(9380);
var concatMap = __nccwpck_require__(6891);
var balanced = __nccwpck_require__(9417);
module.exports = expandTop;
@@ -3403,7 +3393,7 @@ function expand(str, isTop) {
/***/ }),
/***/ 7087:
/***/ 6891:
/***/ ((module) => {
module.exports = function (xs, fn) {
@@ -3423,19 +3413,19 @@ var isArray = Array.isArray || function (xs) {
/***/ }),
/***/ 3772:
/***/ 3973:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
module.exports = minimatch
minimatch.Minimatch = Minimatch
var path = (function () { try { return __nccwpck_require__(6928) } catch (e) {}}()) || {
var path = (function () { try { return __nccwpck_require__(1017) } catch (e) {}}()) || {
sep: '/'
}
minimatch.sep = path.sep
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
var expand = __nccwpck_require__(4691)
var expand = __nccwpck_require__(3717)
var plTypes = {
'!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
@@ -4377,27 +4367,27 @@ function regExpEscape (s) {
/***/ }),
/***/ 770:
/***/ 4294:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
module.exports = __nccwpck_require__(218);
module.exports = __nccwpck_require__(4219);
/***/ }),
/***/ 218:
/***/ 4219:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
var net = __nccwpck_require__(9278);
var tls = __nccwpck_require__(4756);
var http = __nccwpck_require__(8611);
var https = __nccwpck_require__(5692);
var events = __nccwpck_require__(4434);
var assert = __nccwpck_require__(2613);
var util = __nccwpck_require__(9023);
var net = __nccwpck_require__(1808);
var tls = __nccwpck_require__(4404);
var http = __nccwpck_require__(3685);
var https = __nccwpck_require__(5687);
var events = __nccwpck_require__(2361);
var assert = __nccwpck_require__(9491);
var util = __nccwpck_require__(3837);
exports.httpOverHttp = httpOverHttp;
@@ -4657,7 +4647,7 @@ exports.debug = debug; // for test
/***/ }),
/***/ 2048:
/***/ 5840:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4721,29 +4711,29 @@ Object.defineProperty(exports, "parse", ({
}
}));
var _v = _interopRequireDefault(__nccwpck_require__(6415));
var _v = _interopRequireDefault(__nccwpck_require__(8628));
var _v2 = _interopRequireDefault(__nccwpck_require__(1697));
var _v2 = _interopRequireDefault(__nccwpck_require__(6409));
var _v3 = _interopRequireDefault(__nccwpck_require__(4676));
var _v3 = _interopRequireDefault(__nccwpck_require__(5122));
var _v4 = _interopRequireDefault(__nccwpck_require__(9771));
var _v4 = _interopRequireDefault(__nccwpck_require__(9120));
var _nil = _interopRequireDefault(__nccwpck_require__(7723));
var _nil = _interopRequireDefault(__nccwpck_require__(5332));
var _version = _interopRequireDefault(__nccwpck_require__(5868));
var _version = _interopRequireDefault(__nccwpck_require__(1595));
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
var _parse = _interopRequireDefault(__nccwpck_require__(7267));
var _parse = _interopRequireDefault(__nccwpck_require__(2746));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/***/ }),
/***/ 216:
/***/ 4569:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4754,7 +4744,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _crypto = _interopRequireDefault(__nccwpck_require__(6982));
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -4773,7 +4763,7 @@ exports["default"] = _default;
/***/ }),
/***/ 7723:
/***/ 5332:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -4788,7 +4778,7 @@ exports["default"] = _default;
/***/ }),
/***/ 7267:
/***/ 2746:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4799,7 +4789,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -4840,7 +4830,7 @@ exports["default"] = _default;
/***/ }),
/***/ 7879:
/***/ 814:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -4855,7 +4845,7 @@ exports["default"] = _default;
/***/ }),
/***/ 2973:
/***/ 807:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4866,7 +4856,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = rng;
var _crypto = _interopRequireDefault(__nccwpck_require__(6982));
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -4886,7 +4876,7 @@ function rng() {
/***/ }),
/***/ 507:
/***/ 5274:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4897,7 +4887,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _crypto = _interopRequireDefault(__nccwpck_require__(6982));
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -4916,7 +4906,7 @@ exports["default"] = _default;
/***/ }),
/***/ 7597:
/***/ 8950:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4927,7 +4917,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -4962,7 +4952,7 @@ exports["default"] = _default;
/***/ }),
/***/ 6415:
/***/ 8628:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -4973,9 +4963,9 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _rng = _interopRequireDefault(__nccwpck_require__(2973));
var _rng = _interopRequireDefault(__nccwpck_require__(807));
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5076,7 +5066,7 @@ exports["default"] = _default;
/***/ }),
/***/ 1697:
/***/ 6409:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -5087,9 +5077,9 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _v = _interopRequireDefault(__nccwpck_require__(2930));
var _v = _interopRequireDefault(__nccwpck_require__(5998));
var _md = _interopRequireDefault(__nccwpck_require__(216));
var _md = _interopRequireDefault(__nccwpck_require__(4569));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5099,7 +5089,7 @@ exports["default"] = _default;
/***/ }),
/***/ 2930:
/***/ 5998:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -5111,9 +5101,9 @@ Object.defineProperty(exports, "__esModule", ({
exports["default"] = _default;
exports.URL = exports.DNS = void 0;
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
var _parse = _interopRequireDefault(__nccwpck_require__(7267));
var _parse = _interopRequireDefault(__nccwpck_require__(2746));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5184,7 +5174,7 @@ function _default(name, version, hashfunc) {
/***/ }),
/***/ 4676:
/***/ 5122:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -5195,9 +5185,9 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _rng = _interopRequireDefault(__nccwpck_require__(2973));
var _rng = _interopRequireDefault(__nccwpck_require__(807));
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5228,7 +5218,7 @@ exports["default"] = _default;
/***/ }),
/***/ 9771:
/***/ 9120:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -5239,9 +5229,9 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _v = _interopRequireDefault(__nccwpck_require__(2930));
var _v = _interopRequireDefault(__nccwpck_require__(5998));
var _sha = _interopRequireDefault(__nccwpck_require__(507));
var _sha = _interopRequireDefault(__nccwpck_require__(5274));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5251,7 +5241,7 @@ exports["default"] = _default;
/***/ }),
/***/ 6200:
/***/ 6900:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -5262,7 +5252,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _regex = _interopRequireDefault(__nccwpck_require__(7879));
var _regex = _interopRequireDefault(__nccwpck_require__(814));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5275,7 +5265,7 @@ exports["default"] = _default;
/***/ }),
/***/ 5868:
/***/ 1595:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -5286,7 +5276,7 @@ Object.defineProperty(exports, "__esModule", ({
}));
exports["default"] = void 0;
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
@@ -5303,7 +5293,7 @@ exports["default"] = _default;
/***/ }),
/***/ 2613:
/***/ 9491:
/***/ ((module) => {
"use strict";
@@ -5311,7 +5301,7 @@ module.exports = require("assert");
/***/ }),
/***/ 6982:
/***/ 6113:
/***/ ((module) => {
"use strict";
@@ -5319,7 +5309,7 @@ module.exports = require("crypto");
/***/ }),
/***/ 4434:
/***/ 2361:
/***/ ((module) => {
"use strict";
@@ -5327,7 +5317,7 @@ module.exports = require("events");
/***/ }),
/***/ 9896:
/***/ 7147:
/***/ ((module) => {
"use strict";
@@ -5335,7 +5325,7 @@ module.exports = require("fs");
/***/ }),
/***/ 8611:
/***/ 3685:
/***/ ((module) => {
"use strict";
@@ -5343,7 +5333,7 @@ module.exports = require("http");
/***/ }),
/***/ 5692:
/***/ 5687:
/***/ ((module) => {
"use strict";
@@ -5351,7 +5341,7 @@ module.exports = require("https");
/***/ }),
/***/ 9278:
/***/ 1808:
/***/ ((module) => {
"use strict";
@@ -5359,7 +5349,7 @@ module.exports = require("net");
/***/ }),
/***/ 857:
/***/ 2037:
/***/ ((module) => {
"use strict";
@@ -5367,7 +5357,7 @@ module.exports = require("os");
/***/ }),
/***/ 6928:
/***/ 1017:
/***/ ((module) => {
"use strict";
@@ -5375,7 +5365,7 @@ module.exports = require("path");
/***/ }),
/***/ 2203:
/***/ 2781:
/***/ ((module) => {
"use strict";
@@ -5383,7 +5373,7 @@ module.exports = require("stream");
/***/ }),
/***/ 4756:
/***/ 4404:
/***/ ((module) => {
"use strict";
@@ -5391,7 +5381,7 @@ module.exports = require("tls");
/***/ }),
/***/ 9023:
/***/ 3837:
/***/ ((module) => {
"use strict";
@@ -5441,7 +5431,7 @@ module.exports = require("util");
/******/ // startup
/******/ // Load entry module and return exports
/******/ // This entry module is referenced by other modules so it can't be inlined
/******/ var __webpack_exports__ = __nccwpck_require__(4711);
/******/ var __webpack_exports__ = __nccwpck_require__(2627);
/******/ module.exports = __webpack_exports__;
/******/
/******/ })()

View File

@@ -110,7 +110,7 @@ then
exit 1
fi
apt_get_with_fallbacks libicu76 libicu75 libicu74 libicu73 libicu72 libicu71 libicu70 libicu69 libicu68 libicu67 libicu66 libicu65 libicu63 libicu60 libicu57 libicu55 libicu52
apt_get_with_fallbacks libicu72 libicu71 libicu70 libicu69 libicu68 libicu67 libicu66 libicu65 libicu63 libicu60 libicu57 libicu55 libicu52
if [ $? -ne 0 ]
then
echo "'$apt_get' failed with exit code '$?'"

View File

@@ -1,37 +1,6 @@
#!/bin/bash
# try to use sleep if available
if [ -x "$(command -v sleep)" ]; then
sleep "$1"
exit 0
fi
# try to use ping if available
if [ -x "$(command -v ping)" ]; then
ping -c $(( $1 + 1 )) 127.0.0.1 > /dev/null
exit 0
fi
# try to use read -t from stdin/stdout/stderr if we are in bash
if [ -n "$BASH_VERSION" ]; then
if command -v read >/dev/null 2>&1; then
if [ -t 0 ]; then
read -t "$1" -u 0 || :;
exit 0
fi
if [ -t 1 ]; then
read -t "$1" -u 1 || :;
exit 0
fi
if [ -t 2 ]; then
read -t "$1" -u 2 || :;
exit 0
fi
fi
fi
# fallback to a busy wait
SECONDS=0
while [[ $SECONDS -lt $1 ]]; do
while [[ $SECONDS != $1 ]]; do
:
done
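
The loop condition is the substantive fix in this hunk: comparing bash's SECONDS counter with -lt terminates once the deadline has passed, whereas the old string inequality ($SECONDS != $1) can spin forever if the counter skips the exact target value or the argument is not a plain integer.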

View File

@@ -23,8 +23,6 @@ namespace GitHub.Runner.Common
Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token);
Task AcknowledgeRunnerRequestAsync(string runnerRequestId, Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, CancellationToken token);
Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials);
Task ForceRefreshConnection(VssCredentials credentials);
@@ -69,17 +67,10 @@ namespace GitHub.Runner.Common
var brokerSession = RetryRequest<TaskAgentMessage>(
async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken, shouldRetry: ShouldRetryException);
return brokerSession;
}
public async Task AcknowledgeRunnerRequestAsync(string runnerRequestId, Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, CancellationToken cancellationToken)
{
CheckConnection();
// No retries
await _brokerHttpClient.AcknowledgeRunnerRequestAsync(runnerRequestId, sessionId, version, status, os, architecture, cancellationToken);
}
public async Task DeleteSessionAsync(CancellationToken cancellationToken)
{
CheckConnection();

View File

@@ -1,10 +1,10 @@
using System;
using GitHub.Runner.Sdk;
using System;
using System.IO;
using System.Linq;
using System.Runtime.Serialization;
using System.Text;
using System.Threading;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
{
@@ -53,9 +53,6 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)]
public bool UseV2Flow { get; set; }
[DataMember(EmitDefaultValue = false)]
public bool UseRunnerAdminFlow { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ServerUrlV2 { get; set; }
@@ -64,20 +61,8 @@ namespace GitHub.Runner.Common
{
get
{
// If the value has been explicitly set, return it.
if (_isHostedServer.HasValue)
{
return _isHostedServer.Value;
}
// Otherwise, try to infer it from the GitHubUrl.
if (!string.IsNullOrEmpty(GitHubUrl))
{
return UrlUtil.IsHostedServer(new UriBuilder(GitHubUrl));
}
// Default to true since Hosted runners likely don't have this property set.
return true;
// Old runners do not have this property. Hosted runners likely don't have this property either.
return _isHostedServer ?? true;
}
set

View File

@@ -169,23 +169,19 @@ namespace GitHub.Runner.Common
public static readonly string AllowRunnerContainerHooks = "DistributedTask.AllowRunnerContainerHooks";
public static readonly string AddCheckRunIdToJobContext = "actions_add_check_run_id_to_job_context";
public static readonly string DisplayHelpfulActionsDownloadErrors = "actions_display_helpful_actions_download_errors";
public static readonly string SnapshotPreflightHostedRunnerCheck = "actions_snapshot_preflight_hosted_runner_check";
public static readonly string SnapshotPreflightImageGenPoolCheck = "actions_snapshot_preflight_image_gen_pool_check";
public static readonly string CompareWorkflowParser = "actions_runner_compare_workflow_parser";
public static readonly string SetOrchestrationIdEnvForActions = "actions_set_orchestration_id_env_for_actions";
}
// Node version migration related constants
public static class NodeMigration
{
// Node versions
public static readonly string Node20 = "node20";
public static readonly string Node24 = "node24";
// Environment variables for controlling node version selection
public static readonly string ForceNode24Variable = "FORCE_JAVASCRIPT_ACTIONS_TO_NODE24";
public static readonly string AllowUnsecureNodeVersionVariable = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
// Feature flags for controlling the migration phases
public static readonly string UseNode24ByDefaultFlag = "actions.runner.usenode24bydefault";
public static readonly string RequireNode24Flag = "actions.runner.requirenode24";
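
Read together, these constants suggest a staged migration: one env var to force Node 24, one escape hatch to keep an unsecure Node version, and two feature flags for the rollout default and the hard requirement. A hypothetical selection sketch in C# (the precedence here is an assumption, not the runner's confirmed logic):

using System;

static class NodeVersionSketch
{
    // Assumed precedence: hard-requirement flag, then the force env var,
    // then the rollout default; "node20"/"node24" mirror the constants above.
    public static string Pick(bool useNode24ByDefault, bool requireNode24)
    {
        if (requireNode24)
        {
            return "node24";
        }

        var force = Environment.GetEnvironmentVariable("FORCE_JAVASCRIPT_ACTIONS_TO_NODE24");
        if (string.Equals(force, "true", StringComparison.OrdinalIgnoreCase))
        {
            return "node24";
        }

        return useNode24ByDefault ? "node24" : "node20";
    }
}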

View File

@@ -30,7 +30,6 @@ namespace GitHub.Runner.Common
string environmentUrl,
IList<Telemetry> telemetry,
string billingOwnerId,
string infrastructureFailureCategory,
CancellationToken token);
Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
@@ -81,12 +80,11 @@ namespace GitHub.Runner.Common
string environmentUrl,
IList<Telemetry> telemetry,
string billingOwnerId,
string infrastructureFailureCategory,
CancellationToken cancellationToken)
{
CheckConnection();
return RetryRequest(
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, telemetry, billingOwnerId, infrastructureFailureCategory, cancellationToken), cancellationToken,
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, telemetry, billingOwnerId, cancellationToken), cancellationToken,
shouldRetry: ex =>
ex is not VssUnauthorizedException && // HTTP status 401
ex is not TaskOrchestrationJobNotFoundException); // HTTP status 404

View File

@@ -70,7 +70,7 @@ namespace GitHub.Runner.Common
protected async Task RetryRequest(Func<Task> func,
CancellationToken cancellationToken,
int maxAttempts = 5,
int maxRetryAttemptsCount = 5,
Func<Exception, bool> shouldRetry = null
)
{
@@ -79,31 +79,31 @@ namespace GitHub.Runner.Common
await func();
return Unit.Value;
}
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxAttempts, shouldRetry);
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxRetryAttemptsCount, shouldRetry);
}
protected async Task<T> RetryRequest<T>(Func<Task<T>> func,
CancellationToken cancellationToken,
int maxAttempts = 5,
int maxRetryAttemptsCount = 5,
Func<Exception, bool> shouldRetry = null
)
{
var attempt = 0;
var retryCount = 0;
while (true)
{
attempt++;
retryCount++;
cancellationToken.ThrowIfCancellationRequested();
try
{
return await func();
}
// TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
catch (Exception ex) when (attempt < maxAttempts && (shouldRetry == null || shouldRetry(ex)))
catch (Exception ex) when (retryCount < maxRetryAttemptsCount && (shouldRetry == null || shouldRetry(ex)))
{
Trace.Error("Catch exception during request");
Trace.Error(ex);
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxAttempts - attempt} attempt left.");
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxRetryAttemptsCount - retryCount} attempt left.");
await Task.Delay(backOff, cancellationToken);
}
}
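
The rename in this hunk (maxAttempts vs. maxRetryAttemptsCount) does not change the shape of the helper: a capped loop with jittered backoff between attempts. A self-contained C# sketch of that shape, assuming a uniform 5-15 second backoff like BackoffTimerHelper.GetRandomBackoff produces:

using System;
using System.Threading;
using System.Threading.Tasks;

static class RetrySketch
{
    private static readonly Random Jitter = new();

    public static async Task<T> RetryAsync<T>(
        Func<Task<T>> func,
        CancellationToken token,
        int maxAttempts = 5,
        Func<Exception, bool> shouldRetry = null)
    {
        var attempt = 0;
        while (true)
        {
            attempt++;
            token.ThrowIfCancellationRequested();
            try
            {
                return await func();
            }
            catch (Exception ex) when (attempt < maxAttempts && (shouldRetry == null || shouldRetry(ex)))
            {
                // Uniform jitter between 5 and 15 seconds before the next attempt.
                var backOff = TimeSpan.FromSeconds(5 + Jitter.NextDouble() * 10);
                await Task.Delay(backOff, token);
            }
        }
    }
}

Callers feed non-retriable conditions through shouldRetry, as the CompleteJobAsync hunk above does by excluding the 401 (VssUnauthorizedException) and 404 (TaskOrchestrationJobNotFoundException) cases.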

View File

@@ -23,7 +23,7 @@ namespace GitHub.Runner.Listener
private RunnerSettings _settings;
private ITerminal _term;
private TimeSpan _getNextMessageRetryInterval;
private TaskAgentStatus _runnerStatus = TaskAgentStatus.Online;
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
private VssCredentials _creds;
private VssCredentials _credsV2;
@@ -258,7 +258,7 @@ namespace GitHub.Runner.Listener
public void OnJobStatus(object sender, JobStatusEventArgs e)
{
Trace.Info("Received job status event. JobState: {0}", e.Status);
_runnerStatus = e.Status;
runnerStatus = e.Status;
try
{
_getMessagesTokenSource?.Cancel();
@@ -291,7 +291,7 @@ namespace GitHub.Runner.Listener
}
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
_runnerStatus,
runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
@@ -417,21 +417,6 @@ namespace GitHub.Runner.Listener
await Task.CompletedTask;
}
public async Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken)
{
using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // Short timeout
using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token);
Trace.Info($"Acknowledging runner request '{runnerRequestId}'.");
await _brokerServer.AcknowledgeRunnerRequestAsync(
runnerRequestId,
_session.SessionId,
_runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
linkedCts.Token);
}
private bool IsGetNextMessageExceptionRetriable(Exception ex)
{
if (ex is TaskAgentNotFoundException ||

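The AcknowledgeMessageAsync shown in this hunk links the caller's token with a five-second timeout so a slow broker cannot stall the message loop. A minimal C# sketch of that linked-token pattern; the acknowledge delegate stands in for the broker client call:

using System;
using System.Threading;
using System.Threading.Tasks;

static class AckSketch
{
    public static async Task BestEffortAsync(
        Func<CancellationToken, Task> acknowledge,
        CancellationToken outer)
    {
        using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5));
        using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(outer, timeoutCts.Token);
        try
        {
            // Cancelled by whichever fires first: the caller or the 5s timeout.
            await acknowledge(linkedCts.Token);
        }
        catch (OperationCanceledException)
        {
            // Best effort only; a timed-out acknowledgement must not fail the listener.
        }
    }
}
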
View File

@@ -153,8 +153,8 @@ namespace GitHub.Runner.Listener.Configuration
registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
runnerSettings.ServerUrl = authResult.TenantUrl;
runnerSettings.UseRunnerAdminFlow = authResult.UseRunnerAdminFlow;
Trace.Info($"Using runner-admin flow: {runnerSettings.UseRunnerAdminFlow}");
runnerSettings.UseV2Flow = authResult.UseV2Flow;
Trace.Info($"Using V2 flow: {runnerSettings.UseV2Flow}");
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
}
@@ -211,7 +211,7 @@ namespace GitHub.Runner.Listener.Configuration
string poolName = null;
TaskAgentPool agentPool = null;
List<TaskAgentPool> agentPools;
if (runnerSettings.UseRunnerAdminFlow)
if (runnerSettings.UseV2Flow)
{
agentPools = await _dotcomServer.GetRunnerGroupsAsync(runnerSettings.GitHubUrl, registerToken);
}
@@ -259,7 +259,7 @@ namespace GitHub.Runner.Listener.Configuration
var userLabels = command.GetLabels();
_term.WriteLine();
List<TaskAgent> agents;
if (runnerSettings.UseRunnerAdminFlow)
if (runnerSettings.UseV2Flow)
{
agents = await _dotcomServer.GetRunnerByNameAsync(runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
}
@@ -280,11 +280,10 @@ namespace GitHub.Runner.Listener.Configuration
try
{
if (runnerSettings.UseRunnerAdminFlow)
if (runnerSettings.UseV2Flow)
{
var runner = await _dotcomServer.ReplaceRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
runnerSettings.UseV2Flow = true; // if we are using runner admin, we also need to hit broker
agent.Id = runner.Id;
agent.Authorization = new TaskAgentAuthorization()
@@ -292,13 +291,6 @@ namespace GitHub.Runner.Listener.Configuration
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
};
if (!string.IsNullOrEmpty(runner.RunnerAuthorization.LegacyAuthorizationUrl?.AbsoluteUri))
{
agent.Authorization.AuthorizationUrl = runner.RunnerAuthorization.LegacyAuthorizationUrl;
agent.Properties["EnableAuthMigrationByDefault"] = true;
agent.Properties["AuthorizationUrlV2"] = runner.RunnerAuthorization.AuthorizationUrl.AbsoluteUri;
}
}
else
{
@@ -338,11 +330,10 @@ namespace GitHub.Runner.Listener.Configuration
try
{
if (runnerSettings.UseRunnerAdminFlow)
if (runnerSettings.UseV2Flow)
{
var runner = await _dotcomServer.AddRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
runnerSettings.UseV2Flow = true; // if we are using runner admin, we also need to hit broker
agent.Id = runner.Id;
agent.Authorization = new TaskAgentAuthorization()
@@ -350,13 +341,6 @@ namespace GitHub.Runner.Listener.Configuration
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
};
if (!string.IsNullOrEmpty(runner.RunnerAuthorization.LegacyAuthorizationUrl?.AbsoluteUri))
{
agent.Authorization.AuthorizationUrl = runner.RunnerAuthorization.LegacyAuthorizationUrl;
agent.Properties["EnableAuthMigrationByDefault"] = true;
agent.Properties["AuthorizationUrlV2"] = runner.RunnerAuthorization.AuthorizationUrl.AbsoluteUri;
}
}
else
{
@@ -416,26 +400,13 @@ namespace GitHub.Runner.Listener.Configuration
}
else
{
throw new NotSupportedException("Message queue listen OAuth token.");
}
// allow the server to override the serverUrlV2 and useV2Flow
if (agent.Properties.TryGetValue("ServerUrlV2", out string serverUrlV2) &&
!string.IsNullOrEmpty(serverUrlV2))
{
Trace.Info($"Service enforced serverUrlV2: {serverUrlV2}");
runnerSettings.ServerUrlV2 = serverUrlV2;
}
if (agent.Properties.TryGetValue("UseV2Flow", out bool useV2Flow) && useV2Flow)
{
Trace.Info($"Service enforced useV2Flow: {useV2Flow}");
runnerSettings.UseV2Flow = useV2Flow;
}
// Testing agent connection, detect any potential connection issue, like local clock skew that cause OAuth token expired.
if (!runnerSettings.UseV2Flow && !runnerSettings.UseRunnerAdminFlow)
if (!runnerSettings.UseV2Flow)
{
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials credential = credMgr.LoadCredentials(allowAuthUrlV2: false);
@@ -458,6 +429,20 @@ namespace GitHub.Runner.Listener.Configuration
}
}
// allow the server to override the serverUrlV2 and useV2Flow
if (agent.Properties.TryGetValue("ServerUrlV2", out string serverUrlV2) &&
!string.IsNullOrEmpty(serverUrlV2))
{
Trace.Info($"Service enforced serverUrlV2: {serverUrlV2}");
runnerSettings.ServerUrlV2 = serverUrlV2;
}
if (agent.Properties.TryGetValue("UseV2Flow", out bool useV2Flow) && useV2Flow)
{
Trace.Info($"Service enforced useV2Flow: {useV2Flow}");
runnerSettings.UseV2Flow = useV2Flow;
}
_term.WriteSection("Runner settings");
// We will Combine() what's stored with root. Defaults to string a relative path
@@ -553,7 +538,7 @@ namespace GitHub.Runner.Listener.Configuration
{
RunnerSettings settings = _store.GetSettings();
if (settings.UseRunnerAdminFlow)
if (settings.UseV2Flow)
{
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
await _dotcomServer.DeleteRunnerAsync(settings.GitHubUrl, deletionToken, settings.AgentId);

View File

@@ -89,7 +89,7 @@ namespace GitHub.Runner.Listener.Configuration
public string Token { get; set; }
[DataMember(Name = "use_v2_flow")]
public bool UseRunnerAdminFlow { get; set; }
public bool UseV2Flow { get; set; }
public VssCredentials ToVssCredentials()
{

View File

@@ -1211,7 +1211,7 @@ namespace GitHub.Runner.Listener
jobAnnotations.Add(annotation.Value);
}
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, telemetry: null, billingOwnerId: message.BillingOwnerId, infrastructureFailureCategory: null, CancellationToken.None);
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, telemetry: null, billingOwnerId: message.BillingOwnerId, CancellationToken.None);
}
catch (Exception ex)
{

View File

@@ -32,7 +32,6 @@ namespace GitHub.Runner.Listener
Task DeleteSessionAsync();
Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token);
Task DeleteMessageAsync(TaskAgentMessage message);
Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken);
Task RefreshListenerTokenAsync();
void OnJobStatus(object sender, JobStatusEventArgs e);
@@ -53,7 +52,7 @@ namespace GitHub.Runner.Listener
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
private TaskAgentStatus _runnerStatus = TaskAgentStatus.Online;
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
private VssCredentials _creds;
private VssCredentials _credsV2;
@@ -218,7 +217,7 @@ namespace GitHub.Runner.Listener
public void OnJobStatus(object sender, JobStatusEventArgs e)
{
Trace.Info("Received job status event. JobState: {0}", e.Status);
_runnerStatus = e.Status;
runnerStatus = e.Status;
try
{
_getMessagesTokenSource?.Cancel();
@@ -251,7 +250,7 @@ namespace GitHub.Runner.Listener
message = await _runnerServer.GetAgentMessageAsync(_settings.PoolId,
_session.SessionId,
_lastMessageId,
_runnerStatus,
runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
@@ -275,7 +274,7 @@ namespace GitHub.Runner.Listener
}
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
_runnerStatus,
runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
@@ -438,21 +437,6 @@ namespace GitHub.Runner.Listener
await _brokerServer.ForceRefreshConnection(_credsV2);
}
public async Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken)
{
using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // Short timeout
using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token);
Trace.Info($"Acknowledging runner request '{runnerRequestId}'.");
await _brokerServer.AcknowledgeRunnerRequestAsync(
runnerRequestId,
_session.SessionId,
_runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
linkedCts.Token);
}
private TaskAgentMessage DecryptMessage(TaskAgentMessage message)
{
if (_session.EncryptionKey == null ||

View File

@@ -654,42 +654,22 @@ namespace GitHub.Runner.Listener
else
{
var messageRef = StringUtil.ConvertFromJson<RunnerJobRequestRef>(message.Body);
// Acknowledge (best-effort)
if (messageRef.ShouldAcknowledge) // Temporary feature flag
{
try
{
await _listener.AcknowledgeMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
}
catch (Exception ex)
{
Trace.Error($"Best-effort acknowledge failed for request '{messageRef.RunnerRequestId}'");
Trace.Error(ex);
}
}
Pipelines.AgentJobRequestMessage jobRequestMessage = null;
// Create connection
var credMgr = HostContext.GetService<ICredentialManager>();
if (string.IsNullOrEmpty(messageRef.RunServiceUrl))
{
// Connect
var credMgr = HostContext.GetService<ICredentialManager>();
var creds = credMgr.LoadCredentials(allowAuthUrlV2: false);
var actionsRunServer = HostContext.CreateService<IActionsRunServer>();
await actionsRunServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
// Get job message
jobRequestMessage = await actionsRunServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
}
else
{
// Connect
var credMgr = HostContext.GetService<ICredentialManager>();
var credsV2 = credMgr.LoadCredentials(allowAuthUrlV2: true);
var runServer = HostContext.CreateService<IRunServer>();
await runServer.ConnectAsync(new Uri(messageRef.RunServiceUrl), credsV2);
// Get job message
try
{
jobRequestMessage = await runServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageRef.BillingOwnerId, messageQueueLoopTokenSource.Token);
@@ -718,10 +698,7 @@ namespace GitHub.Runner.Listener
}
}
// Dispatch
jobDispatcher.Run(jobRequestMessage, runOnce);
// Run once?
if (runOnce)
{
Trace.Info("One time used runner received job message.");

View File

@@ -10,9 +10,6 @@ namespace GitHub.Runner.Listener
[DataMember(Name = "runner_request_id")]
public string RunnerRequestId { get; set; }
[DataMember(Name = "should_acknowledge")]
public bool ShouldAcknowledge { get; set; }
[DataMember(Name = "run_service_url")]
public string RunServiceUrl { get; set; }
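
The DataMember names above bind the broker's snake_case payload to the C# contract. A runnable sketch of the same mapping using DataContractJsonSerializer (the runner itself deserializes through StringUtil.ConvertFromJson; the payload below is illustrative):

using System;
using System.IO;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Json;
using System.Text;

[DataContract]
internal sealed class JobRequestRefSketch
{
    [DataMember(Name = "runner_request_id")]
    public string RunnerRequestId { get; set; }

    [DataMember(Name = "run_service_url")]
    public string RunServiceUrl { get; set; }
}

internal static class Demo
{
    private static void Main()
    {
        const string json = "{\"runner_request_id\":\"42\",\"run_service_url\":\"https://broker.example\"}";
        var serializer = new DataContractJsonSerializer(typeof(JobRequestRefSketch));
        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json));
        var reference = (JobRequestRefSketch)serializer.ReadObject(stream);
        Console.WriteLine($"{reference.RunnerRequestId} -> {reference.RunServiceUrl}");
    }
}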

View File

@@ -111,7 +111,7 @@ namespace GitHub.Runner.Worker
{
// Log the error and fail the PrepareActionsAsync Initialization.
Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
executionContext.InfrastructureError(ex.Message, category: "resolve_action");
executionContext.InfrastructureError(ex.Message);
executionContext.Result = TaskResult.Failed;
throw;
}
@@ -119,7 +119,7 @@ namespace GitHub.Runner.Worker
{
// Log the error and fail the PrepareActionsAsync Initialization.
Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
executionContext.InfrastructureError(ex.Message, category: "invalid_action_download");
executionContext.InfrastructureError(ex.Message);
executionContext.Result = TaskResult.Failed;
throw;
}
@@ -378,7 +378,7 @@ namespace GitHub.Runner.Worker
string dockerFileLowerCase = Path.Combine(actionDirectory, "dockerfile");
if (File.Exists(manifestFile) || File.Exists(manifestFileYaml))
{
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
var manifestManager = HostContext.GetService<IActionManifestManager>();
if (File.Exists(manifestFile))
{
definition.Data = manifestManager.Load(executionContext, manifestFile);
@@ -777,15 +777,15 @@ namespace GitHub.Runner.Worker
IOUtil.DeleteDirectory(destDirectory, executionContext.CancellationToken);
Directory.CreateDirectory(destDirectory);
if (downloadInfo.PackageDetails != null)
if (downloadInfo.PackageDetails != null)
{
executionContext.Output($"##[group]Download immutable action package '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}'");
executionContext.Output($"Version: {downloadInfo.PackageDetails.Version}");
executionContext.Output($"Digest: {downloadInfo.PackageDetails.ManifestDigest}");
executionContext.Output($"Source commit SHA: {downloadInfo.ResolvedSha}");
executionContext.Output("##[endgroup]");
}
else
}
else
{
executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' (SHA:{downloadInfo.ResolvedSha})");
}
@@ -964,7 +964,7 @@ namespace GitHub.Runner.Worker
if (File.Exists(actionManifest) || File.Exists(actionManifestYaml))
{
executionContext.Debug($"action.yml for action: '{actionManifest}'.");
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
var manifestManager = HostContext.GetService<IActionManifestManager>();
ActionDefinitionData actionDefinitionData = null;
if (File.Exists(actionManifest))
{

View File

@@ -2,29 +2,29 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Reflection;
using System.Linq;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Actions.WorkflowParser;
using GitHub.Actions.WorkflowParser.Conversion;
using GitHub.Actions.WorkflowParser.ObjectTemplating;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Schema;
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
using GitHub.Actions.Expressions.Data;
using System.Reflection;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using System.Linq;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(ActionManifestManager))]
public interface IActionManifestManager : IRunnerService
{
public ActionDefinitionDataNew Load(IExecutionContext executionContext, string manifestFile);
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
DictionaryExpressionData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, ExpressionData> extraExpressionValues);
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, ExpressionData> extraExpressionValues);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, ExpressionData> extraExpressionValues);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
}
@@ -50,10 +50,10 @@ namespace GitHub.Runner.Worker
Trace.Info($"Load schema file with definitions: {StringUtil.ConvertToJson(_actionManifestSchema.Definitions.Keys)}");
}
public ActionDefinitionDataNew Load(IExecutionContext executionContext, string manifestFile)
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
var templateContext = CreateTemplateContext(executionContext);
ActionDefinitionDataNew actionDefinition = new();
ActionDefinitionData actionDefinition = new();
// Clean up file name real quick
// Instead of using Regex which can be computationally expensive,
@@ -160,21 +160,21 @@ namespace GitHub.Runner.Worker
return actionDefinition;
}
public DictionaryExpressionData EvaluateCompositeOutputs(
public DictionaryContextData EvaluateCompositeOutputs(
IExecutionContext executionContext,
TemplateToken token,
IDictionary<string, ExpressionData> extraExpressionValues)
IDictionary<string, PipelineContextData> extraExpressionValues)
{
DictionaryExpressionData result = null;
var result = default(DictionaryContextData);
if (token != null)
{
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
try
{
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null);
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null, omitHeader: true);
templateContext.Errors.Check();
result = token.ToExpressionData().AssertDictionary("composite outputs");
result = token.ToContextData().AssertDictionary("composite outputs");
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
@@ -184,13 +184,13 @@ namespace GitHub.Runner.Worker
templateContext.Errors.Check();
}
return result ?? new DictionaryExpressionData();
return result ?? new DictionaryContextData();
}
public List<string> EvaluateContainerArguments(
IExecutionContext executionContext,
SequenceToken token,
IDictionary<string, ExpressionData> extraExpressionValues)
IDictionary<string, PipelineContextData> extraExpressionValues)
{
var result = new List<string>();
@@ -199,7 +199,7 @@ namespace GitHub.Runner.Worker
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null);
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null, omitHeader: true);
templateContext.Errors.Check();
Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
@@ -229,7 +229,7 @@ namespace GitHub.Runner.Worker
public Dictionary<string, string> EvaluateContainerEnvironment(
IExecutionContext executionContext,
MappingToken token,
IDictionary<string, ExpressionData> extraExpressionValues)
IDictionary<string, PipelineContextData> extraExpressionValues)
{
var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -238,7 +238,7 @@ namespace GitHub.Runner.Worker
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null);
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null, omitHeader: true);
templateContext.Errors.Check();
Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
@@ -281,7 +281,7 @@ namespace GitHub.Runner.Worker
var templateContext = CreateTemplateContext(executionContext);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null);
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null, omitHeader: true);
templateContext.Errors.Check();
Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
@@ -303,7 +303,7 @@ namespace GitHub.Runner.Worker
private TemplateContext CreateTemplateContext(
IExecutionContext executionContext,
IDictionary<string, ExpressionData> extraExpressionValues = null)
IDictionary<string, PipelineContextData> extraExpressionValues = null)
{
var result = new TemplateContext
{
@@ -314,18 +314,13 @@ namespace GitHub.Runner.Worker
maxEvents: 1000000,
maxBytes: 10 * 1024 * 1024),
Schema = _actionManifestSchema,
// TODO: Switch to real tracewriter for cutover
TraceWriter = new GitHub.Actions.WorkflowParser.ObjectTemplating.EmptyTraceWriter(),
AllowCaseFunction = false,
TraceWriter = executionContext.ToTemplateTraceWriter(),
};
// Expression values from execution context
foreach (var pair in executionContext.ExpressionValues)
{
// Convert old PipelineContextData to new ExpressionData
var json = StringUtil.ConvertToJson(pair.Value, Newtonsoft.Json.Formatting.None);
var newValue = StringUtil.ConvertFromJson<GitHub.Actions.Expressions.Data.ExpressionData>(json);
result.ExpressionValues[pair.Key] = newValue;
result.ExpressionValues[pair.Key] = pair.Value;
}
// Extra expression values
@@ -337,19 +332,10 @@ namespace GitHub.Runner.Worker
}
}
// Expression functions
foreach (var func in executionContext.ExpressionFunctions)
// Expression functions from execution context
foreach (var item in executionContext.ExpressionFunctions)
{
GitHub.Actions.Expressions.IFunctionInfo newFunc = func.Name switch
{
"always" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewAlwaysFunction>(func.Name, func.MinParameters, func.MaxParameters),
"cancelled" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewCancelledFunction>(func.Name, func.MinParameters, func.MaxParameters),
"failure" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewFailureFunction>(func.Name, func.MinParameters, func.MaxParameters),
"success" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewSuccessFunction>(func.Name, func.MinParameters, func.MaxParameters),
"hashFiles" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewHashFilesFunction>(func.Name, func.MinParameters, func.MaxParameters),
_ => throw new NotSupportedException($"Expression function '{func.Name}' is not supported in ActionManifestManager")
};
result.ExpressionFunctions.Add(newFunc);
result.ExpressionFunctions.Add(item);
}
// Add the file table from the Execution Context
@@ -382,7 +368,7 @@ namespace GitHub.Runner.Worker
var postToken = default(StringToken);
var postEntrypointToken = default(StringToken);
var postIfToken = default(StringToken);
var steps = default(List<GitHub.Actions.WorkflowParser.IStep>);
var steps = default(List<Pipelines.Step>);
foreach (var run in runsMapping)
{
@@ -430,7 +416,7 @@ namespace GitHub.Runner.Worker
break;
case "steps":
var stepsToken = run.Value.AssertSequence("steps");
steps = WorkflowTemplateConverter.ConvertToSteps(templateContext, stepsToken);
steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
templateContext.Errors.Check();
break;
default:
@@ -449,7 +435,7 @@ namespace GitHub.Runner.Worker
}
else
{
return new ContainerActionExecutionDataNew()
return new ContainerActionExecutionData()
{
Image = imageToken.Value,
Arguments = argsToken,
@@ -492,11 +478,11 @@ namespace GitHub.Runner.Worker
}
else
{
return new CompositeActionExecutionDataNew()
return new CompositeActionExecutionData()
{
Steps = steps,
PreSteps = new List<GitHub.Actions.WorkflowParser.IStep>(),
PostSteps = new Stack<GitHub.Actions.WorkflowParser.IStep>(),
Steps = steps.Cast<Pipelines.ActionStep>().ToList(),
PreSteps = new List<Pipelines.ActionStep>(),
PostSteps = new Stack<Pipelines.ActionStep>(),
InitCondition = "always()",
CleanupCondition = "always()",
Outputs = outputs
@@ -521,7 +507,7 @@ namespace GitHub.Runner.Worker
private void ConvertInputs(
TemplateToken inputsToken,
ActionDefinitionDataNew actionDefinition)
ActionDefinitionData actionDefinition)
{
actionDefinition.Inputs = new MappingToken(null, null, null);
var inputsMapping = inputsToken.AssertMapping("inputs");
@@ -556,49 +542,5 @@ namespace GitHub.Runner.Worker
}
}
}
public sealed class ActionDefinitionDataNew
{
public string Name { get; set; }
public string Description { get; set; }
public MappingToken Inputs { get; set; }
public ActionExecutionData Execution { get; set; }
public Dictionary<String, String> Deprecated { get; set; }
}
public sealed class ContainerActionExecutionDataNew : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Container;
public override bool HasPre => !string.IsNullOrEmpty(Pre);
public override bool HasPost => !string.IsNullOrEmpty(Post);
public string Image { get; set; }
public string EntryPoint { get; set; }
public SequenceToken Arguments { get; set; }
public MappingToken Environment { get; set; }
public string Pre { get; set; }
public string Post { get; set; }
}
public sealed class CompositeActionExecutionDataNew : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Composite;
public override bool HasPre => PreSteps.Count > 0;
public override bool HasPost => PostSteps.Count > 0;
public List<GitHub.Actions.WorkflowParser.IStep> PreSteps { get; set; }
public List<GitHub.Actions.WorkflowParser.IStep> Steps { get; set; }
public Stack<GitHub.Actions.WorkflowParser.IStep> PostSteps { get; set; }
public MappingToken Outputs { get; set; }
}
}
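
A caller-side sketch of the interface this file settles on. The names hostContext, executionContext, and manifestFile are assumed from the runner's service infrastructure, not part of this diff; the merge loop mirrors the hunk further below that resolves IActionManifestManager:

var manifestManager = hostContext.GetService<IActionManifestManager>();

// Parse action.yml into name/description/inputs/runs.
ActionDefinitionData definition = manifestManager.Load(executionContext, manifestFile);

// Resolve each input's default, which may contain expressions such as ${{ github.workspace }}.
foreach (var input in definition.Inputs)
{
    string name = input.Key.AssertString("action input name").Value;
    string defaultValue = manifestManager.EvaluateDefaultInput(executionContext, name, input.Value);
}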

View File

@@ -1,547 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System.Reflection;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using System.Linq;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(ActionManifestManagerLegacy))]
public interface IActionManifestManagerLegacy : IRunnerService
{
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
}
public sealed class ActionManifestManagerLegacy : RunnerService, IActionManifestManagerLegacy
{
private TemplateSchema _actionManifestSchema;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
var assembly = Assembly.GetExecutingAssembly();
var json = default(string);
using (var stream = assembly.GetManifestResourceStream("GitHub.Runner.Worker.action_yaml.json"))
using (var streamReader = new StreamReader(stream))
{
json = streamReader.ReadToEnd();
}
var objectReader = new JsonObjectReader(null, json);
_actionManifestSchema = TemplateSchema.Load(objectReader);
ArgUtil.NotNull(_actionManifestSchema, nameof(_actionManifestSchema));
Trace.Info($"Load schema file with definitions: {StringUtil.ConvertToJson(_actionManifestSchema.Definitions.Keys)}");
}
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
var templateContext = CreateTemplateContext(executionContext);
ActionDefinitionData actionDefinition = new();
// Clean up the file name.
// Instead of using a Regex, which can be computationally expensive,
// we simply remove basePath.Length + 1 characters from the front of the fileName.
string basePath = HostContext.GetDirectory(WellKnownDirectory.Actions);
string fileRelativePath = manifestFile;
if (manifestFile.Contains(basePath))
{
fileRelativePath = manifestFile.Remove(0, basePath.Length + 1);
}
try
{
var token = default(TemplateToken);
// Get the file ID
var fileId = templateContext.GetFileId(fileRelativePath);
// Add this file to the FileTable in executionContext if it hasn't been added already
// we use > since fileID is 1 indexed
if (fileId > executionContext.Global.FileTable.Count)
{
executionContext.Global.FileTable.Add(fileRelativePath);
}
// Read the file
var fileContent = File.ReadAllText(manifestFile);
using (var stringReader = new StringReader(fileContent))
{
var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
token = TemplateReader.Read(templateContext, "action-root", yamlObjectReader, fileId, out _);
}
var actionMapping = token.AssertMapping("action manifest root");
var actionOutputs = default(MappingToken);
var actionRunValueToken = default(TemplateToken);
foreach (var actionPair in actionMapping)
{
var propertyName = actionPair.Key.AssertString($"action.yml property key");
switch (propertyName.Value)
{
case "name":
actionDefinition.Name = actionPair.Value.AssertString("name").Value;
break;
case "outputs":
actionOutputs = actionPair.Value.AssertMapping("outputs");
break;
case "description":
actionDefinition.Description = actionPair.Value.AssertString("description").Value;
break;
case "inputs":
ConvertInputs(actionPair.Value, actionDefinition);
break;
case "runs":
// Defer evaluation of the runs token until after the loop so that the order of the outputs section doesn't matter.
actionRunValueToken = actionPair.Value;
break;
default:
Trace.Info($"Ignore action property {propertyName}.");
break;
}
}
// Evaluate Runs Last
if (actionRunValueToken != null)
{
actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionRunValueToken, fileRelativePath, actionOutputs);
}
}
catch (Exception ex)
{
Trace.Error(ex);
templateContext.Errors.Add(ex);
}
if (templateContext.Errors.Count > 0)
{
foreach (var error in templateContext.Errors)
{
Trace.Error($"Action.yml load error: {error.Message}");
executionContext.Error(error.Message);
}
throw new ArgumentException($"Failed to load {fileRelativePath}");
}
if (actionDefinition.Execution == null)
{
executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
throw new ArgumentException($"Top level 'runs:' section is required for {fileRelativePath}");
}
else
{
Trace.Info($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
}
return actionDefinition;
}
public DictionaryContextData EvaluateCompositeOutputs(
IExecutionContext executionContext,
TemplateToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
{
var result = default(DictionaryContextData);
if (token != null)
{
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
try
{
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null, omitHeader: true);
templateContext.Errors.Check();
result = token.ToContextData().AssertDictionary("composite outputs");
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
templateContext.Errors.Add(ex);
}
templateContext.Errors.Check();
}
return result ?? new DictionaryContextData();
}
public List<string> EvaluateContainerArguments(
IExecutionContext executionContext,
SequenceToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
{
var result = new List<string>();
if (token != null)
{
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null, omitHeader: true);
templateContext.Errors.Check();
Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
// Sequence
var args = evaluateResult.AssertSequence("container args");
foreach (var arg in args)
{
var str = arg.AssertString("container arg").Value;
result.Add(str);
Trace.Info($"Add argument {str}");
}
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
Trace.Error(ex);
templateContext.Errors.Add(ex);
}
templateContext.Errors.Check();
}
return result;
}
public Dictionary<string, string> EvaluateContainerEnvironment(
IExecutionContext executionContext,
MappingToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
{
var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
if (token != null)
{
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null, omitHeader: true);
templateContext.Errors.Check();
Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
// Mapping
var mapping = evaluateResult.AssertMapping("container env");
foreach (var pair in mapping)
{
// Literal key
var key = pair.Key.AssertString("container env key");
// Literal value
var value = pair.Value.AssertString("container env value");
result[key.Value] = value.Value;
Trace.Info($"Add env {key} = {value}");
}
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
Trace.Error(ex);
templateContext.Errors.Add(ex);
}
templateContext.Errors.Check();
}
return result;
}
public string EvaluateDefaultInput(
IExecutionContext executionContext,
string inputName,
TemplateToken token)
{
string result = "";
if (token != null)
{
var templateContext = CreateTemplateContext(executionContext);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null, omitHeader: true);
templateContext.Errors.Check();
Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
// String
result = evaluateResult.AssertString($"default value for input '{inputName}'").Value;
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
Trace.Error(ex);
templateContext.Errors.Add(ex);
}
templateContext.Errors.Check();
}
return result;
}
private TemplateContext CreateTemplateContext(
IExecutionContext executionContext,
IDictionary<string, PipelineContextData> extraExpressionValues = null)
{
var result = new TemplateContext
{
CancellationToken = CancellationToken.None,
Errors = new TemplateValidationErrors(10, int.MaxValue), // Don't truncate error messages otherwise we might not scrub secrets correctly
Memory = new TemplateMemory(
maxDepth: 100,
maxEvents: 1000000,
maxBytes: 10 * 1024 * 1024),
Schema = _actionManifestSchema,
TraceWriter = executionContext.ToTemplateTraceWriter(),
AllowCaseFunction = false,
};
// Expression values from execution context
foreach (var pair in executionContext.ExpressionValues)
{
result.ExpressionValues[pair.Key] = pair.Value;
}
// Extra expression values
if (extraExpressionValues?.Count > 0)
{
foreach (var pair in extraExpressionValues)
{
result.ExpressionValues[pair.Key] = pair.Value;
}
}
// Expression functions from execution context
foreach (var item in executionContext.ExpressionFunctions)
{
result.ExpressionFunctions.Add(item);
}
// Add the file table from the Execution Context
for (var i = 0; i < executionContext.Global.FileTable.Count; i++)
{
result.GetFileId(executionContext.Global.FileTable[i]);
}
return result;
}
private ActionExecutionData ConvertRuns(
IExecutionContext executionContext,
TemplateContext templateContext,
TemplateToken inputsToken,
String fileRelativePath,
MappingToken outputs = null)
{
var runsMapping = inputsToken.AssertMapping("runs");
var usingToken = default(StringToken);
var imageToken = default(StringToken);
var argsToken = default(SequenceToken);
var entrypointToken = default(StringToken);
var envToken = default(MappingToken);
var mainToken = default(StringToken);
var pluginToken = default(StringToken);
var preToken = default(StringToken);
var preEntrypointToken = default(StringToken);
var preIfToken = default(StringToken);
var postToken = default(StringToken);
var postEntrypointToken = default(StringToken);
var postIfToken = default(StringToken);
var steps = default(List<Pipelines.Step>);
foreach (var run in runsMapping)
{
var runsKey = run.Key.AssertString("runs key").Value;
switch (runsKey)
{
case "using":
usingToken = run.Value.AssertString("using");
break;
case "image":
imageToken = run.Value.AssertString("image");
break;
case "args":
argsToken = run.Value.AssertSequence("args");
break;
case "entrypoint":
entrypointToken = run.Value.AssertString("entrypoint");
break;
case "env":
envToken = run.Value.AssertMapping("env");
break;
case "main":
mainToken = run.Value.AssertString("main");
break;
case "plugin":
pluginToken = run.Value.AssertString("plugin");
break;
case "post":
postToken = run.Value.AssertString("post");
break;
case "post-entrypoint":
postEntrypointToken = run.Value.AssertString("post-entrypoint");
break;
case "post-if":
postIfToken = run.Value.AssertString("post-if");
break;
case "pre":
preToken = run.Value.AssertString("pre");
break;
case "pre-entrypoint":
preEntrypointToken = run.Value.AssertString("pre-entrypoint");
break;
case "pre-if":
preIfToken = run.Value.AssertString("pre-if");
break;
case "steps":
var stepsToken = run.Value.AssertSequence("steps");
steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
templateContext.Errors.Check();
break;
default:
Trace.Info($"Ignore run property {runsKey}.");
break;
}
}
if (usingToken != null)
{
if (string.Equals(usingToken.Value, "docker", StringComparison.OrdinalIgnoreCase))
{
if (string.IsNullOrEmpty(imageToken?.Value))
{
throw new ArgumentNullException($"You are using a Container Action but an image is not provided in {fileRelativePath}.");
}
else
{
return new ContainerActionExecutionData()
{
Image = imageToken.Value,
Arguments = argsToken,
EntryPoint = entrypointToken?.Value,
Environment = envToken,
Pre = preEntrypointToken?.Value,
InitCondition = preIfToken?.Value ?? "always()",
Post = postEntrypointToken?.Value,
CleanupCondition = postIfToken?.Value ?? "always()"
};
}
}
else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase) ||
string.Equals(usingToken.Value, "node16", StringComparison.OrdinalIgnoreCase) ||
string.Equals(usingToken.Value, "node20", StringComparison.OrdinalIgnoreCase) ||
string.Equals(usingToken.Value, "node24", StringComparison.OrdinalIgnoreCase))
{
if (string.IsNullOrEmpty(mainToken?.Value))
{
throw new ArgumentNullException($"You are using a JavaScript Action but there is not an entry JavaScript file provided in {fileRelativePath}.");
}
else
{
return new NodeJSActionExecutionData()
{
NodeVersion = usingToken.Value,
Script = mainToken.Value,
Pre = preToken?.Value,
InitCondition = preIfToken?.Value ?? "always()",
Post = postToken?.Value,
CleanupCondition = postIfToken?.Value ?? "always()"
};
}
}
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase))
{
if (steps == null)
{
throw new ArgumentNullException($"You are using a composite action but there are no steps provided in {fileRelativePath}.");
}
else
{
return new CompositeActionExecutionData()
{
Steps = steps.Cast<Pipelines.ActionStep>().ToList(),
PreSteps = new List<Pipelines.ActionStep>(),
PostSteps = new Stack<Pipelines.ActionStep>(),
InitCondition = "always()",
CleanupCondition = "always()",
Outputs = outputs
};
}
}
else
{
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker', 'node12', 'node16', 'node20' or 'node24' instead.");
}
}
else if (pluginToken != null)
{
return new PluginActionExecutionData()
{
Plugin = pluginToken.Value
};
}
throw new NotSupportedException("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16', 'node20' or 'node24'.");
}
private void ConvertInputs(
TemplateToken inputsToken,
ActionDefinitionData actionDefinition)
{
actionDefinition.Inputs = new MappingToken(null, null, null);
var inputsMapping = inputsToken.AssertMapping("inputs");
foreach (var input in inputsMapping)
{
bool hasDefault = false;
var inputName = input.Key.AssertString("input name");
var inputMetadata = input.Value.AssertMapping("input metadata");
foreach (var metadata in inputMetadata)
{
var metadataName = metadata.Key.AssertString("input metadata").Value;
if (string.Equals(metadataName, "default", StringComparison.OrdinalIgnoreCase))
{
hasDefault = true;
actionDefinition.Inputs.Add(inputName, metadata.Value);
}
else if (string.Equals(metadataName, "deprecationMessage", StringComparison.OrdinalIgnoreCase))
{
if (actionDefinition.Deprecated == null)
{
actionDefinition.Deprecated = new Dictionary<String, String>();
}
var message = metadata.Value.AssertString("input deprecationMessage");
actionDefinition.Deprecated.Add(inputName.Value, message.Value);
}
}
if (!hasDefault)
{
actionDefinition.Inputs.Add(inputName, new StringToken(null, null, null, string.Empty));
}
}
}
}
}
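
For reference, a load round-trip the deleted legacy manager above would accept. This is a sketch only: manifestPath, executionContext, and the resolved legacyManager are assumed names for illustration.

var yaml = string.Join("\n",
    "name: 'Hello'",
    "description: 'Greets someone'",
    "runs:",
    "  using: 'node20'",
    "  main: 'dist/index.js'");
File.WriteAllText(manifestPath, yaml);
var definition = legacyManager.Load(executionContext, manifestPath);
// Expected result: a NodeJSActionExecutionData with NodeVersion "node20",
// Script "dist/index.js", and InitCondition/CleanupCondition defaulting to "always()".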

View File

@@ -1,701 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.Actions.WorkflowParser;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(ActionManifestManagerWrapper))]
public interface IActionManifestManagerWrapper : IRunnerService
{
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
}
public sealed class ActionManifestManagerWrapper : RunnerService, IActionManifestManagerWrapper
{
private IActionManifestManagerLegacy _legacyManager;
private IActionManifestManager _newManager;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_legacyManager = hostContext.GetService<IActionManifestManagerLegacy>();
_newManager = hostContext.GetService<IActionManifestManager>();
}
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
return EvaluateAndCompare(
executionContext,
"Load",
() => _legacyManager.Load(executionContext, manifestFile),
() => ConvertToLegacyActionDefinitionData(_newManager.Load(executionContext, manifestFile)),
(legacyResult, newResult) => CompareActionDefinition(legacyResult, newResult));
}
public DictionaryContextData EvaluateCompositeOutputs(
IExecutionContext executionContext,
TemplateToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
{
return EvaluateAndCompare(
executionContext,
"EvaluateCompositeOutputs",
() => _legacyManager.EvaluateCompositeOutputs(executionContext, token, extraExpressionValues),
() => ConvertToLegacyContextData<DictionaryContextData>(_newManager.EvaluateCompositeOutputs(executionContext, ConvertToNewToken(token), ConvertToNewExpressionValues(extraExpressionValues))),
(legacyResult, newResult) => CompareDictionaryContextData(legacyResult, newResult));
}
public List<string> EvaluateContainerArguments(
IExecutionContext executionContext,
SequenceToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
{
return EvaluateAndCompare(
executionContext,
"EvaluateContainerArguments",
() => _legacyManager.EvaluateContainerArguments(executionContext, token, extraExpressionValues),
() => _newManager.EvaluateContainerArguments(executionContext, ConvertToNewToken(token) as GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.SequenceToken, ConvertToNewExpressionValues(extraExpressionValues)),
(legacyResult, newResult) => CompareLists(legacyResult, newResult, "ContainerArguments"));
}
public Dictionary<string, string> EvaluateContainerEnvironment(
IExecutionContext executionContext,
MappingToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
{
return EvaluateAndCompare(
executionContext,
"EvaluateContainerEnvironment",
() => _legacyManager.EvaluateContainerEnvironment(executionContext, token, extraExpressionValues),
() => _newManager.EvaluateContainerEnvironment(executionContext, ConvertToNewToken(token) as GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.MappingToken, ConvertToNewExpressionValues(extraExpressionValues)),
(legacyResult, newResult) =>
{
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
return CompareDictionaries(trace, legacyResult, newResult, "ContainerEnvironment");
});
}
public string EvaluateDefaultInput(
IExecutionContext executionContext,
string inputName,
TemplateToken token)
{
return EvaluateAndCompare(
executionContext,
"EvaluateDefaultInput",
() => _legacyManager.EvaluateDefaultInput(executionContext, inputName, token),
() => _newManager.EvaluateDefaultInput(executionContext, inputName, ConvertToNewToken(token)),
(legacyResult, newResult) => string.Equals(legacyResult, newResult, StringComparison.Ordinal));
}
// Conversion helper methods
private ActionDefinitionData ConvertToLegacyActionDefinitionData(ActionDefinitionDataNew newData)
{
if (newData == null)
{
return null;
}
return new ActionDefinitionData
{
Name = newData.Name,
Description = newData.Description,
Inputs = ConvertToLegacyToken<MappingToken>(newData.Inputs),
Deprecated = newData.Deprecated,
Execution = ConvertToLegacyExecution(newData.Execution)
};
}
private ActionExecutionData ConvertToLegacyExecution(ActionExecutionData execution)
{
if (execution == null)
{
return null;
}
// Handle different execution types
if (execution is ContainerActionExecutionDataNew containerNew)
{
return new ContainerActionExecutionData
{
Image = containerNew.Image,
EntryPoint = containerNew.EntryPoint,
Arguments = ConvertToLegacyToken<SequenceToken>(containerNew.Arguments),
Environment = ConvertToLegacyToken<MappingToken>(containerNew.Environment),
Pre = containerNew.Pre,
Post = containerNew.Post,
InitCondition = containerNew.InitCondition,
CleanupCondition = containerNew.CleanupCondition
};
}
else if (execution is CompositeActionExecutionDataNew compositeNew)
{
return new CompositeActionExecutionData
{
Steps = ConvertToLegacySteps(compositeNew.Steps),
Outputs = ConvertToLegacyToken<MappingToken>(compositeNew.Outputs)
};
}
else
{
// NodeJS and Plugin execution data don't use the new token types, so return them as-is
return execution;
}
}
private List<GitHub.DistributedTask.Pipelines.ActionStep> ConvertToLegacySteps(List<GitHub.Actions.WorkflowParser.IStep> newSteps)
{
if (newSteps == null)
{
return null;
}
// Serialize new steps and deserialize to old steps
var json = StringUtil.ConvertToJson(newSteps, Newtonsoft.Json.Formatting.None);
return StringUtil.ConvertFromJson<List<GitHub.DistributedTask.Pipelines.ActionStep>>(json);
}
private T ConvertToLegacyToken<T>(GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken newToken) where T : TemplateToken
{
if (newToken == null)
{
return null;
}
// Serialize and deserialize to convert between token types
var json = StringUtil.ConvertToJson(newToken, Newtonsoft.Json.Formatting.None);
return StringUtil.ConvertFromJson<T>(json);
}
private GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken ConvertToNewToken(TemplateToken legacyToken)
{
if (legacyToken == null)
{
return null;
}
var json = StringUtil.ConvertToJson(legacyToken, Newtonsoft.Json.Formatting.None);
return StringUtil.ConvertFromJson<GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken>(json);
}
private IDictionary<string, GitHub.Actions.Expressions.Data.ExpressionData> ConvertToNewExpressionValues(IDictionary<string, PipelineContextData> legacyValues)
{
if (legacyValues == null)
{
return null;
}
var json = StringUtil.ConvertToJson(legacyValues, Newtonsoft.Json.Formatting.None);
return StringUtil.ConvertFromJson<IDictionary<string, GitHub.Actions.Expressions.Data.ExpressionData>>(json);
}
private T ConvertToLegacyContextData<T>(GitHub.Actions.Expressions.Data.ExpressionData newData) where T : PipelineContextData
{
if (newData == null)
{
return null;
}
var json = StringUtil.ConvertToJson(newData, Newtonsoft.Json.Formatting.None);
return StringUtil.ConvertFromJson<T>(json);
}
// Comparison helper methods
private TLegacy EvaluateAndCompare<TLegacy, TNew>(
IExecutionContext context,
string methodName,
Func<TLegacy> legacyEvaluator,
Func<TNew> newEvaluator,
Func<TLegacy, TNew, bool> resultComparer)
{
// Legacy only?
if (!((context.Global.Variables.GetBoolean(Constants.Runner.Features.CompareWorkflowParser) ?? false)
|| StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("ACTIONS_RUNNER_COMPARE_WORKFLOW_PARSER"))))
{
return legacyEvaluator();
}
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
// Legacy evaluator
var legacyException = default(Exception);
var legacyResult = default(TLegacy);
try
{
legacyResult = legacyEvaluator();
}
catch (Exception ex)
{
legacyException = ex;
}
// Compare with new evaluator
try
{
ArgUtil.NotNull(context, nameof(context));
trace.Info(methodName);
// New evaluator
var newException = default(Exception);
var newResult = default(TNew);
try
{
newResult = newEvaluator();
}
catch (Exception ex)
{
newException = ex;
}
// Compare results or exceptions
if (legacyException != null || newException != null)
{
// Either one or both threw exceptions - compare them
if (!CompareExceptions(trace, legacyException, newException))
{
trace.Info($"{methodName} exception mismatch");
RecordMismatch(context, $"{methodName}");
}
}
else
{
// Both succeeded - compare results
// Skip comparison if new implementation returns null (not yet implemented)
if (newResult != null && !resultComparer(legacyResult, newResult))
{
trace.Info($"{methodName} mismatch");
RecordMismatch(context, $"{methodName}");
}
}
}
catch (Exception ex)
{
trace.Info($"Comparison failed: {ex.Message}");
RecordComparisonError(context, $"{methodName}: {ex.Message}");
}
// Re-throw legacy exception if any
if (legacyException != null)
{
throw legacyException;
}
return legacyResult;
}
private void RecordMismatch(IExecutionContext context, string methodName)
{
if (!context.Global.HasActionManifestMismatch)
{
context.Global.HasActionManifestMismatch = true;
var telemetry = new JobTelemetry { Type = JobTelemetryType.General, Message = $"ActionManifestMismatch: {methodName}" };
context.Global.JobTelemetry.Add(telemetry);
}
}
private void RecordComparisonError(IExecutionContext context, string errorDetails)
{
if (!context.Global.HasActionManifestMismatch)
{
context.Global.HasActionManifestMismatch = true;
var telemetry = new JobTelemetry { Type = JobTelemetryType.General, Message = $"ActionManifestComparisonError: {errorDetails}" };
context.Global.JobTelemetry.Add(telemetry);
}
}
private bool CompareActionDefinition(ActionDefinitionData legacyResult, ActionDefinitionData newResult)
{
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
if (legacyResult == null && newResult == null)
{
return true;
}
if (legacyResult == null || newResult == null)
{
trace.Info($"CompareActionDefinition mismatch - one result is null (legacy={legacyResult == null}, new={newResult == null})");
return false;
}
if (!string.Equals(legacyResult.Name, newResult.Name, StringComparison.Ordinal))
{
trace.Info($"CompareActionDefinition mismatch - Name differs (legacy='{legacyResult.Name}', new='{newResult.Name}')");
return false;
}
if (!string.Equals(legacyResult.Description, newResult.Description, StringComparison.Ordinal))
{
trace.Info($"CompareActionDefinition mismatch - Description differs (legacy='{legacyResult.Description}', new='{newResult.Description}')");
return false;
}
// Compare Inputs token
var legacyInputsJson = legacyResult.Inputs != null ? StringUtil.ConvertToJson(legacyResult.Inputs) : null;
var newInputsJson = newResult.Inputs != null ? StringUtil.ConvertToJson(newResult.Inputs) : null;
if (!string.Equals(legacyInputsJson, newInputsJson, StringComparison.Ordinal))
{
trace.Info($"CompareActionDefinition mismatch - Inputs differ");
return false;
}
// Compare Deprecated
if (!CompareDictionaries(trace, legacyResult.Deprecated, newResult.Deprecated, "Deprecated"))
{
return false;
}
// Compare Execution
if (!CompareExecution(trace, legacyResult.Execution, newResult.Execution))
{
return false;
}
return true;
}
private bool CompareExecution(Tracing trace, ActionExecutionData legacy, ActionExecutionData newExecution)
{
if (legacy == null && newExecution == null)
{
return true;
}
if (legacy == null || newExecution == null)
{
trace.Info($"CompareExecution mismatch - one is null (legacy={legacy == null}, new={newExecution == null})");
return false;
}
if (legacy.GetType() != newExecution.GetType())
{
trace.Info($"CompareExecution mismatch - different types (legacy={legacy.GetType().Name}, new={newExecution.GetType().Name})");
return false;
}
// Compare based on type
if (legacy is NodeJSActionExecutionData legacyNode && newExecution is NodeJSActionExecutionData newNode)
{
return CompareNodeJSExecution(trace, legacyNode, newNode);
}
else if (legacy is ContainerActionExecutionData legacyContainer && newExecution is ContainerActionExecutionData newContainer)
{
return CompareContainerExecution(trace, legacyContainer, newContainer);
}
else if (legacy is CompositeActionExecutionData legacyComposite && newExecution is CompositeActionExecutionData newComposite)
{
return CompareCompositeExecution(trace, legacyComposite, newComposite);
}
else if (legacy is PluginActionExecutionData legacyPlugin && newExecution is PluginActionExecutionData newPlugin)
{
return ComparePluginExecution(trace, legacyPlugin, newPlugin);
}
return true;
}
private bool CompareNodeJSExecution(Tracing trace, NodeJSActionExecutionData legacy, NodeJSActionExecutionData newExecution)
{
if (!string.Equals(legacy.NodeVersion, newExecution.NodeVersion, StringComparison.Ordinal))
{
trace.Info($"CompareNodeJSExecution mismatch - NodeVersion differs (legacy='{legacy.NodeVersion}', new='{newExecution.NodeVersion}')");
return false;
}
if (!string.Equals(legacy.Script, newExecution.Script, StringComparison.Ordinal))
{
trace.Info($"CompareNodeJSExecution mismatch - Script differs (legacy='{legacy.Script}', new='{newExecution.Script}')");
return false;
}
if (!string.Equals(legacy.Pre, newExecution.Pre, StringComparison.Ordinal))
{
trace.Info($"CompareNodeJSExecution mismatch - Pre differs");
return false;
}
if (!string.Equals(legacy.Post, newExecution.Post, StringComparison.Ordinal))
{
trace.Info($"CompareNodeJSExecution mismatch - Post differs");
return false;
}
if (!string.Equals(legacy.InitCondition, newExecution.InitCondition, StringComparison.Ordinal))
{
trace.Info($"CompareNodeJSExecution mismatch - InitCondition differs");
return false;
}
if (!string.Equals(legacy.CleanupCondition, newExecution.CleanupCondition, StringComparison.Ordinal))
{
trace.Info($"CompareNodeJSExecution mismatch - CleanupCondition differs");
return false;
}
return true;
}
private bool CompareContainerExecution(Tracing trace, ContainerActionExecutionData legacy, ContainerActionExecutionData newExecution)
{
if (!string.Equals(legacy.Image, newExecution.Image, StringComparison.Ordinal))
{
trace.Info($"CompareContainerExecution mismatch - Image differs");
return false;
}
if (!string.Equals(legacy.EntryPoint, newExecution.EntryPoint, StringComparison.Ordinal))
{
trace.Info($"CompareContainerExecution mismatch - EntryPoint differs");
return false;
}
// Compare Arguments token
var legacyArgsJson = legacy.Arguments != null ? StringUtil.ConvertToJson(legacy.Arguments) : null;
var newArgsJson = newExecution.Arguments != null ? StringUtil.ConvertToJson(newExecution.Arguments) : null;
if (!string.Equals(legacyArgsJson, newArgsJson, StringComparison.Ordinal))
{
trace.Info($"CompareContainerExecution mismatch - Arguments differ");
return false;
}
// Compare Environment token
var legacyEnvJson = legacy.Environment != null ? StringUtil.ConvertToJson(legacy.Environment) : null;
var newEnvJson = newExecution.Environment != null ? StringUtil.ConvertToJson(newExecution.Environment) : null;
if (!string.Equals(legacyEnvJson, newEnvJson, StringComparison.Ordinal))
{
trace.Info($"CompareContainerExecution mismatch - Environment differs");
return false;
}
return true;
}
private bool CompareCompositeExecution(Tracing trace, CompositeActionExecutionData legacy, CompositeActionExecutionData newExecution)
{
// Compare Steps
if (legacy.Steps?.Count != newExecution.Steps?.Count)
{
trace.Info($"CompareCompositeExecution mismatch - Steps.Count differs (legacy={legacy.Steps?.Count}, new={newExecution.Steps?.Count})");
return false;
}
// Compare Outputs token
var legacyOutputsJson = legacy.Outputs != null ? StringUtil.ConvertToJson(legacy.Outputs) : null;
var newOutputsJson = newExecution.Outputs != null ? StringUtil.ConvertToJson(newExecution.Outputs) : null;
if (!string.Equals(legacyOutputsJson, newOutputsJson, StringComparison.Ordinal))
{
trace.Info($"CompareCompositeExecution mismatch - Outputs differ");
return false;
}
return true;
}
private bool ComparePluginExecution(Tracing trace, PluginActionExecutionData legacy, PluginActionExecutionData newExecution)
{
if (!string.Equals(legacy.Plugin, newExecution.Plugin, StringComparison.Ordinal))
{
trace.Info($"ComparePluginExecution mismatch - Plugin differs");
return false;
}
return true;
}
private bool CompareDictionaryContextData(DictionaryContextData legacy, DictionaryContextData newData)
{
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
if (legacy == null && newData == null)
{
return true;
}
if (legacy == null || newData == null)
{
trace.Info($"CompareDictionaryContextData mismatch - one is null (legacy={legacy == null}, new={newData == null})");
return false;
}
var legacyJson = StringUtil.ConvertToJson(legacy);
var newJson = StringUtil.ConvertToJson(newData);
if (!string.Equals(legacyJson, newJson, StringComparison.Ordinal))
{
trace.Info($"CompareDictionaryContextData mismatch");
return false;
}
return true;
}
private bool CompareLists(IList<string> legacyList, IList<string> newList, string fieldName)
{
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
if (legacyList == null && newList == null)
{
return true;
}
if (legacyList == null || newList == null)
{
trace.Info($"CompareLists mismatch - {fieldName} - one is null (legacy={legacyList == null}, new={newList == null})");
return false;
}
if (legacyList.Count != newList.Count)
{
trace.Info($"CompareLists mismatch - {fieldName}.Count differs (legacy={legacyList.Count}, new={newList.Count})");
return false;
}
for (int i = 0; i < legacyList.Count; i++)
{
if (!string.Equals(legacyList[i], newList[i], StringComparison.Ordinal))
{
trace.Info($"CompareLists mismatch - {fieldName}[{i}] differs (legacy='{legacyList[i]}', new='{newList[i]}')");
return false;
}
}
return true;
}
private bool CompareDictionaries(Tracing trace, IDictionary<string, string> legacyDict, IDictionary<string, string> newDict, string fieldName)
{
if (legacyDict == null && newDict == null)
{
return true;
}
if (legacyDict == null || newDict == null)
{
trace.Info($"CompareDictionaries mismatch - {fieldName} - one is null (legacy={legacyDict == null}, new={newDict == null})");
return false;
}
if (legacyDict is Dictionary<string, string> legacyTypedDict && newDict is Dictionary<string, string> newTypedDict)
{
if (!object.Equals(legacyTypedDict.Comparer, newTypedDict.Comparer))
{
trace.Info($"CompareDictionaries mismatch - {fieldName} - different comparers (legacy={legacyTypedDict.Comparer.GetType().Name}, new={newTypedDict.Comparer.GetType().Name})");
return false;
}
}
if (legacyDict.Count != newDict.Count)
{
trace.Info($"CompareDictionaries mismatch - {fieldName}.Count differs (legacy={legacyDict.Count}, new={newDict.Count})");
return false;
}
foreach (var kvp in legacyDict)
{
if (!newDict.TryGetValue(kvp.Key, out var newValue))
{
trace.Info($"CompareDictionaries mismatch - {fieldName} - key '{kvp.Key}' missing in new result");
return false;
}
if (!string.Equals(kvp.Value, newValue, StringComparison.Ordinal))
{
trace.Info($"CompareDictionaries mismatch - {fieldName}['{kvp.Key}'] differs (legacy='{kvp.Value}', new='{newValue}')");
return false;
}
}
return true;
}
private bool CompareExceptions(Tracing trace, Exception legacyException, Exception newException)
{
if (legacyException == null && newException == null)
{
return true;
}
if (legacyException == null || newException == null)
{
trace.Info($"CompareExceptions mismatch - one exception is null (legacy={legacyException == null}, new={newException == null})");
return false;
}
// Compare exception messages recursively (including inner exceptions)
var legacyMessages = GetExceptionMessages(legacyException);
var newMessages = GetExceptionMessages(newException);
if (legacyMessages.Count != newMessages.Count)
{
trace.Info($"CompareExceptions mismatch - different number of exception messages (legacy={legacyMessages.Count}, new={newMessages.Count})");
return false;
}
for (int i = 0; i < legacyMessages.Count; i++)
{
if (!string.Equals(legacyMessages[i], newMessages[i], StringComparison.Ordinal))
{
trace.Info($"CompareExceptions mismatch - exception messages differ at level {i} (legacy='{legacyMessages[i]}', new='{newMessages[i]}')");
return false;
}
}
return true;
}
private IList<string> GetExceptionMessages(Exception ex)
{
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
var messages = new List<string>();
var toProcess = new Queue<Exception>();
toProcess.Enqueue(ex);
int count = 0;
while (toProcess.Count > 0 && count < 50)
{
var current = toProcess.Dequeue();
if (current == null) continue;
messages.Add(current.Message);
count++;
// Special handling for AggregateException - enqueue all inner exceptions
if (current is AggregateException aggregateEx)
{
foreach (var innerEx in aggregateEx.InnerExceptions)
{
if (innerEx != null && count < 50)
{
toProcess.Enqueue(innerEx);
}
}
}
else if (current.InnerException != null)
{
toProcess.Enqueue(current.InnerException);
}
// Failsafe: if we have too many exceptions, stop and return what we have
if (count >= 50)
{
trace.Info("CompareExceptions failsafe triggered - too many exceptions (50+)");
break;
}
}
return messages;
}
}
}
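
The heart of the deleted wrapper is the shadow comparison in EvaluateAndCompare. Stripped of the telemetry and tracing plumbing (and with the exception-message comparison elided), the pattern reduces to the sketch below; the names are illustrative, not the wrapper's exact code:

using System;

static TLegacy RunWithShadow<TLegacy, TNew>(
    Func<TLegacy> legacy,
    Func<TNew> shadow,
    Func<TLegacy, TNew, bool> same,
    Action<string> recordMismatch)
{
    TLegacy legacyResult = default;
    Exception legacyError = null;
    try { legacyResult = legacy(); } catch (Exception ex) { legacyError = ex; }

    try
    {
        var shadowResult = shadow();
        // Compare only when both sides produced a result; a null shadow result
        // means "not yet implemented" and is skipped, as in the code above.
        if (legacyError == null && shadowResult != null && !same(legacyResult, shadowResult))
        {
            recordMismatch("result mismatch");
        }
        else if (legacyError != null)
        {
            recordMismatch("legacy threw, shadow succeeded");
        }
    }
    catch (Exception)
    {
        // The shadow path must never fail the job; mismatches are only recorded.
        if (legacyError == null) recordMismatch("shadow threw, legacy succeeded");
    }

    if (legacyError != null) throw legacyError; // the legacy result always wins
    return legacyResult;
}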

View File

@@ -206,7 +206,7 @@ namespace GitHub.Runner.Worker
// Merge the default inputs from the definition
if (definition.Data?.Inputs != null)
{
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
var manifestManager = HostContext.GetService<IActionManifestManager>();
foreach (var input in definition.Data.Inputs)
{
string key = input.Key.AssertString("action input name").Value;
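
This one-line change re-points the input-default merge from the comparison wrapper back to the manager directly. Resolution goes through the [ServiceLocator] attribute seen on each interface; below is a simplified, reflection-based sketch of the lookup, assuming only what the attributes in this diff show (the real HostContext.GetService also calls Initialize on the IRunnerService instance and caches it as a singleton):

static T GetServiceSketch<T>() where T : class
{
    var attr = (ServiceLocatorAttribute)Attribute.GetCustomAttribute(
        typeof(T), typeof(ServiceLocatorAttribute));
    // Instantiate the Default implementation named by the attribute,
    // e.g. typeof(ActionManifestManager) for IActionManifestManager.
    return (T)Activator.CreateInstance(attr.Default);
}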

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,480 +0,0 @@
using System;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using Newtonsoft.Json;
namespace GitHub.Runner.Worker.Dap
{
/// <summary>
/// DAP Server interface for handling Debug Adapter Protocol connections.
/// </summary>
[ServiceLocator(Default = typeof(DapServer))]
public interface IDapServer : IRunnerService, IDisposable
{
/// <summary>
/// Starts the DAP TCP server on the specified port.
/// </summary>
/// <param name="port">The port to listen on (default: 4711)</param>
/// <param name="cancellationToken">Cancellation token</param>
Task StartAsync(int port, CancellationToken cancellationToken);
/// <summary>
/// Blocks until a debug client connects.
/// </summary>
/// <param name="cancellationToken">Cancellation token</param>
Task WaitForConnectionAsync(CancellationToken cancellationToken);
/// <summary>
/// Stops the DAP server and closes all connections.
/// </summary>
Task StopAsync();
/// <summary>
/// Sets the debug session that will handle DAP requests.
/// </summary>
/// <param name="session">The debug session</param>
void SetSession(IDapDebugSession session);
/// <summary>
/// Sends an event to the connected debug client.
/// </summary>
/// <param name="evt">The event to send</param>
void SendEvent(Event evt);
/// <summary>
/// Gets whether a debug client is currently connected.
/// </summary>
bool IsConnected { get; }
}
/// <summary>
/// TCP server implementation of the Debug Adapter Protocol.
/// Handles message framing (Content-Length headers) and JSON serialization.
/// </summary>
public sealed class DapServer : RunnerService, IDapServer
{
private const string ContentLengthHeader = "Content-Length: ";
private const string HeaderTerminator = "\r\n\r\n";
private TcpListener _listener;
private TcpClient _client;
private NetworkStream _stream;
private IDapDebugSession _session;
private CancellationTokenSource _cts;
private Task _messageLoopTask;
private TaskCompletionSource<bool> _connectionTcs;
private int _nextSeq = 1;
private readonly object _sendLock = new object();
private bool _disposed = false;
public bool IsConnected => _client?.Connected == true;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
Trace.Info("DapServer initialized");
}
public void SetSession(IDapDebugSession session)
{
_session = session;
Trace.Info("Debug session set");
}
public async Task StartAsync(int port, CancellationToken cancellationToken)
{
Trace.Info($"Starting DAP server on port {port}");
_cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
_connectionTcs = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
try
{
_listener = new TcpListener(IPAddress.Loopback, port);
_listener.Start();
Trace.Info($"DAP server listening on 127.0.0.1:{port}");
// Start accepting connections in the background
_ = AcceptConnectionAsync(_cts.Token);
}
catch (Exception ex)
{
Trace.Error($"Failed to start DAP server: {ex.Message}");
throw;
}
await Task.CompletedTask;
}
private async Task AcceptConnectionAsync(CancellationToken cancellationToken)
{
try
{
Trace.Info("Waiting for debug client connection...");
// Use cancellation-aware accept
using (cancellationToken.Register(() => _listener?.Stop()))
{
_client = await _listener.AcceptTcpClientAsync();
}
if (cancellationToken.IsCancellationRequested)
{
return;
}
_stream = _client.GetStream();
var remoteEndPoint = _client.Client.RemoteEndPoint;
Trace.Info($"Debug client connected from {remoteEndPoint}");
// Signal that connection is established
_connectionTcs.TrySetResult(true);
// Start processing messages
_messageLoopTask = ProcessMessagesAsync(_cts.Token);
}
catch (ObjectDisposedException) when (cancellationToken.IsCancellationRequested)
{
// Expected when cancellation stops the listener
Trace.Info("Connection accept cancelled");
_connectionTcs.TrySetCanceled();
}
catch (SocketException ex) when (cancellationToken.IsCancellationRequested)
{
// Expected when cancellation stops the listener
Trace.Info($"Connection accept cancelled: {ex.Message}");
_connectionTcs.TrySetCanceled();
}
catch (Exception ex)
{
Trace.Error($"Error accepting connection: {ex.Message}");
_connectionTcs.TrySetException(ex);
}
}
public async Task WaitForConnectionAsync(CancellationToken cancellationToken)
{
Trace.Info("Waiting for debug client to connect...");
using (cancellationToken.Register(() => _connectionTcs.TrySetCanceled()))
{
await _connectionTcs.Task;
}
Trace.Info("Debug client connected");
}
public async Task StopAsync()
{
Trace.Info("Stopping DAP server");
_cts?.Cancel();
// Wait for message loop to complete
if (_messageLoopTask != null)
{
try
{
await _messageLoopTask;
}
catch (OperationCanceledException)
{
// Expected
}
catch (Exception ex)
{
Trace.Warning($"Message loop ended with error: {ex.Message}");
}
}
// Clean up resources
_stream?.Close();
_client?.Close();
_listener?.Stop();
Trace.Info("DAP server stopped");
}
public void SendEvent(Event evt)
{
if (!IsConnected)
{
Trace.Warning($"Cannot send event '{evt.EventType}': no client connected");
return;
}
try
{
lock (_sendLock)
{
evt.Seq = _nextSeq++;
SendMessageInternal(evt);
}
Trace.Info($"Sent event: {evt.EventType}");
}
catch (Exception ex)
{
Trace.Error($"Failed to send event '{evt.EventType}': {ex.Message}");
}
}
private async Task ProcessMessagesAsync(CancellationToken cancellationToken)
{
Trace.Info("Starting DAP message processing loop");
try
{
while (!cancellationToken.IsCancellationRequested && IsConnected)
{
var json = await ReadMessageAsync(cancellationToken);
if (json == null)
{
Trace.Info("Client disconnected (end of stream)");
break;
}
await ProcessMessageAsync(json, cancellationToken);
}
}
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
{
Trace.Info("Message processing cancelled");
}
catch (IOException ex)
{
Trace.Info($"Connection closed: {ex.Message}");
}
catch (Exception ex)
{
Trace.Error($"Error in message loop: {ex}");
}
Trace.Info("DAP message processing loop ended");
}
private async Task ProcessMessageAsync(string json, CancellationToken cancellationToken)
{
Request request = null;
try
{
// Parse the incoming message
request = JsonConvert.DeserializeObject<Request>(json);
if (request == null || request.Type != "request")
{
Trace.Warning($"Received non-request message: {json}");
return;
}
Trace.Info($"Received request: seq={request.Seq}, command={request.Command}");
// Dispatch to session for handling
if (_session == null)
{
Trace.Error("No debug session configured");
SendErrorResponse(request, "No debug session configured");
return;
}
var response = await _session.HandleRequestAsync(request);
response.RequestSeq = request.Seq;
response.Command = request.Command;
response.Type = "response";
lock (_sendLock)
{
response.Seq = _nextSeq++;
SendMessageInternal(response);
}
Trace.Info($"Sent response: seq={response.Seq}, command={response.Command}, success={response.Success}");
}
catch (JsonException ex)
{
Trace.Error($"Failed to parse request: {ex.Message}");
Trace.Error($"JSON: {json}");
}
catch (Exception ex)
{
Trace.Error($"Error processing request: {ex}");
if (request != null)
{
SendErrorResponse(request, ex.Message);
}
}
}
private void SendErrorResponse(Request request, string message)
{
var response = new Response
{
Type = "response",
RequestSeq = request.Seq,
Command = request.Command,
Success = false,
Message = message,
Body = new ErrorResponseBody
{
Error = new Message
{
Id = 1,
Format = message,
ShowUser = true
}
}
};
lock (_sendLock)
{
response.Seq = _nextSeq++;
SendMessageInternal(response);
}
}
/// <summary>
/// Reads a DAP message from the stream.
/// DAP uses HTTP-like message framing: Content-Length: N\r\n\r\n{json}
/// </summary>
private async Task<string> ReadMessageAsync(CancellationToken cancellationToken)
{
// Read headers until we find Content-Length
var headerBuilder = new StringBuilder();
int contentLength = -1;
while (true)
{
var line = await ReadLineAsync(cancellationToken);
if (line == null)
{
// End of stream
return null;
}
if (line.Length == 0)
{
// Empty line marks end of headers
break;
}
headerBuilder.AppendLine(line);
if (line.StartsWith(ContentLengthHeader, StringComparison.OrdinalIgnoreCase))
{
var lengthStr = line.Substring(ContentLengthHeader.Length).Trim();
if (!int.TryParse(lengthStr, out contentLength))
{
throw new InvalidDataException($"Invalid Content-Length: {lengthStr}");
}
}
}
if (contentLength < 0)
{
throw new InvalidDataException("Missing Content-Length header");
}
// Read the JSON body
var buffer = new byte[contentLength];
var totalRead = 0;
while (totalRead < contentLength)
{
var bytesRead = await _stream.ReadAsync(buffer, totalRead, contentLength - totalRead, cancellationToken);
if (bytesRead == 0)
{
throw new EndOfStreamException("Connection closed while reading message body");
}
totalRead += bytesRead;
}
var json = Encoding.UTF8.GetString(buffer);
Trace.Verbose($"Received: {json}");
return json;
}
/// <summary>
/// Reads a line from the stream (terminated by \r\n).
/// </summary>
private async Task<string> ReadLineAsync(CancellationToken cancellationToken)
{
var lineBuilder = new StringBuilder();
var buffer = new byte[1];
var previousWasCr = false;
while (true)
{
var bytesRead = await _stream.ReadAsync(buffer, 0, 1, cancellationToken);
if (bytesRead == 0)
{
// End of stream
return lineBuilder.Length > 0 ? lineBuilder.ToString() : null;
}
var c = (char)buffer[0];
if (c == '\n' && previousWasCr)
{
// Found \r\n, return the line (without the \r)
if (lineBuilder.Length > 0 && lineBuilder[lineBuilder.Length - 1] == '\r')
{
lineBuilder.Length--;
}
return lineBuilder.ToString();
}
previousWasCr = (c == '\r');
lineBuilder.Append(c);
}
}
/// <summary>
/// Sends a DAP message to the stream with Content-Length framing.
/// Must be called within the _sendLock.
/// </summary>
private void SendMessageInternal(ProtocolMessage message)
{
var json = JsonConvert.SerializeObject(message, new JsonSerializerSettings
{
NullValueHandling = NullValueHandling.Ignore
});
var bodyBytes = Encoding.UTF8.GetBytes(json);
var header = $"Content-Length: {bodyBytes.Length}\r\n\r\n";
var headerBytes = Encoding.UTF8.GetBytes(header);
_stream.Write(headerBytes, 0, headerBytes.Length);
_stream.Write(bodyBytes, 0, bodyBytes.Length);
_stream.Flush();
Trace.Verbose($"Sent: {json}");
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
private void Dispose(bool disposing)
{
if (_disposed)
{
return;
}
if (disposing)
{
_cts?.Cancel();
_stream?.Dispose();
_client?.Dispose();
_listener?.Stop();
_cts?.Dispose();
}
_disposed = true;
}
}
}
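
Wire-format recap for the framing implemented by ReadMessageAsync and SendMessageInternal above: an ASCII Content-Length header counting the UTF-8 body bytes, a blank line, then the JSON payload. A minimal client-side write, assuming an already-open NetworkStream named stream:

using System.Text;

var body = Encoding.UTF8.GetBytes("{\"seq\":1,\"type\":\"request\",\"command\":\"initialize\"}");
var header = Encoding.UTF8.GetBytes($"Content-Length: {body.Length}\r\n\r\n");
stream.Write(header, 0, header.Length);
stream.Write(body, 0, body.Length);
stream.Flush();
// Over the wire: "Content-Length: <N>\r\n\r\n{...json...}"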

View File

@@ -1,293 +0,0 @@
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.Runner.Common;
namespace GitHub.Runner.Worker.Dap
{
/// <summary>
/// Provides DAP variable information from the execution context.
/// Maps workflow contexts (github, env, runner, job, steps, secrets) to DAP scopes and variables.
/// </summary>
public sealed class DapVariableProvider
{
// Well-known scope names that map to top-level contexts
private static readonly string[] ScopeNames = { "github", "env", "runner", "job", "steps", "secrets", "inputs", "vars", "matrix", "needs" };
// Reserved variable reference ranges for scopes (1-100)
private const int ScopeReferenceBase = 1;
private const int ScopeReferenceMax = 100;
// Dynamic variable references start after scope range
private const int DynamicReferenceBase = 101;
private readonly IHostContext _hostContext;
private readonly Dictionary<int, (PipelineContextData Data, string Path)> _variableReferences = new();
private int _nextVariableReference = DynamicReferenceBase;
public DapVariableProvider(IHostContext hostContext)
{
_hostContext = hostContext;
}
/// <summary>
/// Resets the variable reference state. Call this when the execution context changes.
/// </summary>
public void Reset()
{
_variableReferences.Clear();
_nextVariableReference = DynamicReferenceBase;
}
/// <summary>
/// Gets the list of scopes for a given execution context.
/// Each scope represents a top-level context like 'github', 'env', etc.
/// </summary>
public List<Scope> GetScopes(IExecutionContext context, int frameId)
{
var scopes = new List<Scope>();
if (context?.ExpressionValues == null)
{
return scopes;
}
for (int i = 0; i < ScopeNames.Length; i++)
{
var scopeName = ScopeNames[i];
if (context.ExpressionValues.TryGetValue(scopeName, out var value) && value != null)
{
var variablesRef = ScopeReferenceBase + i;
var scope = new Scope
{
Name = scopeName,
VariablesReference = variablesRef,
Expensive = false,
// Secrets get a special presentation hint
PresentationHint = scopeName == "secrets" ? "registers" : null
};
// Count named variables if it's a dictionary
if (value is DictionaryContextData dict)
{
scope.NamedVariables = dict.Count;
}
else if (value is CaseSensitiveDictionaryContextData csDict)
{
scope.NamedVariables = csDict.Count;
}
scopes.Add(scope);
}
}
return scopes;
}
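// Illustrative example (hypothetical values): with the ScopeNames ordering above,
// "github" maps to VariablesReference 1, "env" to 2, "runner" to 3, and so on;
// references for nested containers are handed out dynamically starting at 101.
//
//   var scopes = provider.GetScopes(context, frameId: 0);
//   // scopes[0].Name == "github", scopes[0].VariablesReference == 1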
/// <summary>
/// Gets variables for a given variable reference.
/// </summary>
public List<Variable> GetVariables(IExecutionContext context, int variablesReference)
{
var variables = new List<Variable>();
if (context?.ExpressionValues == null)
{
return variables;
}
PipelineContextData data = null;
string basePath = null;
bool isSecretsScope = false;
// Check if this is a scope reference (1-100)
if (variablesReference >= ScopeReferenceBase && variablesReference <= ScopeReferenceMax)
{
var scopeIndex = variablesReference - ScopeReferenceBase;
if (scopeIndex < ScopeNames.Length)
{
var scopeName = ScopeNames[scopeIndex];
isSecretsScope = scopeName == "secrets";
if (context.ExpressionValues.TryGetValue(scopeName, out data))
{
basePath = scopeName;
}
}
}
// Check dynamic references
else if (_variableReferences.TryGetValue(variablesReference, out var refData))
{
data = refData.Data;
basePath = refData.Path;
// Check if we're inside the secrets scope
isSecretsScope = basePath?.StartsWith("secrets", StringComparison.OrdinalIgnoreCase) == true;
}
if (data == null)
{
return variables;
}
// Convert the data to variables
ConvertToVariables(data, basePath, isSecretsScope, variables);
return variables;
}
/// <summary>
/// Converts PipelineContextData to DAP Variable objects.
/// </summary>
private void ConvertToVariables(PipelineContextData data, string basePath, bool isSecretsScope, List<Variable> variables)
{
switch (data)
{
case DictionaryContextData dict:
ConvertDictionaryToVariables(dict, basePath, isSecretsScope, variables);
break;
case CaseSensitiveDictionaryContextData csDict:
ConvertCaseSensitiveDictionaryToVariables(csDict, basePath, isSecretsScope, variables);
break;
case ArrayContextData array:
ConvertArrayToVariables(array, basePath, isSecretsScope, variables);
break;
default:
// Scalar value - shouldn't typically get here for a container
break;
}
}
private void ConvertDictionaryToVariables(DictionaryContextData dict, string basePath, bool isSecretsScope, List<Variable> variables)
{
foreach (var pair in dict)
{
var variable = CreateVariable(pair.Key, pair.Value, basePath, isSecretsScope);
variables.Add(variable);
}
}
private void ConvertCaseSensitiveDictionaryToVariables(CaseSensitiveDictionaryContextData dict, string basePath, bool isSecretsScope, List<Variable> variables)
{
foreach (var pair in dict)
{
var variable = CreateVariable(pair.Key, pair.Value, basePath, isSecretsScope);
variables.Add(variable);
}
}
private void ConvertArrayToVariables(ArrayContextData array, string basePath, bool isSecretsScope, List<Variable> variables)
{
for (int i = 0; i < array.Count; i++)
{
var item = array[i];
var variable = CreateVariable($"[{i}]", item, basePath, isSecretsScope);
variable.Name = $"[{i}]";
variables.Add(variable);
}
}
private Variable CreateVariable(string name, PipelineContextData value, string basePath, bool isSecretsScope)
{
// Indexer-style names like "[0]" join without a dot so paths render as "steps[0]" rather than "steps.[0]".
var childPath = string.IsNullOrEmpty(basePath)
? name
: name.StartsWith("[", StringComparison.Ordinal) ? $"{basePath}{name}" : $"{basePath}.{name}";
var variable = new Variable
{
Name = name,
EvaluateName = $"${{{{ {childPath} }}}}"
};
if (value == null)
{
variable.Value = "null";
variable.Type = "null";
variable.VariablesReference = 0;
return variable;
}
switch (value)
{
case StringContextData str:
if (isSecretsScope)
{
// Always mask secrets regardless of value
variable.Value = "[REDACTED]";
}
else
{
// Mask any secret values that might be in non-secret contexts
variable.Value = MaskSecrets(str.Value);
}
variable.Type = "string";
variable.VariablesReference = 0;
break;
case NumberContextData num:
variable.Value = num.ToString();
variable.Type = "number";
variable.VariablesReference = 0;
break;
case BooleanContextData boolVal:
variable.Value = boolVal.Value ? "true" : "false";
variable.Type = "boolean";
variable.VariablesReference = 0;
break;
case DictionaryContextData dict:
variable.Value = $"Object ({dict.Count} properties)";
variable.Type = "object";
variable.VariablesReference = RegisterVariableReference(dict, childPath);
variable.NamedVariables = dict.Count;
break;
case CaseSensitiveDictionaryContextData csDict:
variable.Value = $"Object ({csDict.Count} properties)";
variable.Type = "object";
variable.VariablesReference = RegisterVariableReference(csDict, childPath);
variable.NamedVariables = csDict.Count;
break;
case ArrayContextData array:
variable.Value = $"Array ({array.Count} items)";
variable.Type = "array";
variable.VariablesReference = RegisterVariableReference(array, childPath);
variable.IndexedVariables = array.Count;
break;
default:
// Unknown type - convert to string representation
var rawValue = value.ToJToken()?.ToString() ?? "unknown";
variable.Value = MaskSecrets(rawValue);
variable.Type = value.GetType().Name;
variable.VariablesReference = 0;
break;
}
return variable;
}
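// Example (hedged): for basePath "github" and name "repository", the child path is
// "github.repository" and EvaluateName is "${{ github.repository }}", a string a
// DAP client can feed back through an 'evaluate' request or reuse in a workflow.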
/// <summary>
/// Registers a nested variable reference and returns its ID.
/// </summary>
private int RegisterVariableReference(PipelineContextData data, string path)
{
var reference = _nextVariableReference++;
_variableReferences[reference] = (data, path);
return reference;
}
/// <summary>
/// Masks any secret values in the string using the host context's secret masker.
/// </summary>
private string MaskSecrets(string value)
{
if (string.IsNullOrEmpty(value))
{
return value ?? string.Empty;
}
return _hostContext.SecretMasker.MaskSecrets(value);
}
}
}

View File

@@ -522,10 +522,6 @@ namespace GitHub.Runner.Worker
if (annotation != null)
{
stepResult.Annotations.Add(annotation.Value);
if (annotation.Value.IsInfrastructureIssue && string.IsNullOrEmpty(Global.InfrastructureFailureCategory))
{
Global.InfrastructureFailureCategory = issue.Category;
}
}
});
@@ -1306,14 +1302,10 @@ namespace GitHub.Runner.Worker
UpdateGlobalStepsContext();
}
internal IPipelineTemplateEvaluator ToPipelineTemplateEvaluatorInternal(ObjectTemplating.ITraceWriter traceWriter = null)
{
return new PipelineTemplateEvaluatorWrapper(HostContext, this, traceWriter);
}
private static void NoOp()
{
}
}
// The Error/Warning/etc methods are created as extension methods to simplify unit testing.
@@ -1343,9 +1335,9 @@ namespace GitHub.Runner.Worker
}
// Do not add a format string overload. See comment on ExecutionContext.Write().
public static void InfrastructureError(this IExecutionContext context, string message, string category = null)
public static void InfrastructureError(this IExecutionContext context, string message)
{
var issue = new Issue() { Type = IssueType.Error, Message = message, IsInfrastructureIssue = true, Category = category };
var issue = new Issue() { Type = IssueType.Error, Message = message, IsInfrastructureIssue = true };
context.AddIssue(issue, ExecutionContextLogOptions.Default);
}
@@ -1394,15 +1386,8 @@ namespace GitHub.Runner.Worker
return new[] { new KeyValuePair<string, object>(nameof(IExecutionContext), context) };
}
public static IPipelineTemplateEvaluator ToPipelineTemplateEvaluator(this IExecutionContext context, ObjectTemplating.ITraceWriter traceWriter = null)
public static PipelineTemplateEvaluator ToPipelineTemplateEvaluator(this IExecutionContext context, ObjectTemplating.ITraceWriter traceWriter = null)
{
// Use the comparison wrapper when the feature flag or environment variable is set
if ((context.Global.Variables.GetBoolean(Constants.Runner.Features.CompareWorkflowParser) ?? false) || StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("ACTIONS_RUNNER_COMPARE_WORKFLOW_PARSER")))
{
return (context as ExecutionContext).ToPipelineTemplateEvaluatorInternal(traceWriter);
}
// Legacy
if (traceWriter == null)
{
traceWriter = context.ToTemplateTraceWriter();

View File

@@ -22,13 +22,4 @@ namespace GitHub.Runner.Worker.Expressions
return true;
}
}
public sealed class NewAlwaysFunction : GitHub.Actions.Expressions.Sdk.Function
{
protected override Object EvaluateCore(GitHub.Actions.Expressions.Sdk.EvaluationContext context, out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
{
resultMemory = null;
return true;
}
}
}

View File

@@ -28,18 +28,4 @@ namespace GitHub.Runner.Worker.Expressions
return jobStatus == ActionResult.Cancelled;
}
}
public sealed class NewCancelledFunction : GitHub.Actions.Expressions.Sdk.Function
{
protected sealed override object EvaluateCore(GitHub.Actions.Expressions.Sdk.EvaluationContext evaluationContext, out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
{
resultMemory = null;
var templateContext = evaluationContext.State as GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateContext;
ArgUtil.NotNull(templateContext, nameof(templateContext));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Cancelled;
}
}
}

View File

@@ -39,29 +39,4 @@ namespace GitHub.Runner.Worker.Expressions
}
}
}
public sealed class NewFailureFunction : GitHub.Actions.Expressions.Sdk.Function
{
protected sealed override object EvaluateCore(GitHub.Actions.Expressions.Sdk.EvaluationContext evaluationContext, out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
{
resultMemory = null;
var templateContext = evaluationContext.State as GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateContext;
ArgUtil.NotNull(templateContext, nameof(templateContext));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
// Decide based on 'action_status' for composite MAIN steps and 'job.status' for pre, post and job-level steps
var isCompositeMainStep = executionContext.IsEmbedded && executionContext.Stage == ActionRunStage.Main;
if (isCompositeMainStep)
{
ActionResult actionStatus = EnumUtil.TryParse<ActionResult>(executionContext.GetGitHubContext("action_status")) ?? ActionResult.Success;
return actionStatus == ActionResult.Failure;
}
else
{
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Failure;
}
}
}
}

View File

@@ -143,137 +143,4 @@ namespace GitHub.Runner.Worker.Expressions
}
}
}
public sealed class NewHashFilesFunction : GitHub.Actions.Expressions.Sdk.Function
{
private const int _hashFileTimeoutSeconds = 120;
protected sealed override Object EvaluateCore(
GitHub.Actions.Expressions.Sdk.EvaluationContext context,
out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
{
resultMemory = null;
var templateContext = context.State as GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateContext;
ArgUtil.NotNull(templateContext, nameof(templateContext));
templateContext.ExpressionValues.TryGetValue(PipelineTemplateConstants.GitHub, out var githubContextData);
ArgUtil.NotNull(githubContextData, nameof(githubContextData));
var githubContext = githubContextData as GitHub.Actions.Expressions.Data.DictionaryExpressionData;
ArgUtil.NotNull(githubContext, nameof(githubContext));
if (!githubContext.TryGetValue(PipelineTemplateConstants.HostWorkspace, out var workspace))
{
githubContext.TryGetValue(PipelineTemplateConstants.Workspace, out workspace);
}
ArgUtil.NotNull(workspace, nameof(workspace));
var workspaceData = workspace as GitHub.Actions.Expressions.Data.StringExpressionData;
ArgUtil.NotNull(workspaceData, nameof(workspaceData));
string githubWorkspace = workspaceData.Value;
bool followSymlink = false;
List<string> patterns = new();
var firstParameter = true;
foreach (var parameter in Parameters)
{
var parameterString = parameter.Evaluate(context).ConvertToString();
if (firstParameter)
{
firstParameter = false;
if (parameterString.StartsWith("--"))
{
if (string.Equals(parameterString, "--follow-symbolic-links", StringComparison.OrdinalIgnoreCase))
{
followSymlink = true;
continue;
}
else
{
throw new ArgumentOutOfRangeException($"Invalid glob option {parameterString}, available option: '--follow-symbolic-links'.");
}
}
}
patterns.Add(parameterString);
}
context.Trace.Info($"Search root directory: '{githubWorkspace}'");
context.Trace.Info($"Search pattern: '{string.Join(", ", patterns)}'");
string binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
string runnerRoot = new DirectoryInfo(binDir).Parent.FullName;
string node = Path.Combine(runnerRoot, "externals", NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;
var p = new ProcessInvoker(new NewHashFilesTrace(context.Trace));
p.ErrorDataReceived += ((_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
{
hashResult = data.Data.Substring(10, data.Data.Length - 20);
context.Trace.Info($"Hash result: '{hashResult}'");
}
else
{
context.Trace.Info(data.Data);
}
});
p.OutputDataReceived += ((_, data) =>
{
context.Trace.Info(data.Data);
});
var env = new Dictionary<string, string>();
if (followSymlink)
{
env["followSymbolicLinks"] = "true";
}
env["patterns"] = string.Join(Environment.NewLine, patterns);
using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(_hashFileTimeoutSeconds)))
{
try
{
int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: false,
cancellationToken: tokenSource.Token).GetAwaiter().GetResult();
if (exitCode != 0)
{
throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Unable to hash files under directory '{githubWorkspace}'");
}
}
catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
{
throw new TimeoutException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') couldn't finish within {_hashFileTimeoutSeconds} seconds.");
}
return hashResult;
}
}
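// Hedged usage note: in workflow expressions this function backs calls such as
//   hashFiles('**/package-lock.json')
//   hashFiles('--follow-symbolic-links', '**/*.lock')
// where the optional leading flag toggles symlink traversal and the remaining
// parameters are glob patterns resolved against the workspace root.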
private sealed class NewHashFilesTrace : ITraceWriter
{
private GitHub.Actions.Expressions.ITraceWriter _trace;
public NewHashFilesTrace(GitHub.Actions.Expressions.ITraceWriter trace)
{
_trace = trace;
}
public void Info(string message)
{
_trace.Info(message);
}
public void Verbose(string message)
{
_trace.Info(message);
}
}
}
}

View File

@@ -39,29 +39,4 @@ namespace GitHub.Runner.Worker.Expressions
}
}
}
public sealed class NewSuccessFunction : GitHub.Actions.Expressions.Sdk.Function
{
protected sealed override object EvaluateCore(GitHub.Actions.Expressions.Sdk.EvaluationContext evaluationContext, out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
{
resultMemory = null;
var templateContext = evaluationContext.State as GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateContext;
ArgUtil.NotNull(templateContext, nameof(templateContext));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
// Decide based on 'action_status' for composite MAIN steps and 'job.status' for pre, post and job-level steps
var isCompositeMainStep = executionContext.IsEmbedded && executionContext.Stage == ActionRunStage.Main;
if (isCompositeMainStep)
{
ActionResult actionStatus = EnumUtil.TryParse<ActionResult>(executionContext.GetGitHubContext("action_status")) ?? ActionResult.Success;
return actionStatus == ActionResult.Success;
}
else
{
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Success;
}
}
}
}

View File

@@ -27,9 +27,6 @@ namespace GitHub.Runner.Worker
public StepsContext StepsContext { get; set; }
public Variables Variables { get; set; }
public bool WriteDebug { get; set; }
public string InfrastructureFailureCategory { get; set; }
public JObject ContainerHookState { get; set; }
public bool HasTemplateEvaluatorMismatch { get; set; }
public bool HasActionManifestMismatch { get; set; }
}
}

View File

@@ -187,7 +187,7 @@ namespace GitHub.Runner.Worker.Handlers
if (Data.Outputs != null)
{
// Evaluate the outputs in the steps context to easily retrieve the values
var actionManifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
var actionManifestManager = HostContext.GetService<IActionManifestManager>();
// Format ExpressionValues to Dictionary<string, PipelineContextData>
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);

View File

@@ -135,7 +135,7 @@ namespace GitHub.Runner.Worker.Handlers
var extraExpressionValues = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
extraExpressionValues["inputs"] = inputsContext;
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
var manifestManager = HostContext.GetService<IActionManifestManager>();
if (Data.Arguments != null)
{
container.ContainerEntryPointArgs = "";
@@ -191,13 +191,11 @@ namespace GitHub.Runner.Worker.Handlers
ArgUtil.Directory(tempWorkflowDirectory, nameof(tempWorkflowDirectory));
container.MountVolumes.Add(new MountVolume("/var/run/docker.sock", "/var/run/docker.sock"));
container.MountVolumes.Add(new MountVolume(tempDirectory, "/github/runner_temp"));
container.MountVolumes.Add(new MountVolume(tempHomeDirectory, "/github/home"));
container.MountVolumes.Add(new MountVolume(tempWorkflowDirectory, "/github/workflow"));
container.MountVolumes.Add(new MountVolume(tempFileCommandDirectory, "/github/file_commands"));
container.MountVolumes.Add(new MountVolume(defaultWorkingDirectory, "/github/workspace"));
container.AddPathTranslateMapping(tempDirectory, "/github/runner_temp");
container.AddPathTranslateMapping(tempHomeDirectory, "/github/home");
container.AddPathTranslateMapping(tempWorkflowDirectory, "/github/workflow");
container.AddPathTranslateMapping(tempFileCommandDirectory, "/github/file_commands");
@@ -239,14 +237,6 @@ namespace GitHub.Runner.Worker.Handlers
Environment["ACTIONS_RESULTS_URL"] = resultsUrl;
}
if (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.SetOrchestrationIdEnvForActions) ?? false)
{
if (ExecutionContext.Global.Variables.TryGetValue(Constants.Variables.System.OrchestrationId, out var orchestrationId) && !string.IsNullOrEmpty(orchestrationId))
{
Environment["ACTIONS_ORCHESTRATION_ID"] = orchestrationId;
}
}
foreach (var variable in this.Environment)
{
container.ContainerEnvironmentVariables[variable.Key] = container.TranslateToContainerPath(variable.Value);

View File

@@ -77,14 +77,6 @@ namespace GitHub.Runner.Worker.Handlers
Environment["ACTIONS_CACHE_SERVICE_V2"] = bool.TrueString;
}
if (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.SetOrchestrationIdEnvForActions) ?? false)
{
if (ExecutionContext.Global.Variables.TryGetValue(Constants.Variables.System.OrchestrationId, out var orchestrationId) && !string.IsNullOrEmpty(orchestrationId))
{
Environment["ACTIONS_ORCHESTRATION_ID"] = orchestrationId;
}
}
// Resolve the target script.
string target = null;
if (stage == ActionRunStage.Main)

View File

@@ -318,14 +318,6 @@ namespace GitHub.Runner.Worker.Handlers
Environment["ACTIONS_ID_TOKEN_REQUEST_TOKEN"] = systemConnection.Authorization.Parameters[EndpointAuthorizationParameters.AccessToken];
}
if (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.SetOrchestrationIdEnvForActions) ?? false)
{
if (ExecutionContext.Global.Variables.TryGetValue(Constants.Variables.System.OrchestrationId, out var orchestrationId) && !string.IsNullOrEmpty(orchestrationId))
{
Environment["ACTIONS_ORCHESTRATION_ID"] = orchestrationId;
}
}
ExecutionContext.Debug($"{fileName} {arguments}");
Inputs.TryGetValue("standardInInput", out var standardInInput);

View File

@@ -112,13 +112,6 @@ namespace GitHub.Runner.Worker
groupName = "Machine Setup Info";
}
// Do not output internal groups
if (groupName.StartsWith("_internal_", StringComparison.OrdinalIgnoreCase))
{
jobContext.Global.JobTelemetry.Add(new JobTelemetry() { Type = JobTelemetryType.General, Message = info.Detail });
continue;
}
context.Output($"##[group]{groupName}");
var multiLines = info.Detail.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)
@@ -407,10 +400,6 @@ namespace GitHub.Runner.Worker
if (snapshotRequest != null)
{
var snapshotOperationProvider = HostContext.GetService<ISnapshotOperationProvider>();
// Check that the runner is capable of taking a snapshot
snapshotOperationProvider.RunSnapshotPreflightChecks(context);
// Add postjob step to write snapshot file
jobContext.RegisterPostJobStep(new JobExtensionRunner(
runAsync: (executionContext, _) => snapshotOperationProvider.CreateSnapshotRequestAsync(executionContext, snapshotRequest),
condition: snapshotRequest.Condition,

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -13,7 +13,6 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Dap;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using Sdk.RSWebApi.Contracts;
@@ -113,7 +112,6 @@ namespace GitHub.Runner.Worker
IExecutionContext jobContext = null;
CancellationTokenRegistration? runnerShutdownRegistration = null;
IDapServer dapServer = null;
try
{
// Create the job execution context.
@@ -161,47 +159,6 @@ namespace GitHub.Runner.Worker
if (jobContext.Global.WriteDebug)
{
jobContext.SetRunnerContext("debug", "1");
// Start DAP server for interactive debugging
// This allows debugging workflow jobs with DAP-compatible editors (nvim-dap, VS Code, etc.)
try
{
var port = 4711;
var portEnv = Environment.GetEnvironmentVariable("ACTIONS_DAP_PORT");
if (!string.IsNullOrEmpty(portEnv) && int.TryParse(portEnv, out var customPort))
{
port = customPort;
}
dapServer = HostContext.GetService<IDapServer>();
var debugSession = HostContext.GetService<IDapDebugSession>();
// Wire up the server and session
dapServer.SetSession(debugSession);
debugSession.SetDapServer(dapServer);
await dapServer.StartAsync(port, jobRequestCancellationToken);
Trace.Info($"DAP server listening on port {port}");
jobContext.Output($"DAP debugger waiting for connection on port {port}...");
jobContext.Output($"Connect your DAP client (nvim-dap, VS Code, etc.) to attach to this job.");
// Block until debugger connects
await dapServer.WaitForConnectionAsync(jobRequestCancellationToken);
Trace.Info("DAP client connected, continuing job execution");
jobContext.Output("Debugger connected. Job execution will pause before each step.");
}
catch (OperationCanceledException)
{
// Job was cancelled before debugger connected
Trace.Info("Job cancelled while waiting for DAP client connection");
}
catch (Exception ex)
{
// Log but don't fail the job if DAP server fails to start
Trace.Warning($"Failed to start DAP server: {ex.Message}");
jobContext.Warning($"DAP debugging unavailable: {ex.Message}");
dapServer = null;
}
}
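// Usage note (hedged): export ACTIONS_DAP_PORT before starting the runner to
// override the default port 4711, e.g.
//   ACTIONS_DAP_PORT=5005 ./run.sh
// then attach the DAP client (nvim-dap, VS Code, etc.) to localhost:5005.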
jobContext.SetRunnerContext("os", VarUtil.OS);
@@ -302,20 +259,6 @@ namespace GitHub.Runner.Worker
runnerShutdownRegistration = null;
}
// Stop DAP server if it was started
if (dapServer != null)
{
try
{
Trace.Info("Stopping DAP server");
await dapServer.StopAsync();
}
catch (Exception ex)
{
Trace.Warning($"Error stopping DAP server: {ex.Message}");
}
}
await ShutdownQueue(throwOnFailure: false);
}
}
@@ -378,7 +321,7 @@ namespace GitHub.Runner.Worker
{
try
{
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, jobContext.JobOutputs, jobContext.Global.StepsResult, jobContext.Global.JobAnnotations, environmentUrl, telemetry, billingOwnerId: message.BillingOwnerId, infrastructureFailureCategory: jobContext.Global.InfrastructureFailureCategory, default);
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, jobContext.JobOutputs, jobContext.Global.StepsResult, jobContext.Global.JobAnnotations, environmentUrl, telemetry, billingOwnerId: message.BillingOwnerId, default);
return result;
}
catch (VssUnauthorizedException ex)

View File

@@ -1,679 +0,0 @@
using System;
using System.Collections.Generic;
using GitHub.Actions.WorkflowParser;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
namespace GitHub.Runner.Worker
{
internal sealed class PipelineTemplateEvaluatorWrapper : IPipelineTemplateEvaluator
{
private PipelineTemplateEvaluator _legacyEvaluator;
private WorkflowTemplateEvaluator _newEvaluator;
private IExecutionContext _context;
private Tracing _trace;
public PipelineTemplateEvaluatorWrapper(
IHostContext hostContext,
IExecutionContext context,
ObjectTemplating.ITraceWriter traceWriter = null)
{
ArgUtil.NotNull(hostContext, nameof(hostContext));
ArgUtil.NotNull(context, nameof(context));
_context = context;
_trace = hostContext.GetTrace(nameof(PipelineTemplateEvaluatorWrapper));
if (traceWriter == null)
{
traceWriter = context.ToTemplateTraceWriter();
}
// Legacy evaluator
var schema = PipelineTemplateSchemaFactory.GetSchema();
_legacyEvaluator = new PipelineTemplateEvaluator(traceWriter, schema, context.Global.FileTable)
{
MaxErrorMessageLength = int.MaxValue, // Don't truncate error messages otherwise we might not scrub secrets correctly
};
// New evaluator
var newTraceWriter = new GitHub.Actions.WorkflowParser.ObjectTemplating.EmptyTraceWriter();
_newEvaluator = new WorkflowTemplateEvaluator(newTraceWriter, context.Global.FileTable, features: null)
{
MaxErrorMessageLength = int.MaxValue, // Don't truncate error messages otherwise we might not scrub secrets correctly
};
}
public bool EvaluateStepContinueOnError(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
return EvaluateAndCompare(
"EvaluateStepContinueOnError",
() => _legacyEvaluator.EvaluateStepContinueOnError(token, contextData, expressionFunctions),
() => _newEvaluator.EvaluateStepContinueOnError(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
(legacyResult, newResult) => legacyResult == newResult);
}
public string EvaluateStepDisplayName(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
return EvaluateAndCompare(
"EvaluateStepDisplayName",
() => _legacyEvaluator.EvaluateStepDisplayName(token, contextData, expressionFunctions),
() => _newEvaluator.EvaluateStepName(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
(legacyResult, newResult) => string.Equals(legacyResult, newResult, StringComparison.Ordinal));
}
public Dictionary<string, string> EvaluateStepEnvironment(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
StringComparer keyComparer)
{
return EvaluateAndCompare(
"EvaluateStepEnvironment",
() => _legacyEvaluator.EvaluateStepEnvironment(token, contextData, expressionFunctions, keyComparer),
() => _newEvaluator.EvaluateStepEnvironment(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions), keyComparer),
CompareStepEnvironment);
}
public bool EvaluateStepIf(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<string, object>> expressionState)
{
return EvaluateAndCompare(
"EvaluateStepIf",
() => _legacyEvaluator.EvaluateStepIf(token, contextData, expressionFunctions, expressionState),
() => _newEvaluator.EvaluateStepIf(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions), expressionState),
(legacyResult, newResult) => legacyResult == newResult);
}
public Dictionary<string, string> EvaluateStepInputs(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
return EvaluateAndCompare(
"EvaluateStepInputs",
() => _legacyEvaluator.EvaluateStepInputs(token, contextData, expressionFunctions),
() => _newEvaluator.EvaluateStepInputs(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
(legacyResult, newResult) => CompareDictionaries(legacyResult, newResult, "StepInputs"));
}
public int EvaluateStepTimeout(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
return EvaluateAndCompare(
"EvaluateStepTimeout",
() => _legacyEvaluator.EvaluateStepTimeout(token, contextData, expressionFunctions),
() => _newEvaluator.EvaluateStepTimeout(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
(legacyResult, newResult) => legacyResult == newResult);
}
public GitHub.DistributedTask.Pipelines.JobContainer EvaluateJobContainer(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
return EvaluateAndCompare(
"EvaluateJobContainer",
() => _legacyEvaluator.EvaluateJobContainer(token, contextData, expressionFunctions),
() => _newEvaluator.EvaluateJobContainer(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
CompareJobContainer);
}
public Dictionary<string, string> EvaluateJobOutput(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
return EvaluateAndCompare(
"EvaluateJobOutput",
() => _legacyEvaluator.EvaluateJobOutput(token, contextData, expressionFunctions),
() => _newEvaluator.EvaluateJobOutputs(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
(legacyResult, newResult) => CompareDictionaries(legacyResult, newResult, "JobOutput"));
}
public TemplateToken EvaluateEnvironmentUrl(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
return EvaluateAndCompare(
"EvaluateEnvironmentUrl",
() => _legacyEvaluator.EvaluateEnvironmentUrl(token, contextData, expressionFunctions),
() => _newEvaluator.EvaluateJobEnvironmentUrl(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
CompareEnvironmentUrl);
}
public Dictionary<string, string> EvaluateJobDefaultsRun(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
return EvaluateAndCompare(
"EvaluateJobDefaultsRun",
() => _legacyEvaluator.EvaluateJobDefaultsRun(token, contextData, expressionFunctions),
() => _newEvaluator.EvaluateJobDefaultsRun(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
(legacyResult, newResult) => CompareDictionaries(legacyResult, newResult, "JobDefaultsRun"));
}
public IList<KeyValuePair<string, GitHub.DistributedTask.Pipelines.JobContainer>> EvaluateJobServiceContainers(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
return EvaluateAndCompare(
"EvaluateJobServiceContainers",
() => _legacyEvaluator.EvaluateJobServiceContainers(token, contextData, expressionFunctions),
() => _newEvaluator.EvaluateJobServiceContainers(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
(legacyResult, newResult) => CompareJobServiceContainers(legacyResult, newResult));
}
public GitHub.DistributedTask.Pipelines.Snapshot EvaluateJobSnapshotRequest(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
return EvaluateAndCompare(
"EvaluateJobSnapshotRequest",
() => _legacyEvaluator.EvaluateJobSnapshotRequest(token, contextData, expressionFunctions),
() => _newEvaluator.EvaluateSnapshot(string.Empty, ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
CompareSnapshot);
}
private void RecordMismatch(string methodName)
{
if (!_context.Global.HasTemplateEvaluatorMismatch)
{
_context.Global.HasTemplateEvaluatorMismatch = true;
var telemetry = new JobTelemetry { Type = JobTelemetryType.General, Message = $"TemplateEvaluatorMismatch: {methodName}" };
_context.Global.JobTelemetry.Add(telemetry);
}
}
private void RecordComparisonError(string errorDetails)
{
if (!_context.Global.HasTemplateEvaluatorMismatch)
{
_context.Global.HasTemplateEvaluatorMismatch = true;
var telemetry = new JobTelemetry { Type = JobTelemetryType.General, Message = $"TemplateEvaluatorComparisonError: {errorDetails}" };
_context.Global.JobTelemetry.Add(telemetry);
}
}
private TLegacy EvaluateAndCompare<TLegacy, TNew>(
string methodName,
Func<TLegacy> legacyEvaluator,
Func<TNew> newEvaluator,
Func<TLegacy, TNew, bool> resultComparer)
{
// Legacy evaluator
var legacyException = default(Exception);
var legacyResult = default(TLegacy);
try
{
legacyResult = legacyEvaluator();
}
catch (Exception ex)
{
legacyException = ex;
}
// Compare with new evaluator
try
{
ArgUtil.NotNull(_context, nameof(_context));
ArgUtil.NotNull(_newEvaluator, nameof(_newEvaluator));
_trace.Info(methodName);
// New evaluator
var newException = default(Exception);
var newResult = default(TNew);
try
{
newResult = newEvaluator();
}
catch (Exception ex)
{
newException = ex;
}
// Compare results or exceptions
if (legacyException != null || newException != null)
{
// Either one or both threw exceptions - compare them
if (!CompareExceptions(legacyException, newException))
{
_trace.Info($"{methodName} exception mismatch");
RecordMismatch($"{methodName}");
}
}
else
{
// Both succeeded - compare results
if (!resultComparer(legacyResult, newResult))
{
_trace.Info($"{methodName} mismatch");
RecordMismatch($"{methodName}");
}
}
}
catch (Exception ex)
{
_trace.Info($"Comparison failed: {ex.Message}");
RecordComparisonError($"{methodName}: {ex.Message}");
}
// Re-throw legacy exception if any
if (legacyException != null)
{
throw legacyException;
}
return legacyResult;
}
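// Hedged usage sketch: callers use the wrapper exactly like the legacy evaluator;
// the new evaluator runs in the shadow and mismatches surface only as telemetry:
//
//   var timeout = evaluator.EvaluateStepTimeout(token, contextData, functions);
//   // Returns the legacy result (or rethrows the legacy exception) regardless of
//   // whether the new evaluator agreed.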
private GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken ConvertToken(
GitHub.DistributedTask.ObjectTemplating.Tokens.TemplateToken token)
{
if (token == null)
{
return null;
}
var json = StringUtil.ConvertToJson(token, Newtonsoft.Json.Formatting.None);
return StringUtil.ConvertFromJson<GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken>(json);
}
private GitHub.Actions.Expressions.Data.DictionaryExpressionData ConvertData(
GitHub.DistributedTask.Pipelines.ContextData.DictionaryContextData contextData)
{
if (contextData == null)
{
return null;
}
var json = StringUtil.ConvertToJson(contextData, Newtonsoft.Json.Formatting.None);
return StringUtil.ConvertFromJson<GitHub.Actions.Expressions.Data.DictionaryExpressionData>(json);
}
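// Design note (hedged): the legacy and new type hierarchies are distinct, so the
// conversions above go through a JSON round-trip rather than a field-by-field
// mapping, trading some CPU for resilience to additive schema changes.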
private IList<GitHub.Actions.Expressions.IFunctionInfo> ConvertFunctions(
IList<GitHub.DistributedTask.Expressions2.IFunctionInfo> expressionFunctions)
{
if (expressionFunctions == null)
{
return null;
}
var result = new List<GitHub.Actions.Expressions.IFunctionInfo>();
foreach (var func in expressionFunctions)
{
GitHub.Actions.Expressions.IFunctionInfo newFunc = func.Name switch
{
"always" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewAlwaysFunction>(func.Name, func.MinParameters, func.MaxParameters),
"cancelled" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewCancelledFunction>(func.Name, func.MinParameters, func.MaxParameters),
"failure" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewFailureFunction>(func.Name, func.MinParameters, func.MaxParameters),
"success" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewSuccessFunction>(func.Name, func.MinParameters, func.MaxParameters),
"hashFiles" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewHashFilesFunction>(func.Name, func.MinParameters, func.MaxParameters),
_ => throw new NotSupportedException($"Expression function '{func.Name}' is not supported for conversion")
};
result.Add(newFunc);
}
return result;
}
private bool CompareStepEnvironment(
Dictionary<string, string> legacyResult,
Dictionary<string, string> newResult)
{
return CompareDictionaries(legacyResult, newResult, "StepEnvironment");
}
private bool CompareEnvironmentUrl(
TemplateToken legacyResult,
GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken newResult)
{
var legacyJson = legacyResult != null ? Newtonsoft.Json.JsonConvert.SerializeObject(legacyResult, Newtonsoft.Json.Formatting.None) : null;
var newJson = newResult != null ? Newtonsoft.Json.JsonConvert.SerializeObject(newResult, Newtonsoft.Json.Formatting.None) : null;
return legacyJson == newJson;
}
private bool CompareJobContainer(
GitHub.DistributedTask.Pipelines.JobContainer legacyResult,
GitHub.Actions.WorkflowParser.JobContainer newResult)
{
if (legacyResult == null && newResult == null)
{
return true;
}
if (legacyResult == null || newResult == null)
{
_trace.Info($"CompareJobContainer mismatch - one result is null (legacy={legacyResult == null}, new={newResult == null})");
return false;
}
if (!string.Equals(legacyResult.Image, newResult.Image, StringComparison.Ordinal))
{
_trace.Info($"CompareJobContainer mismatch - Image differs (legacy='{legacyResult.Image}', new='{newResult.Image}')");
return false;
}
if (!string.Equals(legacyResult.Options, newResult.Options, StringComparison.Ordinal))
{
_trace.Info($"CompareJobContainer mismatch - Options differs (legacy='{legacyResult.Options}', new='{newResult.Options}')");
return false;
}
if (!CompareDictionaries(legacyResult.Environment, newResult.Environment, "Environment"))
{
return false;
}
if (!CompareLists(legacyResult.Volumes, newResult.Volumes, "Volumes"))
{
return false;
}
if (!CompareLists(legacyResult.Ports, newResult.Ports, "Ports"))
{
return false;
}
if (!CompareCredentials(legacyResult.Credentials, newResult.Credentials))
{
return false;
}
return true;
}
private bool CompareCredentials(
GitHub.DistributedTask.Pipelines.ContainerRegistryCredentials legacyCreds,
GitHub.Actions.WorkflowParser.ContainerRegistryCredentials newCreds)
{
if (legacyCreds == null && newCreds == null)
{
return true;
}
if (legacyCreds == null || newCreds == null)
{
_trace.Info($"CompareCredentials mismatch - one is null (legacy={legacyCreds == null}, new={newCreds == null})");
return false;
}
if (!string.Equals(legacyCreds.Username, newCreds.Username, StringComparison.Ordinal))
{
_trace.Info($"CompareCredentials mismatch - Credentials.Username differs (legacy='{legacyCreds.Username}', new='{newCreds.Username}')");
return false;
}
if (!string.Equals(legacyCreds.Password, newCreds.Password, StringComparison.Ordinal))
{
_trace.Info($"CompareCredentials mismatch - Credentials.Password differs");
return false;
}
return true;
}
private bool CompareLists(IList<string> legacyList, IList<string> newList, string fieldName)
{
if (legacyList == null && newList == null)
{
return true;
}
if (legacyList == null || newList == null)
{
_trace.Info($"CompareLists mismatch - {fieldName} - one is null (legacy={legacyList == null}, new={newList == null})");
return false;
}
if (legacyList.Count != newList.Count)
{
_trace.Info($"CompareLists mismatch - {fieldName}.Count differs (legacy={legacyList.Count}, new={newList.Count})");
return false;
}
for (int i = 0; i < legacyList.Count; i++)
{
if (!string.Equals(legacyList[i], newList[i], StringComparison.Ordinal))
{
_trace.Info($"CompareLists mismatch - {fieldName}[{i}] differs (legacy='{legacyList[i]}', new='{newList[i]}')");
return false;
}
}
return true;
}
private bool CompareDictionaries(IDictionary<string, string> legacyDict, IDictionary<string, string> newDict, string fieldName)
{
if (legacyDict == null && newDict == null)
{
return true;
}
if (legacyDict == null || newDict == null)
{
_trace.Info($"CompareDictionaries mismatch - {fieldName} - one is null (legacy={legacyDict == null}, new={newDict == null})");
return false;
}
if (legacyDict is Dictionary<String, String> legacyTypedDict && newDict is Dictionary<String, String> newTypedDict)
{
if (!object.Equals(legacyTypedDict.Comparer, newTypedDict.Comparer))
{
_trace.Info($"CompareDictionaries mismatch - {fieldName} - different comparers (legacy={legacyTypedDict.Comparer.GetType().Name}, new={newTypedDict.Comparer.GetType().Name})");
return false;
}
}
if (legacyDict.Count != newDict.Count)
{
_trace.Info($"CompareDictionaries mismatch - {fieldName}.Count differs (legacy={legacyDict.Count}, new={newDict.Count})");
return false;
}
foreach (var kvp in legacyDict)
{
if (!newDict.TryGetValue(kvp.Key, out var newValue))
{
_trace.Info($"CompareDictionaries mismatch - {fieldName} - key '{kvp.Key}' missing in new result");
return false;
}
if (!string.Equals(kvp.Value, newValue, StringComparison.Ordinal))
{
_trace.Info($"CompareDictionaries mismatch - {fieldName}['{kvp.Key}'] differs (legacy='{kvp.Value}', new='{newValue}')");
return false;
}
}
return true;
}
private bool CompareJobServiceContainers(
IList<KeyValuePair<string, GitHub.DistributedTask.Pipelines.JobContainer>> legacyResult,
IList<KeyValuePair<string, GitHub.Actions.WorkflowParser.JobContainer>> newResult)
{
if (legacyResult == null && newResult == null)
{
return true;
}
if (legacyResult == null || newResult == null)
{
_trace.Info($"CompareJobServiceContainers mismatch - one result is null (legacy={legacyResult == null}, new={newResult == null})");
return false;
}
if (legacyResult.Count != newResult.Count)
{
_trace.Info($"CompareJobServiceContainers mismatch - ServiceContainers.Count differs (legacy={legacyResult.Count}, new={newResult.Count})");
return false;
}
for (int i = 0; i < legacyResult.Count; i++)
{
var legacyKvp = legacyResult[i];
var newKvp = newResult[i];
if (!string.Equals(legacyKvp.Key, newKvp.Key, StringComparison.Ordinal))
{
_trace.Info($"CompareJobServiceContainers mismatch - ServiceContainers[{i}].Key differs (legacy='{legacyKvp.Key}', new='{newKvp.Key}')");
return false;
}
if (!CompareJobContainer(legacyKvp.Value, newKvp.Value))
{
_trace.Info($"CompareJobServiceContainers mismatch - ServiceContainers['{legacyKvp.Key}']");
return false;
}
}
return true;
}
private bool CompareSnapshot(
GitHub.DistributedTask.Pipelines.Snapshot legacyResult,
GitHub.Actions.WorkflowParser.Snapshot newResult)
{
if (legacyResult == null && newResult == null)
{
return true;
}
if (legacyResult == null || newResult == null)
{
_trace.Info($"CompareSnapshot mismatch - one is null (legacy={legacyResult == null}, new={newResult == null})");
return false;
}
if (!string.Equals(legacyResult.ImageName, newResult.ImageName, StringComparison.Ordinal))
{
_trace.Info($"CompareSnapshot mismatch - Snapshot.ImageName differs (legacy='{legacyResult.ImageName}', new='{newResult.ImageName}')");
return false;
}
if (!string.Equals(legacyResult.Version, newResult.Version, StringComparison.Ordinal))
{
_trace.Info($"CompareSnapshot mismatch - Snapshot.Version differs (legacy='{legacyResult.Version}', new='{newResult.Version}')");
return false;
}
// Compare Condition (legacy) vs If (new).
// Legacy exposes Condition as a string expression like "success()", while the new
// model exposes If as a BasicExpressionToken; the representations are not directly
// comparable, so only presence is checked - both null/empty or both set.
var legacyHasCondition = !string.IsNullOrEmpty(legacyResult.Condition);
var newHasIf = newResult.If != null;
if (legacyHasCondition != newHasIf)
{
_trace.Info($"CompareSnapshot mismatch - condition/if presence differs (legacy has condition={legacyHasCondition}, new has if={newHasIf})");
return false;
}
return true;
}
private bool CompareExceptions(Exception legacyException, Exception newException)
{
if (legacyException == null && newException == null)
{
return true;
}
if (legacyException == null || newException == null)
{
_trace.Info($"CompareExceptions mismatch - one exception is null (legacy={legacyException == null}, new={newException == null})");
return false;
}
// Compare exception messages recursively (including inner exceptions)
var legacyMessages = GetExceptionMessages(legacyException);
var newMessages = GetExceptionMessages(newException);
if (legacyMessages.Count != newMessages.Count)
{
_trace.Info($"CompareExceptions mismatch - different number of exception messages (legacy={legacyMessages.Count}, new={newMessages.Count})");
return false;
}
for (int i = 0; i < legacyMessages.Count; i++)
{
if (!string.Equals(legacyMessages[i], newMessages[i], StringComparison.Ordinal))
{
_trace.Info($"CompareExceptions mismatch - exception messages differ at level {i} (legacy='{legacyMessages[i]}', new='{newMessages[i]}')");
return false;
}
}
return true;
}
private IList<string> GetExceptionMessages(Exception ex)
{
var messages = new List<string>();
var toProcess = new Queue<Exception>();
toProcess.Enqueue(ex);
int count = 0;
while (toProcess.Count > 0 && count < 50)
{
var current = toProcess.Dequeue();
if (current == null) continue;
messages.Add(current.Message);
count++;
// Special handling for AggregateException - enqueue all inner exceptions
if (current is AggregateException aggregateEx)
{
foreach (var innerEx in aggregateEx.InnerExceptions)
{
if (innerEx != null && count < 50)
{
toProcess.Enqueue(innerEx);
}
}
}
else if (current.InnerException != null)
{
toProcess.Enqueue(current.InnerException);
}
// Failsafe: if we have too many exceptions, stop and return what we have
if (count >= 50)
{
_trace.Info("CompareExceptions failsafe triggered - too many exceptions (50+)");
break;
}
}
return messages;
}
}
}

View File

@@ -1,19 +1,15 @@
#nullable enable
using System;
using System.IO;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Handlers;
namespace GitHub.Runner.Worker;
[ServiceLocator(Default = typeof(SnapshotOperationProvider))]
public interface ISnapshotOperationProvider : IRunnerService
{
Task CreateSnapshotRequestAsync(IExecutionContext executionContext, Snapshot snapshotRequest);
void RunSnapshotPreflightChecks(IExecutionContext jobContext);
}
public class SnapshotOperationProvider : RunnerService, ISnapshotOperationProvider
@@ -28,32 +24,9 @@ public class SnapshotOperationProvider : RunnerService, ISnapshotOperationProvid
}
IOUtil.SaveObject(snapshotRequest, snapshotRequestFilePath);
executionContext.Output($"Image Name: {snapshotRequest.ImageName} Version: {snapshotRequest.Version}");
executionContext.Output($"Request written to: {snapshotRequestFilePath}");
executionContext.Output("This request will be processed after the job completes. You will not receive any feedback on the snapshot process within the workflow logs of this job.");
executionContext.Output("If the snapshot process is successful, you should see a new image with the requested name in the list of available custom images when creating a new GitHub-hosted Runner.");
return Task.CompletedTask;
}
public void RunSnapshotPreflightChecks(IExecutionContext context)
{
var shouldCheckRunnerEnvironment = context.Global.Variables.GetBoolean(Constants.Runner.Features.SnapshotPreflightHostedRunnerCheck) ?? false;
if (shouldCheckRunnerEnvironment &&
context.Global.Variables.TryGetValue(WellKnownDistributedTaskVariables.RunnerEnvironment, out var runnerEnvironment) &&
!string.IsNullOrEmpty(runnerEnvironment))
{
context.Debug($"Snapshot: RUNNER_ENVIRONMENT={runnerEnvironment}");
if (!string.Equals(runnerEnvironment, "github-hosted", StringComparison.OrdinalIgnoreCase))
{
throw new ArgumentException("Snapshot workflows must be run on a GitHub Hosted Runner");
}
}
var imageGenEnabled = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_IMAGE_GEN_ENABLED"));
context.Debug($"Snapshot: GITHUB_ACTIONS_IMAGE_GEN_ENABLED={imageGenEnabled}");
var shouldCheckImageGenPool = context.Global.Variables.GetBoolean(Constants.Runner.Features.SnapshotPreflightImageGenPoolCheck) ?? false;
if (shouldCheckImageGenPool && !imageGenEnabled)
{
throw new ArgumentException("Snapshot workflows must be run a hosted runner with Image Generation enabled");
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
@@ -10,7 +10,6 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Dap;
using GitHub.Runner.Worker.Expressions;
namespace GitHub.Runner.Worker
@@ -51,12 +50,6 @@ namespace GitHub.Runner.Worker
jobContext.JobContext.Status = (jobContext.Result ?? TaskResult.Succeeded).ToActionResult();
var scopeInputs = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
bool checkPostJobActions = false;
// Get debug session for DAP debugging support
// The session's IsActive property determines if debugging is actually enabled
var debugSession = HostContext.GetService<IDapDebugSession>();
bool isFirstStep = true;
while (jobContext.JobSteps.Count > 0 || !checkPostJobActions)
{
if (jobContext.JobSteps.Count == 0 && !checkPostJobActions)
@@ -188,14 +181,6 @@ namespace GitHub.Runner.Worker
}
}
// Pause for DAP debugger BEFORE step execution
// This happens after expression values are set up so the debugger can inspect variables
if (debugSession?.IsActive == true)
{
await debugSession.OnStepStartingAsync(step, jobContext, isFirstStep);
isFirstStep = false;
}
// Evaluate condition
step.ExecutionContext.Debug($"Evaluating condition for step: '{step.DisplayName}'");
var conditionTraceWriter = new ConditionTraceWriter(Trace, step.ExecutionContext);
@@ -268,17 +253,8 @@ namespace GitHub.Runner.Worker
Trace.Info($"No need for updating job result with current step result '{step.ExecutionContext.Result}'.");
}
// Notify DAP debugger AFTER step execution
if (debugSession?.IsActive == true)
{
debugSession.OnStepCompleted(step);
}
Trace.Info($"Current state: job state = '{jobContext.Result}'");
}
// Notify DAP debugger that the job has completed
debugSession?.OnJobCompleted();
}
private async Task RunStepAsync(IStep step, CancellationToken jobCancellationToken)

View File

@@ -1,14 +1,14 @@
using System;
using System.Collections.Generic;
using GitHub.Services.Common.Diagnostics;
using System;
using System.ComponentModel;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.Common.Diagnostics;
using System.Collections.Generic;
using System.Linq;
using GitHub.Services.Common.Internal;
namespace GitHub.Services.Common

View File

@@ -146,7 +146,6 @@ namespace GitHub.Services.Common
sockEx.SocketErrorCode == SocketError.TimedOut ||
sockEx.SocketErrorCode == SocketError.HostDown ||
sockEx.SocketErrorCode == SocketError.HostUnreachable ||
sockEx.SocketErrorCode == SocketError.HostNotFound ||
sockEx.SocketErrorCode == SocketError.TryAgain)
{
return true;

View File

@@ -9,7 +9,6 @@ namespace GitHub.DistributedTask.Expressions2
{
static ExpressionConstants()
{
AddFunction<Case>("case", 3, Byte.MaxValue);
AddFunction<Contains>("contains", 2, 2);
AddFunction<EndsWith>("endsWith", 2, 2);
AddFunction<Format>("format", 1, Byte.MaxValue);

View File

@@ -17,10 +17,9 @@ namespace GitHub.DistributedTask.Expressions2
String expression,
ITraceWriter trace,
IEnumerable<INamedValueInfo> namedValues,
IEnumerable<IFunctionInfo> functions,
Boolean allowCaseFunction = true)
IEnumerable<IFunctionInfo> functions)
{
var context = new ParseContext(expression, trace, namedValues, functions, allowCaseFunction);
var context = new ParseContext(expression, trace, namedValues, functions);
context.Trace.Info($"Parsing expression: <{expression}>");
return CreateTree(context);
}
@@ -350,10 +349,6 @@ namespace GitHub.DistributedTask.Expressions2
{
throw new ParseException(ParseExceptionKind.TooManyParameters, token: @operator, expression: context.Expression);
}
else if (functionInfo.Name.Equals("case", StringComparison.OrdinalIgnoreCase) && function.Parameters.Count % 2 == 0)
{
throw new ParseException(ParseExceptionKind.EvenParameters, token: @operator, expression: context.Expression);
}
}
/// <summary>
@@ -416,12 +411,6 @@ namespace GitHub.DistributedTask.Expressions2
String name,
out IFunctionInfo functionInfo)
{
if (String.Equals(name, "case", StringComparison.OrdinalIgnoreCase) && !context.AllowCaseFunction)
{
functionInfo = null;
return false;
}
return ExpressionConstants.WellKnownFunctions.TryGetValue(name, out functionInfo) ||
context.ExtensionFunctions.TryGetValue(name, out functionInfo);
}
@@ -429,7 +418,6 @@ namespace GitHub.DistributedTask.Expressions2
private sealed class ParseContext
{
public Boolean AllowUnknownKeywords;
public Boolean AllowCaseFunction;
public readonly String Expression;
public readonly Dictionary<String, IFunctionInfo> ExtensionFunctions = new Dictionary<String, IFunctionInfo>(StringComparer.OrdinalIgnoreCase);
public readonly Dictionary<String, INamedValueInfo> ExtensionNamedValues = new Dictionary<String, INamedValueInfo>(StringComparer.OrdinalIgnoreCase);
@@ -445,8 +433,7 @@ namespace GitHub.DistributedTask.Expressions2
ITraceWriter trace,
IEnumerable<INamedValueInfo> namedValues,
IEnumerable<IFunctionInfo> functions,
Boolean allowUnknownKeywords = false,
Boolean allowCaseFunction = true)
Boolean allowUnknownKeywords = false)
{
Expression = expression ?? String.Empty;
if (Expression.Length > ExpressionConstants.MaxLength)
@@ -467,7 +454,6 @@ namespace GitHub.DistributedTask.Expressions2
LexicalAnalyzer = new LexicalAnalyzer(Expression);
AllowUnknownKeywords = allowUnknownKeywords;
AllowCaseFunction = allowCaseFunction;
}
private class NoOperationTraceWriter : ITraceWriter

View File

@@ -29,9 +29,6 @@ namespace GitHub.DistributedTask.Expressions2
case ParseExceptionKind.TooManyParameters:
description = "Too many parameters supplied";
break;
case ParseExceptionKind.EvenParameters:
description = "Even number of parameters supplied, requires an odd number of parameters";
break;
case ParseExceptionKind.UnexpectedEndOfExpression:
description = "Unexpected end of expression";
break;

View File

@@ -6,7 +6,6 @@
ExceededMaxLength,
TooFewParameters,
TooManyParameters,
EvenParameters,
UnexpectedEndOfExpression,
UnexpectedSymbol,
UnrecognizedFunction,

View File

@@ -1,45 +0,0 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using GitHub.Actions.Expressions.Data;
namespace GitHub.DistributedTask.Expressions2.Sdk.Functions
{
internal sealed class Case : Function
{
protected sealed override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
// Validate argument count - must be odd (pairs of predicate-result plus default)
if (Parameters.Count % 2 == 0)
{
throw new InvalidOperationException("case requires an odd number of arguments");
}
// Evaluate predicate-result pairs
for (var i = 0; i < Parameters.Count - 1; i += 2)
{
var predicate = Parameters[i].Evaluate(context);
// Predicate must be a boolean
if (predicate.Kind != ValueKind.Boolean)
{
throw new InvalidOperationException("case predicate must evaluate to a boolean value");
}
// If predicate is true, return the corresponding result
if ((Boolean)predicate.Value)
{
var result = Parameters[i + 1].Evaluate(context);
return result.Value;
}
}
// No predicate matched, return default (last argument)
var defaultResult = Parameters[Parameters.Count - 1].Evaluate(context);
return defaultResult.Value;
}
}
}
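
The deleted Case function above walks its arguments as (predicate, result) pairs followed by a trailing default, returning the result paired with the first true predicate. Below is a minimal standalone sketch of that control flow; EvaluateCase and its pre-evaluated argument list are illustrative stand-ins for the lazily evaluated expression-node parameters, not runner API:

using System;
using System.Collections.Generic;

class CaseSketch
{
    // Mirrors the deleted Case.EvaluateCore: arguments are
    // (predicate, result) pairs followed by a trailing default value.
    static object EvaluateCase(IReadOnlyList<object> args)
    {
        // Count must be odd: N predicate/result pairs plus one default
        if (args.Count % 2 == 0)
        {
            throw new InvalidOperationException("case requires an odd number of arguments");
        }
        for (var i = 0; i < args.Count - 1; i += 2)
        {
            // Each predicate must already be a boolean
            if (args[i] is not bool predicate)
            {
                throw new InvalidOperationException("case predicate must evaluate to a boolean value");
            }
            // The first true predicate wins; its paired result is returned
            if (predicate)
            {
                return args[i + 1];
            }
        }
        // No predicate matched: return the default (last argument)
        return args[args.Count - 1];
    }

    static void Main()
    {
        // Equivalent of case(false, 'a', true, 'b', 'fallback') => "b"
        Console.WriteLine(EvaluateCase(new object[] { false, "a", true, "b", "fallback" }));
    }
}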

View File

@@ -86,12 +86,6 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal ITraceWriter TraceWriter { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the case expression function is allowed.
/// Defaults to true. Set to false to disable the case function.
/// </summary>
internal Boolean AllowCaseFunction { get; set; } = true;
private IDictionary<String, Int32> FileIds
{
get

View File

@@ -57,7 +57,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -94,7 +94,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -123,7 +123,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,
@@ -152,7 +152,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
var originalBytes = context.Memory.CurrentBytes;
try
{
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
var options = new EvaluationOptions
{
MaxMemory = context.Memory.MaxBytes,

View File

@@ -1,76 +0,0 @@
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Evaluates parts of the workflow DOM. For example, a job strategy or step inputs.
/// </summary>
public interface IPipelineTemplateEvaluator
{
Boolean EvaluateStepContinueOnError(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);
String EvaluateStepDisplayName(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);
Dictionary<String, String> EvaluateStepEnvironment(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
StringComparer keyComparer);
Boolean EvaluateStepIf(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState);
Dictionary<String, String> EvaluateStepInputs(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);
Int32 EvaluateStepTimeout(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);
JobContainer EvaluateJobContainer(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);
Dictionary<String, String> EvaluateJobOutput(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);
TemplateToken EvaluateEnvironmentUrl(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);
Dictionary<String, String> EvaluateJobDefaultsRun(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);
IList<KeyValuePair<String, JobContainer>> EvaluateJobServiceContainers(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);
Snapshot EvaluateJobSnapshotRequest(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);
}
}

View File

@@ -663,7 +663,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
var node = default(ExpressionNode);
try
{
node = expressionParser.CreateTree(condition, null, namedValues, functions, allowCaseFunction: context.AllowCaseFunction) as ExpressionNode;
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
}
catch (Exception ex)
{

View File

@@ -18,7 +18,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
/// Evaluates parts of the workflow DOM. For example, a job strategy or step inputs.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public class PipelineTemplateEvaluator : IPipelineTemplateEvaluator
public class PipelineTemplateEvaluator
{
public PipelineTemplateEvaluator(
ITraceWriter trace,

View File

@@ -18,16 +18,6 @@ namespace GitHub.DistributedTask.WebApi
internal set;
}
/// <summary>
/// The URL used to refresh tokens with the legacy service
/// </summary>
[JsonProperty("legacy_authorization_url")]
public Uri LegacyAuthorizationUrl
{
get;
internal set;
}
/// <summary>
/// The url to connect to poll for messages
/// </summary>

View File

@@ -1,111 +0,0 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.Expressions.Data
{
[DataContract]
[JsonObject]
public sealed class ArrayExpressionData : ExpressionData, IEnumerable<ExpressionData>, IReadOnlyArray
{
public ArrayExpressionData()
: base(ExpressionDataType.Array)
{
}
[IgnoreDataMember]
public Int32 Count => m_items?.Count ?? 0;
public ExpressionData this[Int32 index] => m_items[index];
Object IReadOnlyArray.this[Int32 index] => m_items[index];
public void Add(ExpressionData item)
{
if (m_items == null)
{
m_items = new List<ExpressionData>();
}
m_items.Add(item);
}
public override ExpressionData Clone()
{
var result = new ArrayExpressionData();
if (m_items?.Count > 0)
{
result.m_items = new List<ExpressionData>(m_items.Count);
foreach (var item in m_items)
{
result.m_items.Add(item);
}
}
return result;
}
public override JToken ToJToken()
{
var result = new JArray();
if (m_items?.Count > 0)
{
foreach (var item in m_items)
{
result.Add(item?.ToJToken() ?? JValue.CreateNull());
}
}
return result;
}
public IEnumerator<ExpressionData> GetEnumerator()
{
if (m_items?.Count > 0)
{
foreach (var item in m_items)
{
yield return item;
}
}
}
IEnumerator IEnumerable.GetEnumerator()
{
if (m_items?.Count > 0)
{
foreach (var item in m_items)
{
yield return item;
}
}
}
IEnumerator IReadOnlyArray.GetEnumerator()
{
if (m_items?.Count > 0)
{
foreach (var item in m_items)
{
yield return item;
}
}
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_items?.Count == 0)
{
m_items = null;
}
}
[DataMember(Name = "a", EmitDefaultValue = false)]
private List<ExpressionData> m_items;
}
}
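
Assuming the ArrayExpressionData type above (together with its String and Boolean siblings later in this diff), a short usage sketch of how items accumulate and serialize:

var array = new ArrayExpressionData();
array.Add(new StringExpressionData("hello"));
array.Add(new BooleanExpressionData(true));
array.Add(null); // ToJToken serializes a null item as JSON null

Console.WriteLine(array.Count);      // 3
Console.WriteLine(array.ToJToken()); // JSON array: ["hello", true, null]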

View File

@@ -1,58 +0,0 @@
using System;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.Expressions.Data
{
[DataContract]
public sealed class BooleanExpressionData : ExpressionData, IBoolean
{
public BooleanExpressionData(Boolean value)
: base(ExpressionDataType.Boolean)
{
m_value = value;
}
public Boolean Value
{
get
{
return m_value;
}
}
public override ExpressionData Clone()
{
return new BooleanExpressionData(m_value);
}
public override JToken ToJToken()
{
return (JToken)m_value;
}
public override String ToString()
{
return m_value ? "true" : "false";
}
Boolean IBoolean.GetBoolean()
{
return Value;
}
public static implicit operator Boolean(BooleanExpressionData data)
{
return data.Value;
}
public static implicit operator BooleanExpressionData(Boolean data)
{
return new BooleanExpressionData(data);
}
[DataMember(Name = "b", EmitDefaultValue = false)]
private Boolean m_value;
}
}

View File

@@ -1,289 +0,0 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.Expressions.Data
{
[DataContract]
[JsonObject]
public class CaseSensitiveDictionaryExpressionData : ExpressionData, IEnumerable<KeyValuePair<String, ExpressionData>>, IReadOnlyObject
{
public CaseSensitiveDictionaryExpressionData()
: base(ExpressionDataType.CaseSensitiveDictionary)
{
}
[IgnoreDataMember]
public Int32 Count => m_list?.Count ?? 0;
[IgnoreDataMember]
public IEnumerable<String> Keys
{
get
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return pair.Key;
}
}
}
}
[IgnoreDataMember]
public IEnumerable<ExpressionData> Values
{
get
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return pair.Value;
}
}
}
}
IEnumerable<Object> IReadOnlyObject.Values
{
get
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return pair.Value;
}
}
}
}
private Dictionary<String, Int32> IndexLookup
{
get
{
if (m_indexLookup == null)
{
m_indexLookup = new Dictionary<String, Int32>(StringComparer.Ordinal);
if (m_list?.Count > 0)
{
for (var i = 0; i < m_list.Count; i++)
{
var pair = m_list[i];
m_indexLookup.Add(pair.Key, i);
}
}
}
return m_indexLookup;
}
}
private List<DictionaryExpressionDataPair> List
{
get
{
if (m_list == null)
{
m_list = new List<DictionaryExpressionDataPair>();
}
return m_list;
}
}
public ExpressionData this[String key]
{
get
{
var index = IndexLookup[key];
return m_list[index].Value;
}
set
{
// Existing
if (IndexLookup.TryGetValue(key, out var index))
{
key = m_list[index].Key; // preserve casing
m_list[index] = new DictionaryExpressionDataPair(key, value);
}
// New
else
{
Add(key, value);
}
}
}
Object IReadOnlyObject.this[String key]
{
get
{
var index = IndexLookup[key];
return m_list[index].Value;
}
}
internal KeyValuePair<String, ExpressionData> this[Int32 index]
{
get
{
var pair = m_list[index];
return new KeyValuePair<String, ExpressionData>(pair.Key, pair.Value);
}
}
public void Add(IEnumerable<KeyValuePair<String, ExpressionData>> pairs)
{
foreach (var pair in pairs)
{
Add(pair.Key, pair.Value);
}
}
public void Add(
String key,
ExpressionData value)
{
IndexLookup.Add(key, m_list?.Count ?? 0);
List.Add(new DictionaryExpressionDataPair(key, value));
}
public override ExpressionData Clone()
{
var result = new CaseSensitiveDictionaryExpressionData();
if (m_list?.Count > 0)
{
result.m_list = new List<DictionaryExpressionDataPair>(m_list.Count);
foreach (var item in m_list)
{
result.m_list.Add(new DictionaryExpressionDataPair(item.Key, item.Value?.Clone()));
}
}
return result;
}
public override JToken ToJToken()
{
var json = new JObject();
if (m_list?.Count > 0)
{
foreach (var item in m_list)
{
json.Add(item.Key, item.Value?.ToJToken() ?? JValue.CreateNull());
}
}
return json;
}
public Boolean ContainsKey(String key)
{
return TryGetValue(key, out _);
}
public IEnumerator<KeyValuePair<String, ExpressionData>> GetEnumerator()
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return new KeyValuePair<String, ExpressionData>(pair.Key, pair.Value);
}
}
}
IEnumerator IEnumerable.GetEnumerator()
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return new KeyValuePair<String, ExpressionData>(pair.Key, pair.Value);
}
}
}
IEnumerator IReadOnlyObject.GetEnumerator()
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return new KeyValuePair<String, Object>(pair.Key, pair.Value);
}
}
}
public Boolean TryGetValue(
String key,
out ExpressionData value)
{
if (m_list?.Count > 0 &&
IndexLookup.TryGetValue(key, out var index))
{
value = m_list[index].Value;
return true;
}
value = null;
return false;
}
Boolean IReadOnlyObject.TryGetValue(
String key,
out Object value)
{
if (TryGetValue(key, out ExpressionData data))
{
value = data;
return true;
}
value = null;
return false;
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_list?.Count == 0)
{
m_list = null;
}
}
[DataContract]
private sealed class DictionaryExpressionDataPair
{
public DictionaryExpressionDataPair(
String key,
ExpressionData value)
{
Key = key;
Value = value;
}
[DataMember(Name = "k")]
public readonly String Key;
[DataMember(Name = "v")]
public readonly ExpressionData Value;
}
private Dictionary<String, Int32> m_indexLookup;
[DataMember(Name = "d", EmitDefaultValue = false)]
private List<DictionaryExpressionDataPair> m_list;
}
}

View File

@@ -1,289 +0,0 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.Expressions.Data
{
[DataContract]
[JsonObject]
public class DictionaryExpressionData : ExpressionData, IEnumerable<KeyValuePair<String, ExpressionData>>, IReadOnlyObject
{
public DictionaryExpressionData()
: base(ExpressionDataType.Dictionary)
{
}
[IgnoreDataMember]
public Int32 Count => m_list?.Count ?? 0;
[IgnoreDataMember]
public IEnumerable<String> Keys
{
get
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return pair.Key;
}
}
}
}
[IgnoreDataMember]
public IEnumerable<ExpressionData> Values
{
get
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return pair.Value;
}
}
}
}
IEnumerable<Object> IReadOnlyObject.Values
{
get
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return pair.Value;
}
}
}
}
private Dictionary<String, Int32> IndexLookup
{
get
{
if (m_indexLookup == null)
{
m_indexLookup = new Dictionary<String, Int32>(StringComparer.OrdinalIgnoreCase);
if (m_list?.Count > 0)
{
for (var i = 0; i < m_list.Count; i++)
{
var pair = m_list[i];
m_indexLookup.Add(pair.Key, i);
}
}
}
return m_indexLookup;
}
}
private List<DictionaryExpressionDataPair> List
{
get
{
if (m_list == null)
{
m_list = new List<DictionaryExpressionDataPair>();
}
return m_list;
}
}
public ExpressionData this[String key]
{
get
{
var index = IndexLookup[key];
return m_list[index].Value;
}
set
{
// Existing
if (IndexLookup.TryGetValue(key, out var index))
{
key = m_list[index].Key; // preserve casing
m_list[index] = new DictionaryExpressionDataPair(key, value);
}
// New
else
{
Add(key, value);
}
}
}
Object IReadOnlyObject.this[String key]
{
get
{
var index = IndexLookup[key];
return m_list[index].Value;
}
}
internal KeyValuePair<String, ExpressionData> this[Int32 index]
{
get
{
var pair = m_list[index];
return new KeyValuePair<String, ExpressionData>(pair.Key, pair.Value);
}
}
public void Add(IEnumerable<KeyValuePair<String, ExpressionData>> pairs)
{
foreach (var pair in pairs)
{
Add(pair.Key, pair.Value);
}
}
public void Add(
String key,
ExpressionData value)
{
IndexLookup.Add(key, m_list?.Count ?? 0);
List.Add(new DictionaryExpressionDataPair(key, value));
}
public override ExpressionData Clone()
{
var result = new DictionaryExpressionData();
if (m_list?.Count > 0)
{
result.m_list = new List<DictionaryExpressionDataPair>(m_list.Count);
foreach (var item in m_list)
{
result.m_list.Add(new DictionaryExpressionDataPair(item.Key, item.Value?.Clone()));
}
}
return result;
}
public override JToken ToJToken()
{
var json = new JObject();
if (m_list?.Count > 0)
{
foreach (var item in m_list)
{
json.Add(item.Key, item.Value?.ToJToken() ?? JValue.CreateNull());
}
}
return json;
}
public Boolean ContainsKey(String key)
{
return TryGetValue(key, out _);
}
public IEnumerator<KeyValuePair<String, ExpressionData>> GetEnumerator()
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return new KeyValuePair<String, ExpressionData>(pair.Key, pair.Value);
}
}
}
IEnumerator IEnumerable.GetEnumerator()
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return new KeyValuePair<String, ExpressionData>(pair.Key, pair.Value);
}
}
}
IEnumerator IReadOnlyObject.GetEnumerator()
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return new KeyValuePair<String, Object>(pair.Key, pair.Value);
}
}
}
public Boolean TryGetValue(
String key,
out ExpressionData value)
{
if (m_list?.Count > 0 &&
IndexLookup.TryGetValue(key, out var index))
{
value = m_list[index].Value;
return true;
}
value = null;
return false;
}
Boolean IReadOnlyObject.TryGetValue(
String key,
out Object value)
{
if (TryGetValue(key, out ExpressionData data))
{
value = data;
return true;
}
value = null;
return false;
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_list?.Count == 0)
{
m_list = null;
}
}
[DataContract]
private sealed class DictionaryExpressionDataPair
{
public DictionaryExpressionDataPair(
String key,
ExpressionData value)
{
Key = key;
Value = value;
}
[DataMember(Name = "k")]
public readonly String Key;
[DataMember(Name = "v")]
public readonly ExpressionData Value;
}
private Dictionary<String, Int32> m_indexLookup;
[DataMember(Name = "d", EmitDefaultValue = false)]
private List<DictionaryExpressionDataPair> m_list;
}
}
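
The two dictionary types above differ only in their key comparer: CaseSensitiveDictionaryExpressionData uses StringComparer.Ordinal while DictionaryExpressionData uses OrdinalIgnoreCase, and both preserve the first-seen key casing when a value is reassigned through the indexer. A brief sketch against the type above:

var dict = new DictionaryExpressionData();
dict.Add("Foo", new StringExpressionData("one"));

// Case-insensitive lookup succeeds
Console.WriteLine(dict.ContainsKey("FOO")); // True

// Reassignment through the indexer keeps the original key casing
dict["foo"] = new StringExpressionData("two");
foreach (var key in dict.Keys)
{
    Console.WriteLine(key); // Foo
}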

View File

@@ -1,27 +0,0 @@
using System;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.Expressions.Data
{
/// <summary>
/// Base class for all expression data values
/// </summary>
[DataContract]
[JsonConverter(typeof(ExpressionDataJsonConverter))]
public abstract class ExpressionData
{
protected ExpressionData(Int32 type)
{
Type = type;
}
[DataMember(Name = "t", EmitDefaultValue = false)]
internal Int32 Type { get; }
public abstract ExpressionData Clone();
public abstract JToken ToJToken();
}
}

View File

@@ -1,156 +0,0 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
namespace GitHub.Actions.Expressions.Data
{
public static class ExpressionDataExtensions
{
public static ArrayExpressionData AssertArray(
this ExpressionData value,
String objectDescription)
{
if (value is ArrayExpressionData array)
{
return array;
}
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(ArrayExpressionData)}' was expected.");
}
public static DictionaryExpressionData AssertDictionary(
this ExpressionData value,
String objectDescription)
{
if (value is DictionaryExpressionData dictionary)
{
return dictionary;
}
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(DictionaryExpressionData)}' was expected.");
}
public static StringExpressionData AssertString(
this ExpressionData value,
String objectDescription)
{
if (value is StringExpressionData str)
{
return str;
}
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(StringExpressionData)}' was expected.");
}
/// <summary>
/// Returns all expression data objects (depth first)
/// </summary>
public static IEnumerable<ExpressionData> Traverse(this ExpressionData value)
{
return Traverse(value, omitKeys: false);
}
/// <summary>
/// Returns all context data objects (depth first)
/// </summary>
/// <param name="omitKeys">If true, dictionary keys are omitted</param>
public static IEnumerable<ExpressionData> Traverse(
this ExpressionData value,
Boolean omitKeys)
{
yield return value;
if (value is ArrayExpressionData || value is DictionaryExpressionData)
{
var state = new TraversalState(null, value);
while (state != null)
{
if (state.MoveNext(omitKeys))
{
value = state.Current;
yield return value;
if (value is ArrayExpressionData || value is DictionaryExpressionData)
{
state = new TraversalState(state, value);
}
}
else
{
state = state.Parent;
}
}
}
}
private sealed class TraversalState
{
public TraversalState(
TraversalState parent,
ExpressionData data)
{
Parent = parent;
m_data = data;
}
public Boolean MoveNext(Boolean omitKeys)
{
switch (m_data.Type)
{
case ExpressionDataType.Array:
var array = m_data.AssertArray("array");
if (++m_index < array.Count)
{
Current = array[m_index];
return true;
}
else
{
Current = null;
return false;
}
case ExpressionDataType.Dictionary:
var dictionary = m_data.AssertDictionary("dictionary");
// Return the value
if (m_isKey)
{
m_isKey = false;
Current = dictionary[m_index].Value;
return true;
}
if (++m_index < dictionary.Count)
{
// Skip the key, return the value
if (omitKeys)
{
m_isKey = false;
Current = dictionary[m_index].Value;
return true;
}
// Return the key
m_isKey = true;
Current = new StringExpressionData(dictionary[m_index].Key);
return true;
}
Current = null;
return false;
default:
throw new NotSupportedException($"Unexpected {nameof(ExpressionData)} type '{m_data.Type}'");
}
}
private ExpressionData m_data;
private Int32 m_index = -1;
private Boolean m_isKey;
public ExpressionData Current;
public TraversalState Parent;
}
}
}

View File

@@ -1,199 +0,0 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Collections.Generic;
using System.Reflection;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.Expressions.Data
{
/// <summary>
/// JSON serializer for ExpressionData objects
/// </summary>
internal sealed class ExpressionDataJsonConverter : JsonConverter
{
public override Boolean CanWrite
{
get
{
return true;
}
}
public override Boolean CanConvert(Type objectType)
{
return typeof(ExpressionData).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo());
}
public override Object ReadJson(
JsonReader reader,
Type objectType,
Object existingValue,
JsonSerializer serializer)
{
switch (reader.TokenType)
{
case JsonToken.String:
return new StringExpressionData(reader.Value.ToString());
case JsonToken.Boolean:
return new BooleanExpressionData((Boolean)reader.Value);
case JsonToken.Float:
return new NumberExpressionData((Double)reader.Value);
case JsonToken.Integer:
return new NumberExpressionData((Double)(Int64)reader.Value);
case JsonToken.StartObject:
break;
default:
return null;
}
Int32? type = null;
JObject value = JObject.Load(reader);
if (!value.TryGetValue("t", StringComparison.OrdinalIgnoreCase, out JToken typeValue))
{
type = ExpressionDataType.String;
}
else if (typeValue.Type == JTokenType.Integer)
{
type = (Int32)typeValue;
}
else
{
return existingValue;
}
Object newValue = null;
switch (type)
{
case ExpressionDataType.String:
newValue = new StringExpressionData(null);
break;
case ExpressionDataType.Array:
newValue = new ArrayExpressionData();
break;
case ExpressionDataType.Dictionary:
newValue = new DictionaryExpressionData();
break;
case ExpressionDataType.Boolean:
newValue = new BooleanExpressionData(false);
break;
case ExpressionDataType.Number:
newValue = new NumberExpressionData(0);
break;
case ExpressionDataType.CaseSensitiveDictionary:
newValue = new CaseSensitiveDictionaryExpressionData();
break;
default:
throw new NotSupportedException($"Unexpected {nameof(ExpressionDataType)} '{type}'");
}
if (value != null)
{
using JsonReader objectReader = value.CreateReader();
serializer.Populate(objectReader, newValue);
}
return newValue;
}
public override void WriteJson(
JsonWriter writer,
Object value,
JsonSerializer serializer)
{
if (Object.ReferenceEquals(value, null))
{
writer.WriteNull();
}
else if (value is StringExpressionData stringData)
{
writer.WriteValue(stringData.Value);
}
else if (value is BooleanExpressionData boolData)
{
writer.WriteValue(boolData.Value);
}
else if (value is NumberExpressionData numberData)
{
writer.WriteValue(numberData.Value);
}
else if (value is ArrayExpressionData arrayData)
{
writer.WriteStartObject();
writer.WritePropertyName("t");
writer.WriteValue(ExpressionDataType.Array);
if (arrayData.Count > 0)
{
writer.WritePropertyName("a");
writer.WriteStartArray();
foreach (var item in arrayData)
{
serializer.Serialize(writer, item);
}
writer.WriteEndArray();
}
writer.WriteEndObject();
}
else if (value is DictionaryExpressionData dictionaryData)
{
writer.WriteStartObject();
writer.WritePropertyName("t");
writer.WriteValue(ExpressionDataType.Dictionary);
if (dictionaryData.Count > 0)
{
writer.WritePropertyName("d");
writer.WriteStartArray();
foreach (var pair in dictionaryData)
{
writer.WriteStartObject();
writer.WritePropertyName("k");
writer.WriteValue(pair.Key);
writer.WritePropertyName("v");
serializer.Serialize(writer, pair.Value);
writer.WriteEndObject();
}
writer.WriteEndArray();
}
writer.WriteEndObject();
}
else if (value is CaseSensitiveDictionaryExpressionData caseSensitiveDictionaryData)
{
writer.WriteStartObject();
writer.WritePropertyName("t");
writer.WriteValue(ExpressionDataType.CaseSensitiveDictionary);
if (caseSensitiveDictionaryData.Count > 0)
{
writer.WritePropertyName("d");
writer.WriteStartArray();
foreach (var pair in caseSensitiveDictionaryData)
{
writer.WriteStartObject();
writer.WritePropertyName("k");
writer.WriteValue(pair.Key);
writer.WritePropertyName("v");
serializer.Serialize(writer, pair.Value);
writer.WriteEndObject();
}
writer.WriteEndArray();
}
writer.WriteEndObject();
}
else
{
throw new NotSupportedException($"Unexpected type '{value.GetType().Name}'");
}
}
}
}
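
The converter above writes primitives as bare JSON values and collections as tagged objects: "t" carries the ExpressionDataType constant, arrays nest items under "a", and dictionaries nest key/value pairs under "d"/"k"/"v". A hedged round-trip sketch using Newtonsoft.Json (the expected wire shape is inferred from WriteJson above; the converter is applied via the [JsonConverter] attribute on ExpressionData):

using Newtonsoft.Json;

var dict = new DictionaryExpressionData();
dict.Add("name", new StringExpressionData("runner"));

// Expected shape: {"t":2,"d":[{"k":"name","v":"runner"}]}
var json = JsonConvert.SerializeObject(dict);
Console.WriteLine(json);

// Deserializing through the ExpressionData base type restores the subtype
var roundTripped = JsonConvert.DeserializeObject<ExpressionData>(json);
Console.WriteLine(roundTripped is DictionaryExpressionData); // True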

View File

@@ -1,19 +0,0 @@
using System;
namespace GitHub.Actions.Expressions.Data
{
internal static class ExpressionDataType
{
internal const Int32 String = 0;
internal const Int32 Array = 1;
internal const Int32 Dictionary = 2;
internal const Int32 Boolean = 3;
internal const Int32 Number = 4;
internal const Int32 CaseSensitiveDictionary = 5;
}
}

View File

@@ -1,64 +0,0 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.Expressions.Data
{
public static class JTokenExtensions
{
public static ExpressionData ToExpressionData(this JToken value)
{
return value.ToExpressionData(1, 100);
}
public static ExpressionData ToExpressionData(
this JToken value,
Int32 depth,
Int32 maxDepth)
{
if (depth < maxDepth)
{
if (value.Type == JTokenType.String)
{
return new StringExpressionData((String)value);
}
else if (value.Type == JTokenType.Boolean)
{
return new BooleanExpressionData((Boolean)value);
}
else if (value.Type == JTokenType.Float || value.Type == JTokenType.Integer)
{
return new NumberExpressionData((Double)value);
}
else if (value.Type == JTokenType.Object)
{
var subContext = new DictionaryExpressionData();
var obj = (JObject)value;
foreach (var property in obj.Properties())
{
subContext[property.Name] = ToExpressionData(property.Value, depth + 1, maxDepth);
}
return subContext;
}
else if (value.Type == JTokenType.Array)
{
var arrayContext = new ArrayExpressionData();
var arr = (JArray)value;
foreach (var element in arr)
{
arrayContext.Add(ToExpressionData(element, depth + 1, maxDepth));
}
return arrayContext;
}
else if (value.Type == JTokenType.Null)
{
return null;
}
}
// Unrecognized type, or max depth reached; fall back to the string form
return new StringExpressionData(value.ToString());
}
}
}
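
A short sketch of the JToken conversion above, assuming the deleted extension methods from this diff; nesting beyond maxDepth would collapse to the string form:

using Newtonsoft.Json.Linq;

var token = JToken.Parse("{ \"enabled\": true, \"retries\": 3, \"tags\": [\"a\", \"b\"] }");
var config = token.ToExpressionData().AssertDictionary("config");

Console.WriteLine(config["enabled"]); // true  (BooleanExpressionData)
Console.WriteLine(config["retries"]); // 3     (NumberExpressionData)
Console.WriteLine(config["tags"].AssertArray("config.tags").Count); // 2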

View File

@@ -1,78 +0,0 @@
using System;
using System.Globalization;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.Expressions.Data
{
[DataContract]
public sealed class NumberExpressionData : ExpressionData, INumber
{
public NumberExpressionData(Double value)
: base(ExpressionDataType.Number)
{
m_value = value;
}
public Double Value
{
get
{
return m_value;
}
}
public override ExpressionData Clone()
{
return new NumberExpressionData(m_value);
}
public override JToken ToJToken()
{
if (Double.IsNaN(m_value) || m_value == Double.PositiveInfinity || m_value == Double.NegativeInfinity)
{
return (JToken)m_value;
}
var floored = Math.Floor(m_value);
if (m_value == floored && m_value <= (Double)Int32.MaxValue && m_value >= (Double)Int32.MinValue)
{
var flooredInt = (Int32)floored;
return (JToken)flooredInt;
}
else if (m_value == floored && m_value <= (Double)Int64.MaxValue && m_value >= (Double)Int64.MinValue)
{
var flooredInt = (Int64)floored;
return (JToken)flooredInt;
}
else
{
return (JToken)m_value;
}
}
public override String ToString()
{
return m_value.ToString("G15", CultureInfo.InvariantCulture);
}
Double INumber.GetNumber()
{
return Value;
}
public static implicit operator Double(NumberExpressionData data)
{
return data.Value;
}
public static implicit operator NumberExpressionData(Double data)
{
return new NumberExpressionData(data);
}
[DataMember(Name = "n", EmitDefaultValue = false)]
private Double m_value;
}
}
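
ToJToken above keeps whole numbers as JSON integers when they fit in Int32 or Int64, and leaves everything else (fractions, NaN, the infinities, out-of-range values) as a double. For example:

Console.WriteLine(new NumberExpressionData(42.0).ToJToken().Type); // Integer
Console.WriteLine(new NumberExpressionData(1.5).ToJToken().Type);  // Float
Console.WriteLine(new NumberExpressionData(1e20).ToJToken().Type); // Float (outside Int64 range)
Console.WriteLine(new NumberExpressionData(0.1));                  // 0.1 ("G15", invariant culture)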

View File

@@ -1,74 +0,0 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
using Newtonsoft.Json.Linq;
namespace GitHub.Actions.Expressions.Data
{
[DataContract]
public sealed class StringExpressionData : ExpressionData, IString
{
public StringExpressionData(String value)
: base(ExpressionDataType.String)
{
m_value = value;
}
public String Value
{
get
{
if (m_value == null)
{
m_value = String.Empty;
}
return m_value;
}
}
public override ExpressionData Clone()
{
return new StringExpressionData(m_value);
}
public override JToken ToJToken()
{
return (JToken)m_value;
}
String IString.GetString()
{
return Value;
}
public override String ToString()
{
return Value;
}
public static implicit operator String(StringExpressionData data)
{
return data.Value;
}
public static implicit operator StringExpressionData(String data)
{
return new StringExpressionData(data);
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_value?.Length == 0)
{
m_value = null;
}
}
[DataMember(Name = "s", EmitDefaultValue = false)]
private String m_value;
}
}

View File

@@ -1,50 +0,0 @@
using System;
namespace GitHub.Actions.Expressions
{
public sealed class EvaluationOptions
{
public EvaluationOptions()
{
}
public EvaluationOptions(EvaluationOptions copy)
{
if (copy != null)
{
MaxMemory = copy.MaxMemory;
MaxCacheMemory = copy.MaxCacheMemory;
StrictJsonParsing = copy.StrictJsonParsing;
AlwaysTraceExpanded = copy.AlwaysTraceExpanded;
}
}
/// <summary>
/// Maximum memory (in bytes) allowed during expression evaluation.
/// Memory is tracked across the entire expression tree evaluation to protect against DoS attacks.
/// Default is 1 MB (1048576 bytes) if not specified.
/// </summary>
public Int32 MaxMemory { get; set; }
/// <summary>
/// Maximum memory (in bytes) allowed for caching expanded expression results during tracing.
/// When exceeded, the cache is cleared and expressions may not be fully expanded in trace output.
/// Default is 1 MB (1048576 bytes) if not specified.
/// </summary>
public Int32 MaxCacheMemory { get; set; }
/// <summary>
/// Whether to enforce strict JSON parsing in the fromJson function.
/// When true, rejects JSON with comments, trailing commas, single quotes, and other non-standard features.
/// Default is false if not specified.
/// </summary>
public Boolean StrictJsonParsing { get; set; }
/// <summary>
/// Whether to always include the expanded expression in trace output.
/// When true, the expanded expression is always traced even if it matches the original expression or result.
/// Default is false if not specified.
/// </summary>
public Boolean AlwaysTraceExpanded { get; set; }
}
}
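
A small sketch of populating these options; the values are illustrative, and per the doc comments an unset (zero) memory limit falls back to the 1 MB default:

var options = new EvaluationOptions
{
    MaxMemory = 1048576,      // 1 MB evaluation budget
    StrictJsonParsing = true, // reject comments/trailing commas in fromJson
};

// The copy constructor tolerates null and clones all four settings
var copy = new EvaluationOptions(options);
Console.WriteLine(copy.StrictJsonParsing); // True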

View File

@@ -1,459 +0,0 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
using System;
using System.Globalization;
using System.Linq;
using GitHub.Actions.Expressions.Sdk;
namespace GitHub.Actions.Expressions
{
public sealed class EvaluationResult
{
internal EvaluationResult(
EvaluationContext context,
Int32 level,
Object val,
ValueKind kind,
Object raw)
: this(context, level, val, kind, raw, false)
{
}
internal EvaluationResult(
EvaluationContext context,
Int32 level,
Object val,
ValueKind kind,
Object raw,
Boolean omitTracing)
{
m_level = level;
Value = val;
Kind = kind;
Raw = raw;
m_omitTracing = omitTracing;
if (!omitTracing)
{
TraceValue(context);
}
}
public ValueKind Kind { get; }
/// <summary>
/// When an interface converter is applied to the node result, raw contains the original value
/// </summary>
public Object Raw { get; }
public Object Value { get; }
public Boolean IsFalsy
{
get
{
switch (Kind)
{
case ValueKind.Null:
return true;
case ValueKind.Boolean:
var boolean = (Boolean)Value;
return !boolean;
case ValueKind.Number:
var number = (Double)Value;
return number == 0d || Double.IsNaN(number);
case ValueKind.String:
var str = (String)Value;
return String.Equals(str, String.Empty, StringComparison.Ordinal);
default:
return false;
}
}
}
public Boolean IsPrimitive => ExpressionUtility.IsPrimitive(Kind);
public Boolean IsTruthy => !IsFalsy;
/// <summary>
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
/// </summary>
public Boolean AbstractEqual(EvaluationResult right)
{
return AbstractEqual(Value, right.Value);
}
/// <summary>
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
/// </summary>
public Boolean AbstractGreaterThan(EvaluationResult right)
{
return AbstractGreaterThan(Value, right.Value);
}
/// <summary>
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
/// </summary>
public Boolean AbstractGreaterThanOrEqual(EvaluationResult right)
{
return AbstractEqual(Value, right.Value) || AbstractGreaterThan(Value, right.Value);
}
/// <summary>
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
/// </summary>
public Boolean AbstractLessThan(EvaluationResult right)
{
return AbstractLessThan(Value, right.Value);
}
/// <summary>
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
/// </summary>
public Boolean AbstractLessThanOrEqual(EvaluationResult right)
{
return AbstractEqual(Value, right.Value) || AbstractLessThan(Value, right.Value);
}
/// <summary>
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
/// </summary>
public Boolean AbstractNotEqual(EvaluationResult right)
{
return !AbstractEqual(Value, right.Value);
}
public Double ConvertToNumber()
{
return ConvertToNumber(Value);
}
public String ConvertToString()
{
switch (Kind)
{
case ValueKind.Null:
return String.Empty;
case ValueKind.Boolean:
return ((Boolean)Value) ? ExpressionConstants.True : ExpressionConstants.False;
case ValueKind.Number:
if ((Double)Value == -0)
{
// .NET Core 3.0 now prints negative zero as -0, so we need this to keep our behavior consistent
return ((Double)0).ToString(ExpressionConstants.NumberFormat, CultureInfo.InvariantCulture);
}
return ((Double)Value).ToString(ExpressionConstants.NumberFormat, CultureInfo.InvariantCulture);
case ValueKind.String:
return Value as String;
default:
return Kind.ToString();
}
}
public Boolean TryGetCollectionInterface(out Object collection)
{
if (Kind == ValueKind.Object || Kind == ValueKind.Array)
{
var obj = Value;
if (obj is IReadOnlyObject)
{
collection = obj;
return true;
}
else if (obj is IReadOnlyArray)
{
collection = obj;
return true;
}
}
collection = null;
return false;
}
/// <summary>
/// Useful for working with values that are not the direct evaluation result of a parameter.
/// This allows ExpressionNode authors to leverage the coercion and comparison functions
/// for any values.
///
/// Also note, the value will be canonicalized (for example numeric types converted to double) and any
/// matching interfaces applied.
/// </summary>
public static EvaluationResult CreateIntermediateResult(
EvaluationContext context,
Object obj)
{
var val = ExpressionUtility.ConvertToCanonicalValue(obj, out ValueKind kind, out Object raw);
return new EvaluationResult(context, 0, val, kind, raw, omitTracing: true);
}
private void TraceValue(EvaluationContext context)
{
if (!m_omitTracing)
{
TraceValue(context, Value, Kind);
}
}
private void TraceValue(
EvaluationContext context,
Object val,
ValueKind kind)
{
if (!m_omitTracing)
{
TraceVerbose(context, String.Concat("=> ", ExpressionUtility.FormatValue(context?.SecretMasker, val, kind)));
}
}
private void TraceVerbose(
EvaluationContext context,
String message)
{
if (!m_omitTracing)
{
context?.Trace.Verbose(String.Empty.PadLeft(m_level * 2, '.') + (message ?? String.Empty));
}
}
/// <summary>
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
/// </summary>
private static Boolean AbstractEqual(
Object canonicalLeftValue,
Object canonicalRightValue)
{
CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out var leftKind, out var rightKind);
// Same kind
if (leftKind == rightKind)
{
switch (leftKind)
{
// Null, Null
case ValueKind.Null:
return true;
// Number, Number
case ValueKind.Number:
var leftDouble = (Double)canonicalLeftValue;
var rightDouble = (Double)canonicalRightValue;
if (Double.IsNaN(leftDouble) || Double.IsNaN(rightDouble))
{
return false;
}
return leftDouble == rightDouble;
// String, String
case ValueKind.String:
var leftString = (String)canonicalLeftValue;
var rightString = (String)canonicalRightValue;
return String.Equals(leftString, rightString, StringComparison.OrdinalIgnoreCase);
// Boolean, Boolean
case ValueKind.Boolean:
var leftBoolean = (Boolean)canonicalLeftValue;
var rightBoolean = (Boolean)canonicalRightValue;
return leftBoolean == rightBoolean;
// Object, Object
case ValueKind.Object:
case ValueKind.Array:
return Object.ReferenceEquals(canonicalLeftValue, canonicalRightValue);
}
}
return false;
}
/// <summary>
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
/// </summary>
private static Boolean AbstractGreaterThan(
Object canonicalLeftValue,
Object canonicalRightValue)
{
CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out var leftKind, out var rightKind);
// Same kind
if (leftKind == rightKind)
{
switch (leftKind)
{
// Number, Number
case ValueKind.Number:
var leftDouble = (Double)canonicalLeftValue;
var rightDouble = (Double)canonicalRightValue;
if (Double.IsNaN(leftDouble) || Double.IsNaN(rightDouble))
{
return false;
}
return leftDouble > rightDouble;
// String, String
case ValueKind.String:
var leftString = (String)canonicalLeftValue;
var rightString = (String)canonicalRightValue;
return String.Compare(leftString, rightString, StringComparison.OrdinalIgnoreCase) > 0;
// Boolean, Boolean
case ValueKind.Boolean:
var leftBoolean = (Boolean)canonicalLeftValue;
var rightBoolean = (Boolean)canonicalRightValue;
return leftBoolean && !rightBoolean;
}
}
return false;
}
/// <summary>
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
/// </summary>
private static Boolean AbstractLessThan(
Object canonicalLeftValue,
Object canonicalRightValue)
{
CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out var leftKind, out var rightKind);
// Same kind
if (leftKind == rightKind)
{
switch (leftKind)
{
// Number, Number
case ValueKind.Number:
var leftDouble = (Double)canonicalLeftValue;
var rightDouble = (Double)canonicalRightValue;
if (Double.IsNaN(leftDouble) || Double.IsNaN(rightDouble))
{
return false;
}
return leftDouble < rightDouble;
// String, String
case ValueKind.String:
var leftString = (String)canonicalLeftValue;
var rightString = (String)canonicalRightValue;
return String.Compare(leftString, rightString, StringComparison.OrdinalIgnoreCase) < 0;
// Boolean, Boolean
case ValueKind.Boolean:
var leftBoolean = (Boolean)canonicalLeftValue;
var rightBoolean = (Boolean)canonicalRightValue;
return !leftBoolean && rightBoolean;
}
}
return false;
}
/// <summary>
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
/// Except objects are not coerced to primitives.
/// </summary>
private static void CoerceTypes(
ref Object canonicalLeftValue,
ref Object canonicalRightValue,
out ValueKind leftKind,
out ValueKind rightKind)
{
leftKind = GetKind(canonicalLeftValue);
rightKind = GetKind(canonicalRightValue);
// Same kind
if (leftKind == rightKind)
{
}
// Number, String
else if (leftKind == ValueKind.Number && rightKind == ValueKind.String)
{
canonicalRightValue = ConvertToNumber(canonicalRightValue);
rightKind = ValueKind.Number;
}
// String, Number
else if (leftKind == ValueKind.String && rightKind == ValueKind.Number)
{
canonicalLeftValue = ConvertToNumber(canonicalLeftValue);
leftKind = ValueKind.Number;
}
// Boolean|Null, Any
else if (leftKind == ValueKind.Boolean || leftKind == ValueKind.Null)
{
canonicalLeftValue = ConvertToNumber(canonicalLeftValue);
CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out leftKind, out rightKind);
}
// Any, Boolean|Null
else if (rightKind == ValueKind.Boolean || rightKind == ValueKind.Null)
{
canonicalRightValue = ConvertToNumber(canonicalRightValue);
CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out leftKind, out rightKind);
}
}
/// <summary>
/// For primitives, follows the Javascript rules (the Number function in Javascript). Otherwise NaN.
/// </summary>
private static Double ConvertToNumber(Object canonicalValue)
{
var kind = GetKind(canonicalValue);
switch (kind)
{
case ValueKind.Null:
return 0d;
case ValueKind.Boolean:
return (Boolean)canonicalValue ? 1d : 0d;
case ValueKind.Number:
return (Double)canonicalValue;
case ValueKind.String:
return ExpressionUtility.ParseNumber(canonicalValue as String);
}
return Double.NaN;
}
private static ValueKind GetKind(Object canonicalValue)
{
if (Object.ReferenceEquals(canonicalValue, null))
{
return ValueKind.Null;
}
else if (canonicalValue is Boolean)
{
return ValueKind.Boolean;
}
else if (canonicalValue is Double)
{
return ValueKind.Number;
}
else if (canonicalValue is String)
{
return ValueKind.String;
}
else if (canonicalValue is IReadOnlyObject)
{
return ValueKind.Object;
}
else if (canonicalValue is IReadOnlyArray)
{
return ValueKind.Array;
}
return ValueKind.Object;
}
private readonly Int32 m_level;
private readonly Boolean m_omitTracing;
}
}
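
The coercion rules above mirror JavaScript abstract equality: a string compared against a number is parsed to a number, booleans and null collapse through 0/1, NaN never compares equal, and string-to-string comparison is OrdinalIgnoreCase. A standalone sketch of ConvertToNumber's role; double.TryParse here is a stand-in for ExpressionUtility.ParseNumber, whose JavaScript-style parsing is not shown in this diff:

using System;
using System.Globalization;

static double ToNumber(object canonicalValue) => canonicalValue switch
{
    null => 0d,
    bool b => b ? 1d : 0d,
    double d => d,
    // Stand-in for ExpressionUtility.ParseNumber (JS-style Number())
    string s => double.TryParse(s, NumberStyles.Float, CultureInfo.InvariantCulture, out var n) ? n : double.NaN,
    _ => double.NaN,
};

Console.WriteLine(ToNumber("123") == 123d);         // True: '123' == 123 under abstract equality
Console.WriteLine(ToNumber(true) == ToNumber("1")); // True: booleans coerce through 1/0
Console.WriteLine(double.IsNaN(ToNumber("abc")));   // True: NaN, so 'abc' == 0 is false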

Some files were not shown because too many files have changed in this diff.