Mirror of https://github.com/actions/runner.git, synced 2025-12-10 12:36:23 +00:00

Compare commits (59 commits)
Commit SHAs in this comparison:

f99c3e6ee8, 463496e4fb, 3f9f6f3994, 221f65874f, 9a21440691, 54bcc001e5, 7df164d2c7, a54f380b0e,
8b184c3871, b56b161118, 69aca04de1, b3a60e6b06, 334df748d1, b08f962182, b8144769c6, 2a00363a90,
a1c09806c3, c0776daddb, b5b7986cd6, 53d69ff441, bca18f71d0, 1b8efb99f6, 0b2c71fc31, 60af948051,
ff775ca101, f74be39e77, 1eb15f28a7, afe4fc8446, a12731d34d, 18f2450d71, 2c5f29c3ca, c9de9a8699,
68ff57dbc4, c774eb8d46, f184048a9a, 338d83a941, 0b074a3e93, 25faeabaa8, b121ef832b, 170033c92b,
f9c4e17fd9, 646da708ba, bf8236344b, 720f16aef6, f77066a6a8, df83df2a32, 97b2254146, 7f72ba9e48,
f8ae5bb1a7, a5631456a2, 65dfa460ba, 80ee51f164, c95883f28e, 6e940643a9, 629f2384a4, c3bf70becb,
8b65f5f9df, 5f1efec208, 20d82ad357
devcontainer.json
@@ -4,7 +4,7 @@
  "features": {
    "ghcr.io/devcontainers/features/docker-in-docker:1": {},
    "ghcr.io/devcontainers/features/dotnet": {
-      "version": "8.0.412"
+      "version": "8.0.416"
    },
    "ghcr.io/devcontainers/features/node:1": {
      "version": "20"
.github/ISSUE_TEMPLATE/config.yml (4 changed lines)
@@ -1,8 +1,8 @@
blank_issues_enabled: false
contact_links:
  - name: 🛑 Request a feature in the runner application
-    url: https://github.com/orgs/community/discussions/categories/actions-and-packages
-    about: If you have feature requests for GitHub Actions, please use the Actions and Packages section on the Github Product Feedback page.
+    url: https://github.com/orgs/community/discussions/categories/actions
+    about: If you have feature requests for GitHub Actions, please use the Actions section on the Github Product Feedback page.
  - name: ✅ Support for GitHub Actions
    url: https://github.community/c/code-to-cloud/52
    about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum.
.github/workflows/build.yml (50 changed lines)
@@ -14,6 +14,9 @@ on:
    paths-ignore:
      - '**.md'

+permissions:
+  contents: read
+
jobs:
  build:
    strategy:
@@ -75,8 +78,53 @@ jobs:
      # Upload runner package tar.gz/zip as artifact
      - name: Publish Artifact
        if: github.event_name != 'pull_request'
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
        with:
          name: runner-package-${{ matrix.runtime }}
          path: |
            _package
+
+  docker:
+    strategy:
+      matrix:
+        os: [ ubuntu-latest, ubuntu-24.04-arm ]
+        include:
+          - os: ubuntu-latest
+            docker_platform: linux/amd64
+          - os: ubuntu-24.04-arm
+            docker_platform: linux/arm64
+    runs-on: ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v5
+
+      - name: Get latest runner version
+        id: latest_runner
+        uses: actions/github-script@v7
+        with:
+          github-token: ${{secrets.GITHUB_TOKEN}}
+          script: |
+            const release = await github.rest.repos.getLatestRelease({
+              owner: 'actions',
+              repo: 'runner',
+            });
+            const version = release.data.tag_name.replace(/^v/, '');
+            core.setOutput('version', version);
+
+      - name: Setup Docker buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Build Docker image
+        uses: docker/build-push-action@v6
+        with:
+          context: ./images
+          load: true
+          platforms: ${{ matrix.docker_platform }}
+          tags: |
+            ${{ github.sha }}:latest
+          build-args: |
+            RUNNER_VERSION=${{ steps.latest_runner.outputs.version }}
+
+      - name: Test Docker image
+        run: |
+          docker run --rm ${{ github.sha }}:latest ./run.sh --version
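The new docker job resolves the latest published runner release with actions/github-script before building the test image. A rough local equivalent of that lookup, assuming only curl and jq are available, is:

```bash
# Sketch of the same lookup the docker job performs: fetch the latest
# actions/runner release and strip the leading "v" from the tag name.
RUNNER_VERSION=$(curl -s https://api.github.com/repos/actions/runner/releases/latest \
  | jq -r '.tag_name | ltrimstr("v")')
echo "latest runner version: $RUNNER_VERSION"
```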
.github/workflows/close-bugs-bot.yml (2 changed lines)
@@ -7,7 +7,7 @@ jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/stale@v9
+      - uses: actions/stale@v10
        with:
          close-issue-message: "This issue does not seem to be a problem with the runner application, it concerns the GitHub actions platform more generally. Could you please post your feedback on the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃"
          exempt-issue-labels: "keep"
.github/workflows/close-features-bot.yml (4 changed lines)
@@ -7,9 +7,9 @@ jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/stale@v9
+      - uses: actions/stale@v10
        with:
-          close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions-and-packages) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
+          close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
          exempt-issue-labels: "keep"
          stale-issue-label: "actions-feature"
          only-labels: "actions-feature"
.github/workflows/codeql.yml (4 changed lines)
@@ -27,7 +27,7 @@ jobs:

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
+        uses: github/codeql-action/init@v4
        # Override language selection by uncommenting this and choosing your languages
        # with:
        #   languages: go, javascript, csharp, python, cpp, java
@@ -38,4 +38,4 @@ jobs:
        working-directory: src

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3
+        uses: github/codeql-action/analyze@v4
.github/workflows/dependency-check.yml (new file, 211 lines)
@@ -0,0 +1,211 @@
name: Dependency Status Check

on:
  workflow_dispatch:
    inputs:
      check_type:
        description: "Type of dependency check"
        required: false
        default: "all"
        type: choice
        options:
          - all
          - node
          - dotnet
          - docker
          - npm
  schedule:
    - cron: "0 11 * * 1" # Weekly on Monday at 11 AM

jobs:
  dependency-status:
    runs-on: ubuntu-latest
    outputs:
      node20-status: ${{ steps.check-versions.outputs.node20-status }}
      node24-status: ${{ steps.check-versions.outputs.node24-status }}
      dotnet-status: ${{ steps.check-versions.outputs.dotnet-status }}
      docker-status: ${{ steps.check-versions.outputs.docker-status }}
      buildx-status: ${{ steps.check-versions.outputs.buildx-status }}
      npm-vulnerabilities: ${{ steps.check-versions.outputs.npm-vulnerabilities }}
      open-dependency-prs: ${{ steps.check-prs.outputs.open-dependency-prs }}
    steps:
      - uses: actions/checkout@v5
      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: "20"

      - name: Check dependency versions
        id: check-versions
        run: |
          echo "## Dependency Status Report" >> $GITHUB_STEP_SUMMARY
          echo "Generated on: $(date)" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY

          # Check Node versions
          if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "node" ]]; then
            echo "### Node.js Versions" >> $GITHUB_STEP_SUMMARY

            VERSIONS_JSON=$(curl -s https://raw.githubusercontent.com/actions/node-versions/main/versions-manifest.json)
            LATEST_NODE20=$(echo "$VERSIONS_JSON" | jq -r '.[] | select(.version | startswith("20.")) | .version' | head -1)
            LATEST_NODE24=$(echo "$VERSIONS_JSON" | jq -r '.[] | select(.version | startswith("24.")) | .version' | head -1)

            CURRENT_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
            CURRENT_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)

            NODE20_STATUS="✅ up-to-date"
            NODE24_STATUS="✅ up-to-date"

            if [ "$CURRENT_NODE20" != "$LATEST_NODE20" ]; then
              NODE20_STATUS="⚠️ outdated"
            fi

            if [ "$CURRENT_NODE24" != "$LATEST_NODE24" ]; then
              NODE24_STATUS="⚠️ outdated"
            fi

            echo "| Version | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
            echo "|---------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
            echo "| Node 20 | $CURRENT_NODE20 | $LATEST_NODE20 | $NODE20_STATUS |" >> $GITHUB_STEP_SUMMARY
            echo "| Node 24 | $CURRENT_NODE24 | $LATEST_NODE24 | $NODE24_STATUS |" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY

            echo "node20-status=$NODE20_STATUS" >> $GITHUB_OUTPUT
            echo "node24-status=$NODE24_STATUS" >> $GITHUB_OUTPUT
          fi

          # Check .NET version
          if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "dotnet" ]]; then
            echo "### .NET SDK Version" >> $GITHUB_STEP_SUMMARY

            current_dotnet_version=$(jq -r .sdk.version ./src/global.json)
            current_major_minor=$(echo "$current_dotnet_version" | cut -d '.' -f 1,2)
            latest_dotnet_version=$(curl -sb -H "Accept: application/json" "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/$current_major_minor/latest.version")

            DOTNET_STATUS="✅ up-to-date"
            if [ "$current_dotnet_version" != "$latest_dotnet_version" ]; then
              DOTNET_STATUS="⚠️ outdated"
            fi

            echo "| Component | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
            echo "|-----------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
            echo "| .NET SDK | $current_dotnet_version | $latest_dotnet_version | $DOTNET_STATUS |" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY

            echo "dotnet-status=$DOTNET_STATUS" >> $GITHUB_OUTPUT
          fi

          # Check Docker versions
          if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "docker" ]]; then
            echo "### Docker Versions" >> $GITHUB_STEP_SUMMARY

            current_docker=$(grep "ARG DOCKER_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
            current_buildx=$(grep "ARG BUILDX_VERSION=" ./images/Dockerfile | cut -d'=' -f2)

            latest_docker=$(curl -s https://download.docker.com/linux/static/stable/x86_64/ | grep -o 'docker-[0-9]*\.[0-9]*\.[0-9]*\.tgz' | sort -V | tail -n 1 | sed 's/docker-\(.*\)\.tgz/\1/')
            latest_buildx=$(curl -s https://api.github.com/repos/docker/buildx/releases/latest | jq -r '.tag_name' | sed 's/^v//')

            DOCKER_STATUS="✅ up-to-date"
            BUILDX_STATUS="✅ up-to-date"

            if [ "$current_docker" != "$latest_docker" ]; then
              DOCKER_STATUS="⚠️ outdated"
            fi

            if [ "$current_buildx" != "$latest_buildx" ]; then
              BUILDX_STATUS="⚠️ outdated"
            fi

            echo "| Component | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
            echo "|-----------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
            echo "| Docker | $current_docker | $latest_docker | $DOCKER_STATUS |" >> $GITHUB_STEP_SUMMARY
            echo "| Docker Buildx | $current_buildx | $latest_buildx | $BUILDX_STATUS |" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY

            echo "docker-status=$DOCKER_STATUS" >> $GITHUB_OUTPUT
            echo "buildx-status=$BUILDX_STATUS" >> $GITHUB_OUTPUT
          fi

          # Check npm vulnerabilities
          if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "npm" ]]; then
            echo "### NPM Security Audit" >> $GITHUB_STEP_SUMMARY

            cd src/Misc/expressionFunc/hashFiles
            npm install --silent

            AUDIT_OUTPUT=""
            AUDIT_EXIT_CODE=0
            # Run npm audit and capture output and exit code
            if ! AUDIT_OUTPUT=$(npm audit --json 2>&1); then
              AUDIT_EXIT_CODE=$?
            fi

            # Check if output is valid JSON
            if echo "$AUDIT_OUTPUT" | jq . >/dev/null 2>&1; then
              VULN_COUNT=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.total // 0')
              # Ensure VULN_COUNT is a number
              VULN_COUNT=$(echo "$VULN_COUNT" | grep -o '[0-9]*' | head -1)
              VULN_COUNT=${VULN_COUNT:-0}

              NPM_STATUS="✅ no vulnerabilities"
              if [ "$VULN_COUNT" -gt 0 ] 2>/dev/null; then
                NPM_STATUS="⚠️ $VULN_COUNT vulnerabilities found"

                # Get vulnerability details
                HIGH_VULNS=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.high // 0')
                CRITICAL_VULNS=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.critical // 0')

                echo "| Severity | Count |" >> $GITHUB_STEP_SUMMARY
                echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
                echo "| Critical | $CRITICAL_VULNS |" >> $GITHUB_STEP_SUMMARY
                echo "| High | $HIGH_VULNS |" >> $GITHUB_STEP_SUMMARY
                echo "" >> $GITHUB_STEP_SUMMARY
              else
                echo "No npm vulnerabilities found ✅" >> $GITHUB_STEP_SUMMARY
                echo "" >> $GITHUB_STEP_SUMMARY
              fi
            else
              NPM_STATUS="❌ npm audit failed"
              echo "npm audit failed to run or returned invalid JSON ❌" >> $GITHUB_STEP_SUMMARY
              echo "Exit code: $AUDIT_EXIT_CODE" >> $GITHUB_STEP_SUMMARY
              echo "Output: $AUDIT_OUTPUT" >> $GITHUB_STEP_SUMMARY
              echo "" >> $GITHUB_STEP_SUMMARY
            fi

            echo "npm-vulnerabilities=$NPM_STATUS" >> $GITHUB_OUTPUT
          fi

      - name: Check for open dependency PRs
        id: check-prs
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          echo "### Open Dependency PRs" >> $GITHUB_STEP_SUMMARY

          # Get open PRs with dependency label
          OPEN_PRS=$(gh pr list --label "dependencies" --state open --json number,title,url)
          PR_COUNT=$(echo "$OPEN_PRS" | jq '. | length')

          if [ "$PR_COUNT" -gt 0 ]; then
            echo "Found $PR_COUNT open dependency PR(s):" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "$OPEN_PRS" | jq -r '.[] | "- [#\(.number)](\(.url)) \(.title)"' >> $GITHUB_STEP_SUMMARY
          else
            echo "No open dependency PRs found ✅" >> $GITHUB_STEP_SUMMARY
          fi

          echo "" >> $GITHUB_STEP_SUMMARY
          echo "open-dependency-prs=$PR_COUNT" >> $GITHUB_OUTPUT

      - name: Summary
        run: |
          echo "### Summary" >> $GITHUB_STEP_SUMMARY
          echo "- Check for open PRs with the \`dependency\` label before releases" >> $GITHUB_STEP_SUMMARY
          echo "- Review and merge dependency updates regularly" >> $GITHUB_STEP_SUMMARY
          echo "- Critical vulnerabilities should be addressed immediately" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "**Automated workflows run weekly to check for updates:**" >> $GITHUB_STEP_SUMMARY
          echo "- Node.js versions (Mondays at 6 AM)" >> $GITHUB_STEP_SUMMARY
          echo "- NPM audit fix (Mondays at 7 AM)" >> $GITHUB_STEP_SUMMARY
          echo "- .NET SDK updates (Mondays at midnight)" >> $GITHUB_STEP_SUMMARY
          echo "- Docker/Buildx updates (Mondays at midnight)" >> $GITHUB_STEP_SUMMARY
.github/workflows/docker-buildx-upgrade.yml (34 changed lines)
@@ -2,7 +2,7 @@ name: "Docker/Buildx Version Upgrade"

on:
  schedule:
-    - cron: '0 0 * * 1' # Run every Monday at midnight
+    - cron: "0 0 * * 1" # Run every Monday at midnight
  workflow_dispatch: # Allow manual triggering

jobs:
@@ -134,11 +134,33 @@ jobs:
          git commit -a -m "$commit_message"
          git push --force origin "$branch_name"

-          # Create PR
-          pr_body="Upgrades Docker version from ${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }} to ${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Docker Buildx version from ${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }} to ${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}.\n\n"
-          pr_body+="Release notes: https://docs.docker.com/engine/release-notes/\n\n"
-          pr_body+="---\n\nAutogenerated by [Docker/Buildx Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/docker-buildx-upgrade.yml)"
+          # Create PR body using here-doc for proper formatting
+          cat > pr_body.txt << 'EOF'
+          Automated Docker and Buildx version update:
+
+          - Docker: ${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }} → ${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }}
+          - Buildx: ${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }} → ${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}
+
+          This update ensures we're using the latest stable Docker and Buildx versions for security and performance improvements.
+
+          **Release notes:** https://docs.docker.com/engine/release-notes/
+
+          **Next steps:**
+          - Review the version changes
+          - Verify container builds work as expected
+          - Test multi-platform builds if applicable
+          - Merge when ready
+
+          ---
+
+          Autogenerated by [Docker/Buildx Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/docker-buildx-upgrade.yml)
+          EOF
+
+          # Create PR
          gh pr create -B main -H "$branch_name" \
            --title "$pr_title" \
-            --body "$pr_body"
+            --label "dependencies" \
+            --label "dependencies-weekly-check" \
+            --label "dependencies-not-dependabot" \
+            --label "docker" \
+            --body-file pr_body.txt
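The switch from an inline `pr_body` string to a here-doc is about formatting: escaped `\n` sequences inside a plain double-quoted shell assignment are passed through literally, while a here-doc written to a file keeps real line breaks, which is why the PR body now goes through `--body-file`. A minimal illustration in plain bash (outside the workflow, names are made up for the demo):

```bash
# The inline form keeps "\n" as two literal characters in the PR body text...
body_inline="Upgrades Docker version\n\nRelease notes: https://docs.docker.com/engine/release-notes/"
printf '%s\n' "$body_inline"   # prints the backslash-n sequences verbatim

# ...whereas a here-doc preserves real newlines in the file that gets passed on.
cat > pr_body.txt <<'EOF'
Upgrades Docker version

Release notes: https://docs.docker.com/engine/release-notes/
EOF
cat pr_body.txt                # prints properly separated lines
```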
.github/workflows/docker-publish.yml (new file, 75 lines)
@@ -0,0 +1,75 @@
name: Publish DockerImage from Release Branch

on:
  workflow_dispatch:
    inputs:
      releaseBranch:
        description: 'Release Branch (releases/mXXX)'
        required: true

jobs:
  publish-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      id-token: write
      attestations: write
    env:
      REGISTRY: ghcr.io
      IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
    steps:
      - name: Checkout repository
        uses: actions/checkout@v5
        with:
          ref: ${{ github.event.inputs.releaseBranch }}

      - name: Compute image version
        id: image
        uses: actions/github-script@v8.0.0
        with:
          script: |
            const fs = require('fs');
            const runnerVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '');
            console.log(`Using runner version ${runnerVersion}`);
            if (!/^\d+\.\d+\.\d+$/.test(runnerVersion)) {
              throw new Error(`Invalid runner version: ${runnerVersion}`);
            }
            core.setOutput('version', runnerVersion);

      - name: Setup Docker buildx
        uses: docker/setup-buildx-action@v3

      - name: Log into registry ${{ env.REGISTRY }}
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v6
        with:
          context: ./images
          platforms: |
            linux/amd64
            linux/arm64
          tags: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
          build-args: |
            RUNNER_VERSION=${{ steps.image.outputs.version }}
          push: true
          labels: |
            org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
            org.opencontainers.image.licenses=MIT
          annotations: |
            org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}

      - name: Generate attestation
        uses: actions/attest-build-provenance@v3
        with:
          subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          subject-digest: ${{ steps.build-and-push.outputs.digest }}
          push-to-registry: true
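Once this workflow has pushed a release image, the attached provenance can be checked against the registry from any machine. A hedged sketch, assuming a recent gh CLI with the `attestation` command and that the image lands at `ghcr.io/actions/actions-runner` as configured above:

```bash
# Pull a published image and verify its build provenance attestation.
docker pull ghcr.io/actions/actions-runner:latest
gh attestation verify oci://ghcr.io/actions/actions-runner:latest --owner actions
```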
.github/workflows/dotnet-upgrade.yml (4 changed lines)
@@ -2,7 +2,7 @@ name: "DotNet SDK Upgrade"

on:
  schedule:
-    - cron: '0 0 * * 1'
+    - cron: "0 8 * * 1" # Weekly on Monday at 8 AM UTC (independent of Node.js/NPM)
  workflow_dispatch:

jobs:
@@ -96,7 +96,7 @@ jobs:
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      run: |
-        gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
+        gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --label "dependencies" --label "dependencies-weekly-check" --label "dependencies-not-dependabot" --label "dotnet" --body "
        https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
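The upgrade job keys off the same channel file that the dependency status check reads. A small local sketch of that lookup, using the path and URL the workflows above rely on:

```bash
# Compare the pinned SDK in src/global.json against the latest patch in its channel.
current=$(jq -r .sdk.version ./src/global.json)
channel=$(echo "$current" | cut -d. -f1,2)
latest=$(curl -s "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${channel}/latest.version")
echo "current=$current latest=$latest"
```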
.github/workflows/node-upgrade.yml (new file, 194 lines)
@@ -0,0 +1,194 @@
name: Auto Update Node Version

on:
  schedule:
    - cron: "0 6 * * 1" # Weekly, every Monday
  workflow_dispatch:

jobs:
  update-node:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - name: Get latest Node versions
        id: node-versions
        run: |
          # Get latest Node.js releases from official GitHub releases
          echo "Fetching latest Node.js releases..."

          # Get latest v20.x release
          LATEST_NODE20=$(curl -s https://api.github.com/repos/nodejs/node/releases | \
            jq -r '.[] | select(.tag_name | startswith("v20.")) | .tag_name' | \
            head -1 | sed 's/^v//')

          # Get latest v24.x release
          LATEST_NODE24=$(curl -s https://api.github.com/repos/nodejs/node/releases | \
            jq -r '.[] | select(.tag_name | startswith("v24.")) | .tag_name' | \
            head -1 | sed 's/^v//')

          echo "Found Node.js releases: 20=$LATEST_NODE20, 24=$LATEST_NODE24"

          # Verify these versions are available in alpine_nodejs releases
          echo "Verifying availability in alpine_nodejs..."
          ALPINE_RELEASES=$(curl -s https://api.github.com/repos/actions/alpine_nodejs/releases | jq -r '.[].tag_name')

          if ! echo "$ALPINE_RELEASES" | grep -q "^v$LATEST_NODE20$"; then
            echo "::warning title=Node 20 Fallback::Node 20 version $LATEST_NODE20 not found in alpine_nodejs releases, using fallback"
            # Fall back to latest available alpine_nodejs v20 release
            LATEST_NODE20=$(echo "$ALPINE_RELEASES" | grep "^v20\." | head -1 | sed 's/^v//')
            echo "Using latest available alpine_nodejs Node 20: $LATEST_NODE20"
          fi

          if ! echo "$ALPINE_RELEASES" | grep -q "^v$LATEST_NODE24$"; then
            echo "::warning title=Node 24 Fallback::Node 24 version $LATEST_NODE24 not found in alpine_nodejs releases, using fallback"
            # Fall back to latest available alpine_nodejs v24 release
            LATEST_NODE24=$(echo "$ALPINE_RELEASES" | grep "^v24\." | head -1 | sed 's/^v//')
            echo "Using latest available alpine_nodejs Node 24: $LATEST_NODE24"
          fi

          # Validate that we have non-empty version numbers
          if [ -z "$LATEST_NODE20" ] || [ "$LATEST_NODE20" = "" ]; then
            echo "::error title=Invalid Node 20 Version::Failed to determine valid Node 20 version. Got: '$LATEST_NODE20'"
            echo "Available alpine_nodejs releases:"
            echo "$ALPINE_RELEASES" | head -10
            exit 1
          fi

          if [ -z "$LATEST_NODE24" ] || [ "$LATEST_NODE24" = "" ]; then
            echo "::error title=Invalid Node 24 Version::Failed to determine valid Node 24 version. Got: '$LATEST_NODE24'"
            echo "Available alpine_nodejs releases:"
            echo "$ALPINE_RELEASES" | head -10
            exit 1
          fi

          # Additional validation: ensure versions match expected format (x.y.z)
          if ! echo "$LATEST_NODE20" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
            echo "::error title=Invalid Node 20 Format::Node 20 version '$LATEST_NODE20' does not match expected format (x.y.z)"
            exit 1
          fi

          if ! echo "$LATEST_NODE24" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
            echo "::error title=Invalid Node 24 Format::Node 24 version '$LATEST_NODE24' does not match expected format (x.y.z)"
            exit 1
          fi

          echo "✅ Validated Node versions: 20=$LATEST_NODE20, 24=$LATEST_NODE24"
          echo "latest_node20=$LATEST_NODE20" >> $GITHUB_OUTPUT
          echo "latest_node24=$LATEST_NODE24" >> $GITHUB_OUTPUT

          # Check current versions in externals.sh
          CURRENT_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
          CURRENT_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)

          echo "current_node20=$CURRENT_NODE20" >> $GITHUB_OUTPUT
          echo "current_node24=$CURRENT_NODE24" >> $GITHUB_OUTPUT

          # Determine if updates are needed
          NEEDS_UPDATE20="false"
          NEEDS_UPDATE24="false"

          if [ "$CURRENT_NODE20" != "$LATEST_NODE20" ]; then
            NEEDS_UPDATE20="true"
            echo "::notice title=Node 20 Update Available::Current: $CURRENT_NODE20 → Latest: $LATEST_NODE20"
          fi

          if [ "$CURRENT_NODE24" != "$LATEST_NODE24" ]; then
            NEEDS_UPDATE24="true"
            echo "::notice title=Node 24 Update Available::Current: $CURRENT_NODE24 → Latest: $LATEST_NODE24"
          fi

          if [ "$NEEDS_UPDATE20" == "false" ] && [ "$NEEDS_UPDATE24" == "false" ]; then
            echo "::notice title=No Updates Needed::All Node.js versions are up to date"
          fi

          echo "needs_update20=$NEEDS_UPDATE20" >> $GITHUB_OUTPUT
          echo "needs_update24=$NEEDS_UPDATE24" >> $GITHUB_OUTPUT

      - name: Update externals.sh and create PR
        if: steps.node-versions.outputs.needs_update20 == 'true' || steps.node-versions.outputs.needs_update24 == 'true'
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          # Final validation before making changes
          NODE20_VERSION="${{ steps.node-versions.outputs.latest_node20 }}"
          NODE24_VERSION="${{ steps.node-versions.outputs.latest_node24 }}"

          echo "Final validation of versions before PR creation:"
          echo "Node 20: '$NODE20_VERSION'"
          echo "Node 24: '$NODE24_VERSION'"

          # Validate versions are not empty and match expected format
          if [ -z "$NODE20_VERSION" ] || ! echo "$NODE20_VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
            echo "::error title=Invalid Node 20 Version::Refusing to create PR with invalid Node 20 version: '$NODE20_VERSION'"
            exit 1
          fi

          if [ -z "$NODE24_VERSION" ] || ! echo "$NODE24_VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
            echo "::error title=Invalid Node 24 Version::Refusing to create PR with invalid Node 24 version: '$NODE24_VERSION'"
            exit 1
          fi

          echo "✅ All versions validated successfully"

          # Update the files
          if [ "${{ steps.node-versions.outputs.needs_update20 }}" == "true" ]; then
            sed -i 's/NODE20_VERSION="[^"]*"/NODE20_VERSION="'"$NODE20_VERSION"'"/' src/Misc/externals.sh
          fi

          if [ "${{ steps.node-versions.outputs.needs_update24 }}" == "true" ]; then
            sed -i 's/NODE24_VERSION="[^"]*"/NODE24_VERSION="'"$NODE24_VERSION"'"/' src/Misc/externals.sh
          fi

          # Verify the changes were applied correctly
          echo "Verifying changes in externals.sh:"
          grep "NODE20_VERSION=" src/Misc/externals.sh
          grep "NODE24_VERSION=" src/Misc/externals.sh

          # Ensure we actually have valid versions in the file
          UPDATED_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
          UPDATED_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)

          if [ -z "$UPDATED_NODE20" ] || [ -z "$UPDATED_NODE24" ]; then
            echo "::error title=Update Failed::Failed to properly update externals.sh"
            echo "Updated Node 20: '$UPDATED_NODE20'"
            echo "Updated Node 24: '$UPDATED_NODE24'"
            exit 1
          fi

          # Configure git
          git config --global user.name "github-actions[bot]"
          git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"

          # Create branch and commit changes
          branch_name="chore/update-node"
          git checkout -b "$branch_name"
          git commit -a -m "chore: update Node versions (20: $NODE20_VERSION, 24: $NODE24_VERSION)"
          git push --force origin "$branch_name"

          # Create PR body using here-doc for proper formatting
          cat > pr_body.txt << EOF
          Automated Node.js version update:

          - Node 20: ${{ steps.node-versions.outputs.current_node20 }} → $NODE20_VERSION
          - Node 24: ${{ steps.node-versions.outputs.current_node24 }} → $NODE24_VERSION

          This update ensures we're using the latest stable Node.js versions for security and performance improvements.

          **Note**: When updating Node versions, remember to also create a new release of alpine_nodejs at the updated version following the instructions at: https://github.com/actions/alpine_nodejs

          ---

          Autogenerated by [Node Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/node-upgrade.yml)
          EOF

          # Create PR
          gh pr create -B main -H "$branch_name" \
            --title "chore: update Node versions" \
            --label "dependencies" \
            --label "dependencies-weekly-check" \
            --label "dependencies-not-dependabot" \
            --label "node" \
            --label "javascript" \
            --body-file pr_body.txt

          echo "::notice title=PR Created::Successfully created Node.js version update PR on branch $branch_name"
.github/workflows/npm-audit-typescript.yml (new file, 235 lines)
@@ -0,0 +1,235 @@
name: NPM Audit Fix with TypeScript Auto-Fix

on:
  workflow_dispatch:

jobs:
  npm-audit-with-ts-fix:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: "20"
      - name: NPM install and audit fix with TypeScript auto-repair
        working-directory: src/Misc/expressionFunc/hashFiles
        run: |
          npm install

          # Check for vulnerabilities first
          echo "Checking for npm vulnerabilities..."
          if npm audit --audit-level=moderate; then
            echo "✅ No moderate or higher vulnerabilities found"
            exit 0
          fi

          echo "⚠️ Vulnerabilities found, attempting npm audit fix..."

          # Attempt audit fix and capture the result
          if npm audit fix; then
            echo "✅ npm audit fix completed successfully"
            AUDIT_FIX_STATUS="success"
          else
            echo "⚠️ npm audit fix failed or had issues"
            AUDIT_FIX_STATUS="failed"

            # Try audit fix with --force as a last resort for critical/high vulns only
            echo "Checking if critical/high vulnerabilities remain..."
            if ! npm audit --audit-level=high; then
              echo "🚨 Critical/high vulnerabilities remain, attempting --force fix..."
              if npm audit fix --force; then
                echo "⚠️ npm audit fix --force completed (may have breaking changes)"
                AUDIT_FIX_STATUS="force-fixed"
              else
                echo "❌ npm audit fix --force also failed"
                AUDIT_FIX_STATUS="force-failed"
              fi
            else
              echo "✅ Only moderate/low vulnerabilities remain after failed fix"
              AUDIT_FIX_STATUS="partial-success"
            fi
          fi

          echo "AUDIT_FIX_STATUS=$AUDIT_FIX_STATUS" >> $GITHUB_ENV

          # Try to fix TypeScript issues automatically
          echo "Attempting to fix TypeScript compatibility issues..."

          # Check if build fails
          if ! npm run build 2>/dev/null; then
            echo "Build failed, attempting automated fixes..."

            # Common fix 1: Update @types/node to latest compatible version
            echo "Trying to update @types/node to latest version..."
            npm update @types/node

            # Common fix 2: If that doesn't work, try installing a specific known-good version
            if ! npm run build 2>/dev/null; then
              echo "Trying specific @types/node version..."
              # Try Node 20 compatible version
              npm install --save-dev @types/node@^20.0.0
            fi

            # Common fix 3: Clear node_modules and reinstall if still failing
            if ! npm run build 2>/dev/null; then
              echo "Clearing node_modules and reinstalling..."
              rm -rf node_modules package-lock.json
              npm install

              # Re-run audit fix after clean install if it was successful before
              if [[ "$AUDIT_FIX_STATUS" == "success" || "$AUDIT_FIX_STATUS" == "force-fixed" ]]; then
                echo "Re-running npm audit fix after clean install..."
                npm audit fix || echo "Audit fix failed on second attempt"
              fi
            fi

            # Common fix 4: Try updating TypeScript itself
            if ! npm run build 2>/dev/null; then
              echo "Trying to update TypeScript..."
              npm update typescript
            fi

            # Final check
            if npm run build 2>/dev/null; then
              echo "✅ Successfully fixed TypeScript issues automatically"
            else
              echo "⚠️ Could not automatically fix TypeScript issues"
            fi
          else
            echo "✅ Build passes after audit fix"
          fi

      - name: Create PR if changes exist
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          HUSKY: 0 # Disable husky hooks for automated commits
        run: |
          # Check if there are any changes
          if [ -n "$(git status --porcelain)" ]; then
            # Configure git
            git config --global user.name "github-actions[bot]"
            git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"

            # Create branch and commit changes
            branch_name="chore/npm-audit-fix-with-ts-repair"
            git checkout -b "$branch_name"

            # Commit with --no-verify to skip husky hooks
            git commit -a -m "chore: npm audit fix with automated TypeScript compatibility fixes" --no-verify
            git push --force origin "$branch_name"

            # Check final build status and gather info about what was changed
            build_status="✅ Build passes"
            fixes_applied=""
            cd src/Misc/expressionFunc/hashFiles

            # Check what packages were updated
            if git diff HEAD~1 package.json | grep -q "@types/node"; then
              fixes_applied+="\n- Updated @types/node version for TypeScript compatibility"
            fi
            if git diff HEAD~1 package.json | grep -q "typescript"; then
              fixes_applied+="\n- Updated TypeScript version"
            fi
            if git diff HEAD~1 package-lock.json | grep -q "resolved"; then
              fixes_applied+="\n- Updated package dependencies via npm audit fix"
            fi

            if ! npm run build 2>/dev/null; then
              build_status="⚠️ Build fails - manual review required"
            fi
            cd - > /dev/null

            # Create enhanced PR body using here-doc for proper formatting
            audit_status_msg=""
            case "$AUDIT_FIX_STATUS" in
              "success")
                audit_status_msg="✅ **Audit Fix**: Completed successfully"
                ;;
              "partial-success")
                audit_status_msg="⚠️ **Audit Fix**: Partial success (only moderate/low vulnerabilities remain)"
                ;;
              "force-fixed")
                audit_status_msg="⚠️ **Audit Fix**: Completed with --force (may have breaking changes)"
                ;;
              "failed"|"force-failed")
                audit_status_msg="❌ **Audit Fix**: Failed to resolve vulnerabilities"
                ;;
              *)
                audit_status_msg="❓ **Audit Fix**: Status unknown"
                ;;
            esac

            if [[ "$build_status" == *"fails"* ]]; then
              cat > pr_body.txt << EOF
          Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.

          **Build Status**: ⚠️ Build fails - manual review required
          $audit_status_msg

          This workflow attempts to automatically fix TypeScript compatibility issues that may arise from npm audit fixes.

          ⚠️ **Manual Review Required**: The build is currently failing after automated fixes were attempted.

          Common issues and solutions:
          - Check for TypeScript version compatibility with Node.js types
          - Review breaking changes in updated dependencies
          - Consider pinning problematic dependency versions temporarily
          - Review tsconfig.json for compatibility settings

          **Automated Fix Strategy**:
          1. Run npm audit fix with proper error handling
          2. Update @types/node to latest compatible version
          3. Try Node 20 specific @types/node version if needed
          4. Clean reinstall dependencies if conflicts persist
          5. Update TypeScript compiler if necessary

          ---

          Autogenerated by [NPM Audit Fix with TypeScript Auto-Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit-ts-fix.yml)
          EOF
            else
              cat > pr_body.txt << EOF
          Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.

          **Build Status**: ✅ Build passes
          $audit_status_msg

          This workflow attempts to automatically fix TypeScript compatibility issues that may arise from npm audit fixes.

          ✅ **Ready to Merge**: All automated fixes were successful and the build passes.

          **Automated Fix Strategy**:
          1. Run npm audit fix with proper error handling
          2. Update @types/node to latest compatible version
          3. Try Node 20 specific @types/node version if needed
          4. Clean reinstall dependencies if conflicts persist
          5. Update TypeScript compiler if necessary

          ---

          Autogenerated by [NPM Audit Fix with TypeScript Auto-Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit-ts-fix.yml)
          EOF
            fi

            if [ -n "$fixes_applied" ]; then
              # Add the fixes applied section to the file
              sed -i "/This workflow attempts/a\\
          \\
          **Automated Fixes Applied**:$fixes_applied" pr_body.txt
            fi

            # Create PR with appropriate labels
            labels="dependencies,dependencies-not-dependabot,typescript,npm,security"
            if [[ "$build_status" == *"fails"* ]]; then
              labels="dependencies,dependencies-not-dependabot,typescript,npm,security,needs-manual-review"
            fi

            # Create PR
            gh pr create -B main -H "$branch_name" \
              --title "chore: npm audit fix with TypeScript auto-repair" \
              --label "$labels" \
              --body-file pr_body.txt
          else
            echo "No changes to commit"
          fi
.github/workflows/npm-audit.yml (new file, 137 lines)
@@ -0,0 +1,137 @@
name: NPM Audit Fix

on:
  schedule:
    - cron: "0 7 * * 1" # Weekly on Monday at 7 AM UTC
  workflow_dispatch:

jobs:
  npm-audit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5

      - name: Setup Node.js
        uses: actions/setup-node@v6
        with:
          node-version: "20"

      - name: NPM install and audit fix
        working-directory: src/Misc/expressionFunc/hashFiles
        run: |
          npm install

          # Check what vulnerabilities exist
          echo "=== Checking current vulnerabilities ==="
          npm audit || true

          # Apply audit fix --force to get security updates
          echo "=== Applying npm audit fix --force ==="
          npm audit fix --force

          # Test if build still works and set status
          echo "=== Testing build compatibility ==="
          if npm run all; then
            echo "✅ Build successful after audit fix"
            echo "AUDIT_FIX_STATUS=success" >> $GITHUB_ENV
          else
            echo "❌ Build failed after audit fix - will create PR with fix instructions"
            echo "AUDIT_FIX_STATUS=build_failed" >> $GITHUB_ENV
          fi

      - name: Create PR if changes exist
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          # Check if there are any changes
          if [ -n "$(git status --porcelain)" ]; then
            # Configure git
            git config --global user.name "github-actions[bot]"
            git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"

            # Create branch and commit changes
            branch_name="chore/npm-audit-fix-$(date +%Y%m%d)"
            git checkout -b "$branch_name"
            git add .
            git commit -m "chore: npm audit fix for hashFiles dependencies" --no-verify
            git push origin "$branch_name"

            # Create PR body based on what actually happened
            if [ "$AUDIT_FIX_STATUS" = "success" ]; then
              cat > pr_body.txt << 'EOF'
          Automated npm audit fix for security vulnerabilities in hashFiles dependencies.

          **✅ Full Fix Applied Successfully**
          This update addresses npm security advisories and ensures dependencies are secure and up-to-date.

          **Changes made:**
          - Applied `npm audit fix --force` to resolve security vulnerabilities
          - Updated package-lock.json with security patches
          - Verified build compatibility with `npm run all`

          **Next steps:**
          - Review the dependency changes
          - Verify the hashFiles functionality still works as expected
          - Merge when ready

          ---

          Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
          EOF
            elif [ "$AUDIT_FIX_STATUS" = "build_failed" ]; then
              cat > pr_body.txt << 'EOF'
          Automated npm audit fix for security vulnerabilities in hashFiles dependencies.

          **⚠️ Security Fixes Applied - Build Issues Need Manual Resolution**
          This update applies important security patches but causes build failures that require manual fixes.

          **Changes made:**
          - Applied `npm audit fix --force` to resolve security vulnerabilities
          - Updated package-lock.json with security patches

          **⚠️ Build Issues Detected:**
          The build fails after applying security fixes, likely due to TypeScript compatibility issues with updated `@types/node`.

          **Required Manual Fixes:**
          1. Review TypeScript compilation errors in the build output
          2. Update TypeScript configuration if needed
          3. Consider pinning `@types/node` to a compatible version
          4. Run `npm run all` locally to verify fixes

          **Next steps:**
          - **DO NOT merge until build issues are resolved**
          - Apply manual fixes for TypeScript compatibility
          - Test the hashFiles functionality still works as expected
          - Merge when build passes

          ---

          Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
          EOF
            else
              # Fallback case
              cat > pr_body.txt << 'EOF'
          Automated npm audit attempted for security vulnerabilities in hashFiles dependencies.

          **ℹ️ No Changes Applied**
          No security vulnerabilities were found or no changes were needed.

          ---

          Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
          EOF
            fi

            # Create PR
            gh pr create -B main -H "$branch_name" \
              --title "chore: npm audit fix for hashFiles dependencies" \
              --label "dependencies" \
              --label "dependencies-weekly-check" \
              --label "dependencies-not-dependabot" \
              --label "npm" \
              --label "typescript" \
              --label "security" \
              --body-file pr_body.txt
          else
            echo "✅ No changes to commit - npm audit fix did not modify any files"
          fi
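Before the scheduled Monday run, the same checks can be exercised locally. A minimal sketch using the commands the workflow itself runs, but without applying the `--force` fix:

```bash
# Reproduce the audit and the build check in the hashFiles package.
cd src/Misc/expressionFunc/hashFiles
npm install
npm audit --audit-level=moderate   # non-zero exit if moderate or higher vulnerabilities exist
npm run all                        # the build/test entry point the workflow verifies afterwards
```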
.github/workflows/release.yml (27 changed lines)
@@ -16,7 +16,7 @@ jobs:
      # Make sure ./releaseVersion match ./src/runnerversion
      # Query GitHub release ensure version is not used
      - name: Check version
-        uses: actions/github-script@v7.0.1
+        uses: actions/github-script@v8.0.0
        with:
          github-token: ${{secrets.GITHUB_TOKEN}}
          script: |
@@ -118,7 +118,7 @@ jobs:
      # Upload runner package tar.gz/zip as artifact.
      - name: Publish Artifact
        if: github.event_name != 'pull_request'
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
        with:
          name: runner-packages-${{ matrix.runtime }}
          path: |
@@ -133,37 +133,37 @@ jobs:

      # Download runner package tar.gz/zip produced by 'build' job
      - name: Download Artifact (win-x64)
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v6
        with:
          name: runner-packages-win-x64
          path: ./
      - name: Download Artifact (win-arm64)
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v6
        with:
          name: runner-packages-win-arm64
          path: ./
      - name: Download Artifact (osx-x64)
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v6
        with:
          name: runner-packages-osx-x64
          path: ./
      - name: Download Artifact (osx-arm64)
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v6
        with:
          name: runner-packages-osx-arm64
          path: ./
      - name: Download Artifact (linux-x64)
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v6
        with:
          name: runner-packages-linux-x64
          path: ./
      - name: Download Artifact (linux-arm)
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v6
        with:
          name: runner-packages-linux-arm
          path: ./
      - name: Download Artifact (linux-arm64)
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v6
        with:
          name: runner-packages-linux-arm64
          path: ./
@@ -171,7 +171,7 @@ jobs:
      # Create ReleaseNote file
      - name: Create ReleaseNote
        id: releaseNote
-        uses: actions/github-script@v7.0.1
+        uses: actions/github-script@v8.0.0
        with:
          github-token: ${{secrets.GITHUB_TOKEN}}
          script: |
@@ -300,7 +300,7 @@ jobs:

      - name: Compute image version
        id: image
-        uses: actions/github-script@v7.0.1
+        uses: actions/github-script@v8.0.0
        with:
          script: |
            const fs = require('fs');
@@ -334,11 +334,12 @@ jobs:
          push: true
          labels: |
            org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
-            org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
            org.opencontainers.image.licenses=MIT
+          annotations: |
+            org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}

      - name: Generate attestation
-        uses: actions/attest-build-provenance@v2
+        uses: actions/attest-build-provenance@v3
        with:
          subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          subject-digest: ${{ steps.build-and-push.outputs.digest }}
.github/workflows/stale-bot.yml (2 changed lines)
@@ -7,7 +7,7 @@ jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/stale@v9
+      - uses: actions/stale@v10
        with:
          stale-issue-message: "This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days."
          close-issue-message: "This issue was closed because it has been stalled for 15 days with no activity."
Husky hook script
@@ -1,6 +1 @@
-#!/usr/bin/env sh
-. "$(dirname -- "$0")/_/husky.sh"
-
-cd src/Misc/expressionFunc/hashFiles
-
-npx lint-staged
+cd src/Misc/expressionFunc/hashFiles && npx lint-staged
docs/dependency-management.md (new file, 217 lines)
@@ -0,0 +1,217 @@
# Runner Dependency Management Process

## Overview

This document outlines the automated dependency management process for the GitHub Actions Runner, designed to ensure we maintain up-to-date and secure dependencies while providing predictable release cycles.

## Release Schedule

- **Monthly Runner Releases**: New runner versions are released monthly
- **Weekly Dependency Checks**: Automated workflows check for dependency updates every Monday
- **Security Patches**: Critical security vulnerabilities are addressed immediately outside the regular schedule

## Automated Workflows

**Note**: These workflows are implemented across separate PRs for easier review and independent deployment. Each workflow includes comprehensive error handling and security-focused vulnerability detection.
### 1. Foundation Labels

- **Workflow**: `.github/workflows/setup-labels.yml` (PR #4024)
- **Purpose**: Creates consistent dependency labels for all automation workflows
- **Labels**: `dependencies`, `security`, `typescript`, `needs-manual-review`
- **Prerequisite**: Must be merged before other workflows for proper labeling

### 2. Node.js Version Updates

- **Workflow**: `.github/workflows/node-upgrade.yml`
- **Schedule**: Mondays at 6:00 AM UTC
- **Purpose**: Updates Node.js 20 and 24 versions in `src/Misc/externals.sh`
- **Source**: [nodejs.org](https://nodejs.org) and [actions/alpine_nodejs](https://github.com/actions/alpine_nodejs)
- **Priority**: First (NPM depends on current Node.js versions)

### 3. NPM Security Audit

- **Primary Workflow**: `.github/workflows/npm-audit.yml` ("NPM Audit Fix")
  - **Schedule**: Mondays at 7:00 AM UTC
  - **Purpose**: Automated security vulnerability detection and basic fixes
  - **Location**: `src/Misc/expressionFunc/hashFiles/`
  - **Features**: npm audit, security patch application, PR creation
  - **Dependency**: Runs after Node.js updates for optimal compatibility

- **Fallback Workflow**: `.github/workflows/npm-audit-typescript.yml` ("NPM Audit Fix with TypeScript Auto-Fix")
  - **Trigger**: Manual dispatch only
  - **Purpose**: Manual security audit with TypeScript compatibility fixes
  - **Use Case**: When scheduled workflow fails or needs custom intervention
  - **Features**: Enhanced TypeScript auto-repair, graduated security response
  - **How to Use**:
    1. If the scheduled "NPM Audit Fix" workflow fails, go to Actions tab
    2. Select "NPM Audit Fix with TypeScript Auto-Fix" workflow
    3. Click "Run workflow" and optionally specify fix level (auto/manual)
    4. Review the generated PR for TypeScript compatibility issues

### 4. .NET SDK Updates

- **Workflow**: `.github/workflows/dotnet-upgrade.yml`
- **Schedule**: Mondays at midnight UTC
- **Purpose**: Updates .NET SDK and package versions with build validation
- **Features**: Global.json updates, NuGet package management, compatibility checking
- **Independence**: Runs independently of Node.js/NPM updates

### 5. Docker/Buildx Updates

- **Workflow**: `.github/workflows/docker-buildx-upgrade.yml` ("Docker/Buildx Version Upgrade")
- **Schedule**: Mondays at midnight UTC
- **Purpose**: Updates Docker and Docker Buildx versions with multi-platform validation
- **Features**: Container security scanning, multi-architecture build testing
- **Independence**: Runs independently of other dependency updates

### 6. Dependency Monitoring

- **Workflow**: `.github/workflows/dependency-check.yml` ("Dependency Status Check")
- **Schedule**: Mondays at 11:00 AM UTC
- **Purpose**: Comprehensive status report of all dependencies with security audit
- **Features**: Multi-dependency checking, npm audit status, build validation, choice of specific component checks
- **Summary**: Runs last to capture results from all morning dependency updates
## Release Process Integration

### Pre-Release Checklist

Before each monthly runner release:

1. **Check Dependency PRs**:

   ```bash
   # List all open dependency PRs
   gh pr list --label "dependencies" --state open

   # List only automated weekly dependency updates
   gh pr list --label "dependencies-weekly-check" --state open

   # List only custom dependency automation (not dependabot)
   gh pr list --label "dependencies-not-dependabot" --state open
   ```

2. **Run Manual Dependency Check**:
   - Go to Actions tab → "Dependency Status Check" → "Run workflow"
   - Review the summary for any outdated dependencies

3. **Review and Merge Updates**:
   - Prioritize security-related updates
   - Test dependency updates in development environment
   - Merge approved dependency PRs
### Vulnerability Response

#### Critical Security Vulnerabilities

- **Response Time**: Within 24 hours
- **Process**:
  1. Assess impact on runner security
  2. Create hotfix branch if runner data security is affected
  3. Expedite patch release if necessary
  4. Document in security advisory if applicable

#### Non-Critical Vulnerabilities

- **Response Time**: Next monthly release
- **Process**:
  1. Evaluate if vulnerability affects runner functionality
  2. Include fix in regular dependency update cycle
  3. Document in release notes
## Monitoring and Alerts

### GitHub Actions Workflow Status

- All dependency workflows create PRs with the `dependencies` label
- Failed workflows should be investigated immediately
- Weekly dependency status reports are generated automatically

### Manual Checks

You can manually trigger dependency checks:

- **Full Status**: Run the "Dependency Status Check" workflow
- **Specific Component**: Use the dropdown to check individual dependencies (or pass an input from the CLI, as sketched below)
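Both checks can also be started without the web UI. A sketch, assuming the workflow accepts a component choice input named `component` (the input name and values are assumptions):

```bash
# Full status report
gh workflow run dependency-check.yml

# Single component check (hypothetical input name/value)
gh workflow run dependency-check.yml -f component=node
```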
## Dependency Labels

All automated dependency PRs are tagged with labels for easy filtering and management (filtering examples follow the primary label list):

### Primary Labels

- **`dependencies`**: All automated dependency-related PRs
- **`dependencies-weekly-check`**: Automated weekly dependency updates from scheduled workflows
- **`dependencies-not-dependabot`**: Custom dependency automation (not created by dependabot)
- **`security`**: Security vulnerability fixes and patches
- **`typescript`**: TypeScript compatibility and type definition updates
- **`needs-manual-review`**: Complex updates requiring human verification
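The primary labels combine naturally with `gh pr list` filters, for example:

```bash
# Automated weekly updates that still need a human look
gh pr list --label "dependencies-weekly-check" --label "needs-manual-review" --state open

# Security fixes produced by the custom automation (not dependabot)
gh pr list --label "dependencies-not-dependabot" --label "security" --state open
```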
### Technology-Specific Labels

- **`node`**: Node.js version updates
- **`javascript`**: JavaScript runtime and tooling updates
- **`npm`**: NPM package and security updates
- **`dotnet`**: .NET SDK and NuGet package updates
- **`docker`**: Docker and container tooling updates

### Workflow-Specific Branches

- **Node.js updates**: `chore/update-node` branch
- **NPM security fixes**: `chore/npm-audit-fix-YYYYMMDD` and `chore/npm-audit-fix-with-ts-repair` branches
- **NuGet/.NET updates**: `feature/dotnetsdk-upgrade/{version}` branches
- **Docker updates**: `feature/docker-buildx-upgrade` branch
## Special Considerations

### Node.js Updates

When updating Node.js versions, remember to (the relevant version pins are sketched after this list):

1. Create a corresponding release in [actions/alpine_nodejs](https://github.com/actions/alpine_nodejs)
2. Follow the alpine_nodejs getting started guide
3. Test container builds with the new Node versions
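A quick way to see the pins these steps refer to, and whether a matching alpine_nodejs release already exists. A sketch, assuming the pins live in the externals script at `src/Misc/externals.sh` (path assumed):

```bash
# Node versions currently shipped with the runner (path assumed)
grep -E 'NODE(20|24)_VERSION=' src/Misc/externals.sh

# Recent alpine_nodejs releases, to confirm a matching tag exists
gh release list --repo actions/alpine_nodejs --limit 5
```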
### .NET SDK Updates

- Only patch versions are auto-updated within the same major.minor version
- Major/minor version updates require manual review and testing

### Docker Updates

- Updates include both Docker Engine and Docker Buildx
- Verify compatibility with runner container workflows
## Troubleshooting

### Common Issues

1. **NPM Audit Workflow Fails**:
   - Check if `package.json` exists in `src/Misc/expressionFunc/hashFiles/`
   - Verify the Node.js setup step succeeded

2. **Version Detection Fails**:
   - Check if the upstream APIs are available
   - Verify the parsing logic for version extraction

3. **PR Creation Fails**:
   - Ensure `GITHUB_TOKEN` has sufficient permissions
   - Check if the branch already exists (local reproduction steps are sketched after this list)
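Most of these failures can be reproduced locally before re-running a workflow. A sketch of the usual first checks:

```bash
# Issue 1: confirm the package exists and the audit runs locally
test -f src/Misc/expressionFunc/hashFiles/package.json && echo "package.json found"
cd src/Misc/expressionFunc/hashFiles && npm ci && npm audit

# Issue 3: confirm the branch the workflow wants to create is not already taken
git ls-remote --heads origin "chore/npm-audit-fix-*"
```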
### Contact

For questions about the dependency management process:

- Create an issue with the `dependencies` label
- Review existing dependency management workflows
- Consult the runner team for security-related concerns
## Metrics and KPIs

Track these metrics to measure dependency management effectiveness (a CLI sketch for the first two follows the list):

- Number of open dependency PRs at release time
- Time to merge dependency updates
- Number of security vulnerabilities by severity
- Release cycle adherence (monthly target)
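The first two numbers can be pulled ad hoc with the CLI (a sketch; the remaining metrics come from `npm audit` output and the release calendar):

```bash
# Open dependency PRs right now
gh pr list --label "dependencies" --state open --json number --jq 'length'

# Recently merged dependency PRs with created/merged timestamps (rough time-to-merge signal)
gh pr list --label "dependencies" --state merged --limit 20 --json number,createdAt,mergedAt
```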
@@ -5,8 +5,8 @@ ARG TARGETOS
 ARG TARGETARCH
 ARG RUNNER_VERSION
 ARG RUNNER_CONTAINER_HOOKS_VERSION=0.7.0
-ARG DOCKER_VERSION=28.3.2
-ARG BUILDX_VERSION=0.26.1
+ARG DOCKER_VERSION=29.0.2
+ARG BUILDX_VERSION=0.30.1

 RUN apt update -y && apt install curl unzip -y

@@ -21,6 +21,10 @@ RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-c
     && unzip ./runner-container-hooks.zip -d ./k8s \
     && rm runner-container-hooks.zip

+RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-container-hooks/releases/download/v0.8.0/actions-runner-hooks-k8s-0.8.0.zip \
+    && unzip ./runner-container-hooks.zip -d ./k8s-novolume \
+    && rm runner-container-hooks.zip
+
 RUN export RUNNER_ARCH=${TARGETARCH} \
     && if [ "$RUNNER_ARCH" = "amd64" ]; then export DOCKER_ARCH=x86_64 ; fi \
     && if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
@@ -55,7 +59,8 @@ RUN adduser --disabled-password --gecos "" --uid 1001 runner \
     && usermod -aG sudo runner \
     && usermod -aG docker runner \
     && echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
-    && echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers
+    && echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers \
+    && chmod 777 /home/runner

 WORKDIR /home/runner
@@ -1,20 +1,30 @@
 ## What's Changed
-* Update Docker to v28.3.2 and Buildx to v0.26.1 by @github-actions[bot] in https://github.com/actions/runner/pull/3953
-* Fix if statement structure in update script and variable reference by @salmanmkc in https://github.com/actions/runner/pull/3956
-* Add V2 flow for runner deletion by @Samirat in https://github.com/actions/runner/pull/3954
-* Node 20 -> Node 24 migration feature flagging, opt-in and opt-out environment variables by @salmanmkc in https://github.com/actions/runner/pull/3948
-* Update Node20 and Node24 to latest by @djs-intel in https://github.com/actions/runner/pull/3972
-* Redirect supported OS doc section to current public Docs location by @corycalahan in https://github.com/actions/runner/pull/3979
-* Bump Microsoft.NET.Test.Sdk from 17.13.0 to 17.14.1 by @dependabot[bot] in https://github.com/actions/runner/pull/3975
-* Bump Azure.Storage.Blobs from 12.24.0 to 12.25.0 by @dependabot[bot] in https://github.com/actions/runner/pull/3974
-* Bump actions/download-artifact from 4 to 5 by @dependabot[bot] in https://github.com/actions/runner/pull/3973
-* Bump actions/checkout from 4 to 5 by @dependabot[bot] in https://github.com/actions/runner/pull/3982
+* Custom Image: Preflight checks by @lawrencegripper in https://github.com/actions/runner/pull/4081
+* Update dotnet sdk to latest version @8.0.415 by @github-actions[bot] in https://github.com/actions/runner/pull/4080
+* Link to an extant discussion category by @jsoref in https://github.com/actions/runner/pull/4084
+* Improve logic around decide IsHostedServer. by @TingluoHuang in https://github.com/actions/runner/pull/4086
+* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4093
+* Compare updated template evaluator by @ericsciple in https://github.com/actions/runner/pull/4092
+* fix(dockerfile): set more lenient permissions on /home/runner by @caxu-rh in https://github.com/actions/runner/pull/4083
+* Add support for libicu73-76 for newer Debian/Ubuntu versions by @lets-build-an-ocean in https://github.com/actions/runner/pull/4098
+* Bump actions/download-artifact from 5 to 6 by @dependabot[bot] in https://github.com/actions/runner/pull/4089
+* Bump actions/upload-artifact from 4 to 5 by @dependabot[bot] in https://github.com/actions/runner/pull/4088
+* Bump Azure.Storage.Blobs from 12.25.1 to 12.26.0 by @dependabot[bot] in https://github.com/actions/runner/pull/4077
+* Only start runner after network is online by @dupondje in https://github.com/actions/runner/pull/4094
+* Retry http error related to DNS resolution failure. by @TingluoHuang in https://github.com/actions/runner/pull/4110
+* Update Docker to v29.0.1 and Buildx to v0.30.0 by @github-actions[bot] in https://github.com/actions/runner/pull/4114
+* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4115
+* Update dotnet sdk to latest version @8.0.416 by @github-actions[bot] in https://github.com/actions/runner/pull/4116
+* Compare updated workflow parser for ActionManifestManager by @ericsciple in https://github.com/actions/runner/pull/4111
+* Bump npm pkg version for hashFiles. by @TingluoHuang in https://github.com/actions/runner/pull/4122

 ## New Contributors
-* @Samirat made their first contribution in https://github.com/actions/runner/pull/3954
-* @djs-intel made their first contribution in https://github.com/actions/runner/pull/3972
+* @lawrencegripper made their first contribution in https://github.com/actions/runner/pull/4081
+* @caxu-rh made their first contribution in https://github.com/actions/runner/pull/4083
+* @lets-build-an-ocean made their first contribution in https://github.com/actions/runner/pull/4098
+* @dupondje made their first contribution in https://github.com/actions/runner/pull/4094

-**Full Changelog**: https://github.com/actions/runner/compare/v2.327.1...v2.328.0
+**Full Changelog**: https://github.com/actions/runner/compare/v2.329.0...v2.330.0

 _Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
 To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
@@ -1,5 +1,5 @@
 {
-  "plugins": ["@typescript-eslint"],
+  "plugins": ["@typescript-eslint", "@stylistic"],
   "extends": ["plugin:github/recommended"],
   "parser": "@typescript-eslint/parser",
   "parserOptions": {
@@ -26,7 +26,7 @@
   ],
   "camelcase": "off",
   "@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
-  "@typescript-eslint/func-call-spacing": ["error", "never"],
+  "@stylistic/func-call-spacing": ["error", "never"],
   "@typescript-eslint/no-array-constructor": "error",
   "@typescript-eslint/no-empty-interface": "error",
   "@typescript-eslint/no-explicit-any": "error",
@@ -47,8 +47,8 @@
   "@typescript-eslint/promise-function-async": "error",
   "@typescript-eslint/require-array-sort-compare": "error",
   "@typescript-eslint/restrict-plus-operands": "error",
-  "@typescript-eslint/semi": ["error", "never"],
-  "@typescript-eslint/type-annotation-spacing": "error",
+  "@stylistic/semi": ["error", "never"],
+  "@stylistic/type-annotation-spacing": "error",
   "@typescript-eslint/unbound-method": "error",
   "filenames/match-regex" : "off",
   "github/no-then" : 1, // warning
src/Misc/expressionFunc/hashFiles/package-lock.json (generated, 1,348 lines changed) — file diff suppressed because it is too large.
@@ -10,7 +10,7 @@
     "lint": "eslint src/**/*.ts",
     "pack": "ncc build -o ../../layoutbin/hashFiles",
     "all": "npm run format && npm run lint && npm run build && npm run pack",
-    "prepare": "cd ../../../../ && husky install"
+    "prepare": "cd ../../../../ && husky"
   },
   "repository": {
     "type": "git",
@@ -35,16 +35,17 @@
     "@actions/glob": "^0.4.0"
   },
   "devDependencies": {
-    "@types/node": "^20.6.2",
-    "@typescript-eslint/eslint-plugin": "^6.7.2",
-    "@typescript-eslint/parser": "^6.7.2",
-    "@vercel/ncc": "^0.38.0",
+    "@stylistic/eslint-plugin": "^3.1.0",
+    "@types/node": "^22.0.0",
+    "@typescript-eslint/eslint-plugin": "^8.0.0",
+    "@typescript-eslint/parser": "^8.0.0",
+    "@vercel/ncc": "^0.38.3",
     "eslint": "^8.47.0",
-    "eslint-plugin-github": "^4.10.0",
+    "eslint-plugin-github": "^4.10.2",
    "eslint-plugin-prettier": "^5.0.0",
-    "husky": "^8.0.3",
+    "husky": "^9.1.7",
     "lint-staged": "^15.5.0",
     "prettier": "^3.0.3",
-    "typescript": "^5.2.2"
+    "typescript": "^5.9.2"
   }
 }
@@ -6,8 +6,8 @@ NODE_URL=https://nodejs.org/dist
 NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
 # When you update Node versions you must also create a new release of alpine_nodejs at that updated version.
 # Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started
-NODE20_VERSION="20.19.4"
-NODE24_VERSION="24.5.0"
+NODE20_VERSION="20.19.6"
+NODE24_VERSION="24.11.1"

 get_abs_path() {
   # exploits the fact that pwd will print abs path when no args
@@ -1,6 +1,6 @@
 [Unit]
 Description={{Description}}
-After=network.target
+After=network-online.target

 [Service]
 ExecStart={{RunnerRoot}}/runsvc.sh
(Diff of the regenerated hashFiles bundle — the ncc/webpack build output: rebuilding with the upgraded TypeScript/ncc toolchain renumbers every internal module ID, for example the entry module moves from 2627 to 4711 and the shims for the Node built-ins assert, crypto, events, fs, http, https, net, os, path, stream, tls and util all receive new IDs, and the compiler-emitted __importStar helper is updated. Only generated bundler/compiler output changes here.)
@@ -110,7 +110,7 @@ then
     exit 1
 fi

-apt_get_with_fallbacks libicu72 libicu71 libicu70 libicu69 libicu68 libicu67 libicu66 libicu65 libicu63 libicu60 libicu57 libicu55 libicu52
+apt_get_with_fallbacks libicu76 libicu75 libicu74 libicu73 libicu72 libicu71 libicu70 libicu69 libicu68 libicu67 libicu66 libicu65 libicu63 libicu60 libicu57 libicu55 libicu52
 if [ $? -ne 0 ]
 then
     echo "'$apt_get' failed with exit code '$?'"
@@ -1,6 +1,6 @@
 #!/bin/bash

 SECONDS=0
-while [[ $SECONDS != $1 ]]; do
+while [[ $SECONDS -lt $1 ]]; do
     :
 done
@@ -23,6 +23,8 @@ namespace GitHub.Runner.Common

         Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token);

+        Task AcknowledgeRunnerRequestAsync(string runnerRequestId, Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, CancellationToken token);
+
         Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials);

         Task ForceRefreshConnection(VssCredentials credentials);
@@ -67,10 +69,17 @@ namespace GitHub.Runner.Common
             var brokerSession = RetryRequest<TaskAgentMessage>(
                 async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken, shouldRetry: ShouldRetryException);


             return brokerSession;
         }

+        public async Task AcknowledgeRunnerRequestAsync(string runnerRequestId, Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, CancellationToken cancellationToken)
+        {
+            CheckConnection();
+
+            // No retries
+            await _brokerHttpClient.AcknowledgeRunnerRequestAsync(runnerRequestId, sessionId, version, status, os, architecture, cancellationToken);
+        }

         public async Task DeleteSessionAsync(CancellationToken cancellationToken)
         {
             CheckConnection();
@@ -1,10 +1,10 @@
-using GitHub.Runner.Sdk;
-using System;
+using System;
 using System.IO;
 using System.Linq;
 using System.Runtime.Serialization;
 using System.Text;
 using System.Threading;
+using GitHub.Runner.Sdk;

 namespace GitHub.Runner.Common
 {
@@ -53,6 +53,9 @@ namespace GitHub.Runner.Common
         [DataMember(EmitDefaultValue = false)]
         public bool UseV2Flow { get; set; }

+        [DataMember(EmitDefaultValue = false)]
+        public bool UseRunnerAdminFlow { get; set; }
+
         [DataMember(EmitDefaultValue = false)]
         public string ServerUrlV2 { get; set; }

@@ -61,8 +64,20 @@
         {
             get
             {
-                // Old runners do not have this property. Hosted runners likely don't have this property either.
-                return _isHostedServer ?? true;
+                // If the value has been explicitly set, return it.
+                if (_isHostedServer.HasValue)
+                {
+                    return _isHostedServer.Value;
+                }
+
+                // Otherwise, try to infer it from the GitHubUrl.
+                if (!string.IsNullOrEmpty(GitHubUrl))
+                {
+                    return UrlUtil.IsHostedServer(new UriBuilder(GitHubUrl));
+                }
+
+                // Default to true since Hosted runners likely don't have this property set.
+                return true;
             }

             set
@@ -169,6 +169,10 @@ namespace GitHub.Runner.Common
         public static readonly string AllowRunnerContainerHooks = "DistributedTask.AllowRunnerContainerHooks";
         public static readonly string AddCheckRunIdToJobContext = "actions_add_check_run_id_to_job_context";
         public static readonly string DisplayHelpfulActionsDownloadErrors = "actions_display_helpful_actions_download_errors";
+        public static readonly string ContainerActionRunnerTemp = "actions_container_action_runner_temp";
+        public static readonly string SnapshotPreflightHostedRunnerCheck = "actions_snapshot_preflight_hosted_runner_check";
+        public static readonly string SnapshotPreflightImageGenPoolCheck = "actions_snapshot_preflight_image_gen_pool_check";
+        public static readonly string CompareWorkflowParser = "actions_runner_compare_workflow_parser";
     }

     // Node version migration related constants
@@ -30,6 +30,7 @@ namespace GitHub.Runner.Common
             string environmentUrl,
             IList<Telemetry> telemetry,
             string billingOwnerId,
+            string infrastructureFailureCategory,
             CancellationToken token);

         Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
@@ -80,11 +81,12 @@ namespace GitHub.Runner.Common
             string environmentUrl,
             IList<Telemetry> telemetry,
             string billingOwnerId,
+            string infrastructureFailureCategory,
             CancellationToken cancellationToken)
         {
             CheckConnection();
             return RetryRequest(
-                async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, telemetry, billingOwnerId, cancellationToken), cancellationToken,
+                async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, telemetry, billingOwnerId, infrastructureFailureCategory, cancellationToken), cancellationToken,
                 shouldRetry: ex =>
                     ex is not VssUnauthorizedException && // HTTP status 401
                     ex is not TaskOrchestrationJobNotFoundException); // HTTP status 404
@@ -70,7 +70,7 @@ namespace GitHub.Runner.Common

protected async Task RetryRequest(Func<Task> func,
CancellationToken cancellationToken,
int maxRetryAttemptsCount = 5,
int maxAttempts = 5,
Func<Exception, bool> shouldRetry = null
)
{
@@ -79,31 +79,31 @@ namespace GitHub.Runner.Common
await func();
return Unit.Value;
}
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxRetryAttemptsCount, shouldRetry);
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxAttempts, shouldRetry);
}

protected async Task<T> RetryRequest<T>(Func<Task<T>> func,
CancellationToken cancellationToken,
int maxRetryAttemptsCount = 5,
int maxAttempts = 5,
Func<Exception, bool> shouldRetry = null
)
{
var retryCount = 0;
var attempt = 0;
while (true)
{
retryCount++;
attempt++;
cancellationToken.ThrowIfCancellationRequested();
try
{
return await func();
}
// TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
catch (Exception ex) when (retryCount < maxRetryAttemptsCount && (shouldRetry == null || shouldRetry(ex)))
catch (Exception ex) when (attempt < maxAttempts && (shouldRetry == null || shouldRetry(ex)))
{
Trace.Error("Catch exception during request");
Trace.Error(ex);
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxRetryAttemptsCount - retryCount} attempt left.");
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxAttempts - attempt} attempt left.");
await Task.Delay(backOff, cancellationToken);
}
}

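Note: the hunk above interleaves the old names (maxRetryAttemptsCount / retryCount) with the renamed ones (maxAttempts / attempt). Consolidated, the renamed generic retry helper reads roughly as follows. This is a self-contained sketch, not the runner's exact code: the runner's BackoffTimerHelper and Trace are replaced by an inline random backoff and Console output so the sample compiles on its own.

using System;
using System.Threading;
using System.Threading.Tasks;

public static class RetryRequestSketch
{
    private static readonly Random _random = new();

    // Retries func up to maxAttempts times, honoring shouldRetry, with a 5-15 second random backoff between attempts.
    public static async Task<T> RetryRequest<T>(
        Func<Task<T>> func,
        CancellationToken cancellationToken,
        int maxAttempts = 5,
        Func<Exception, bool> shouldRetry = null)
    {
        var attempt = 0;
        while (true)
        {
            attempt++;
            cancellationToken.ThrowIfCancellationRequested();
            try
            {
                return await func();
            }
            catch (Exception ex) when (attempt < maxAttempts && (shouldRetry == null || shouldRetry(ex)))
            {
                // Stand-in for BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15)).
                var backOff = TimeSpan.FromSeconds(5 + _random.NextDouble() * 10);
                Console.WriteLine($"Back off {backOff.TotalSeconds:F1} seconds before next retry. {maxAttempts - attempt} attempts left ({ex.GetType().Name}).");
                await Task.Delay(backOff, cancellationToken);
            }
        }
    }
}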
@@ -23,7 +23,7 @@ namespace GitHub.Runner.Listener
private RunnerSettings _settings;
private ITerminal _term;
private TimeSpan _getNextMessageRetryInterval;
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private TaskAgentStatus _runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
private VssCredentials _creds;
private VssCredentials _credsV2;
@@ -258,7 +258,7 @@ namespace GitHub.Runner.Listener
public void OnJobStatus(object sender, JobStatusEventArgs e)
{
Trace.Info("Received job status event. JobState: {0}", e.Status);
runnerStatus = e.Status;
_runnerStatus = e.Status;
try
{
_getMessagesTokenSource?.Cancel();
@@ -291,7 +291,7 @@ namespace GitHub.Runner.Listener
}

message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
runnerStatus,
_runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
@@ -417,6 +417,21 @@ namespace GitHub.Runner.Listener
await Task.CompletedTask;
}

public async Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken)
{
using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // Short timeout
using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token);
Trace.Info($"Acknowledging runner request '{runnerRequestId}'.");
await _brokerServer.AcknowledgeRunnerRequestAsync(
runnerRequestId,
_session.SessionId,
_runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
linkedCts.Token);
}

private bool IsGetNextMessageExceptionRetriable(Exception ex)
{
if (ex is TaskAgentNotFoundException ||

@@ -153,8 +153,8 @@ namespace GitHub.Runner.Listener.Configuration
registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
runnerSettings.ServerUrl = authResult.TenantUrl;
runnerSettings.UseV2Flow = authResult.UseV2Flow;
Trace.Info($"Using V2 flow: {runnerSettings.UseV2Flow}");
runnerSettings.UseRunnerAdminFlow = authResult.UseRunnerAdminFlow;
Trace.Info($"Using runner-admin flow: {runnerSettings.UseRunnerAdminFlow}");
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
}
@@ -211,7 +211,7 @@ namespace GitHub.Runner.Listener.Configuration
string poolName = null;
TaskAgentPool agentPool = null;
List<TaskAgentPool> agentPools;
if (runnerSettings.UseV2Flow)
if (runnerSettings.UseRunnerAdminFlow)
{
agentPools = await _dotcomServer.GetRunnerGroupsAsync(runnerSettings.GitHubUrl, registerToken);
}
@@ -259,7 +259,7 @@ namespace GitHub.Runner.Listener.Configuration
var userLabels = command.GetLabels();
_term.WriteLine();
List<TaskAgent> agents;
if (runnerSettings.UseV2Flow)
if (runnerSettings.UseRunnerAdminFlow)
{
agents = await _dotcomServer.GetRunnerByNameAsync(runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
}
@@ -280,10 +280,11 @@ namespace GitHub.Runner.Listener.Configuration

try
{
if (runnerSettings.UseV2Flow)
if (runnerSettings.UseRunnerAdminFlow)
{
var runner = await _dotcomServer.ReplaceRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
runnerSettings.UseV2Flow = true; // if we are using runner admin, we also need to hit broker

agent.Id = runner.Id;
agent.Authorization = new TaskAgentAuthorization()
@@ -291,6 +292,13 @@ namespace GitHub.Runner.Listener.Configuration
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
};

if (!string.IsNullOrEmpty(runner.RunnerAuthorization.LegacyAuthorizationUrl?.AbsoluteUri))
{
agent.Authorization.AuthorizationUrl = runner.RunnerAuthorization.LegacyAuthorizationUrl;
agent.Properties["EnableAuthMigrationByDefault"] = true;
agent.Properties["AuthorizationUrlV2"] = runner.RunnerAuthorization.AuthorizationUrl.AbsoluteUri;
}
}
else
{
@@ -330,10 +338,11 @@ namespace GitHub.Runner.Listener.Configuration

try
{
if (runnerSettings.UseV2Flow)
if (runnerSettings.UseRunnerAdminFlow)
{
var runner = await _dotcomServer.AddRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
runnerSettings.UseV2Flow = true; // if we are using runner admin, we also need to hit broker

agent.Id = runner.Id;
agent.Authorization = new TaskAgentAuthorization()
@@ -341,6 +350,13 @@ namespace GitHub.Runner.Listener.Configuration
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
};

if (!string.IsNullOrEmpty(runner.RunnerAuthorization.LegacyAuthorizationUrl?.AbsoluteUri))
{
agent.Authorization.AuthorizationUrl = runner.RunnerAuthorization.LegacyAuthorizationUrl;
agent.Properties["EnableAuthMigrationByDefault"] = true;
agent.Properties["AuthorizationUrlV2"] = runner.RunnerAuthorization.AuthorizationUrl.AbsoluteUri;
}
}
else
{
@@ -400,13 +416,26 @@ namespace GitHub.Runner.Listener.Configuration
}
else
{

throw new NotSupportedException("Message queue listen OAuth token.");
}

// allow the server to override the serverUrlV2 and useV2Flow
if (agent.Properties.TryGetValue("ServerUrlV2", out string serverUrlV2) &&
!string.IsNullOrEmpty(serverUrlV2))
{
Trace.Info($"Service enforced serverUrlV2: {serverUrlV2}");
runnerSettings.ServerUrlV2 = serverUrlV2;
}

if (agent.Properties.TryGetValue("UseV2Flow", out bool useV2Flow) && useV2Flow)
{
Trace.Info($"Service enforced useV2Flow: {useV2Flow}");
runnerSettings.UseV2Flow = useV2Flow;
}

// Testing agent connection, detect any potential connection issue, like local clock skew that cause OAuth token expired.

if (!runnerSettings.UseV2Flow)
if (!runnerSettings.UseV2Flow && !runnerSettings.UseRunnerAdminFlow)
{
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials credential = credMgr.LoadCredentials(allowAuthUrlV2: false);
@@ -429,20 +458,6 @@ namespace GitHub.Runner.Listener.Configuration
}
}

// allow the server to override the serverUrlV2 and useV2Flow
if (agent.Properties.TryGetValue("ServerUrlV2", out string serverUrlV2) &&
!string.IsNullOrEmpty(serverUrlV2))
{
Trace.Info($"Service enforced serverUrlV2: {serverUrlV2}");
runnerSettings.ServerUrlV2 = serverUrlV2;
}

if (agent.Properties.TryGetValue("UseV2Flow", out bool useV2Flow) && useV2Flow)
{
Trace.Info($"Service enforced useV2Flow: {useV2Flow}");
runnerSettings.UseV2Flow = useV2Flow;
}

_term.WriteSection("Runner settings");

// We will Combine() what's stored with root. Defaults to string a relative path
@@ -538,7 +553,7 @@ namespace GitHub.Runner.Listener.Configuration
{
RunnerSettings settings = _store.GetSettings();

if (settings.UseV2Flow)
if (settings.UseRunnerAdminFlow)
{
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
await _dotcomServer.DeleteRunnerAsync(settings.GitHubUrl, deletionToken, settings.AgentId);

@@ -89,7 +89,7 @@ namespace GitHub.Runner.Listener.Configuration
public string Token { get; set; }

[DataMember(Name = "use_v2_flow")]
public bool UseV2Flow { get; set; }
public bool UseRunnerAdminFlow { get; set; }

public VssCredentials ToVssCredentials()
{

@@ -1211,7 +1211,7 @@ namespace GitHub.Runner.Listener
jobAnnotations.Add(annotation.Value);
}

await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, telemetry: null, billingOwnerId: message.BillingOwnerId, CancellationToken.None);
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, telemetry: null, billingOwnerId: message.BillingOwnerId, infrastructureFailureCategory: null, CancellationToken.None);
}
catch (Exception ex)
{

@@ -32,6 +32,7 @@ namespace GitHub.Runner.Listener
Task DeleteSessionAsync();
Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token);
Task DeleteMessageAsync(TaskAgentMessage message);
Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken);

Task RefreshListenerTokenAsync();
void OnJobStatus(object sender, JobStatusEventArgs e);
@@ -52,7 +53,7 @@ namespace GitHub.Runner.Listener
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private TaskAgentStatus _runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
private VssCredentials _creds;
private VssCredentials _credsV2;
@@ -217,7 +218,7 @@ namespace GitHub.Runner.Listener
public void OnJobStatus(object sender, JobStatusEventArgs e)
{
Trace.Info("Received job status event. JobState: {0}", e.Status);
runnerStatus = e.Status;
_runnerStatus = e.Status;
try
{
_getMessagesTokenSource?.Cancel();
@@ -250,7 +251,7 @@ namespace GitHub.Runner.Listener
message = await _runnerServer.GetAgentMessageAsync(_settings.PoolId,
_session.SessionId,
_lastMessageId,
runnerStatus,
_runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
@@ -274,7 +275,7 @@ namespace GitHub.Runner.Listener
}

message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
runnerStatus,
_runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
@@ -437,6 +438,21 @@ namespace GitHub.Runner.Listener
await _brokerServer.ForceRefreshConnection(_credsV2);
}

public async Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken)
{
using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // Short timeout
using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token);
Trace.Info($"Acknowledging runner request '{runnerRequestId}'.");
await _brokerServer.AcknowledgeRunnerRequestAsync(
runnerRequestId,
_session.SessionId,
_runnerStatus,
BuildConstants.RunnerPackage.Version,
VarUtil.OS,
VarUtil.OSArchitecture,
linkedCts.Token);
}

private TaskAgentMessage DecryptMessage(TaskAgentMessage message)
{
if (_session.EncryptionKey == null ||

@@ -654,22 +654,42 @@ namespace GitHub.Runner.Listener
else
{
var messageRef = StringUtil.ConvertFromJson<RunnerJobRequestRef>(message.Body);
Pipelines.AgentJobRequestMessage jobRequestMessage = null;

// Create connection
var credMgr = HostContext.GetService<ICredentialManager>();
// Acknowledge (best-effort)
if (messageRef.ShouldAcknowledge) // Temporary feature flag
{
try
{
await _listener.AcknowledgeMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
}
catch (Exception ex)
{
Trace.Error($"Best-effort acknowledge failed for request '{messageRef.RunnerRequestId}'");
Trace.Error(ex);
}
}

Pipelines.AgentJobRequestMessage jobRequestMessage = null;
if (string.IsNullOrEmpty(messageRef.RunServiceUrl))
{
// Connect
var credMgr = HostContext.GetService<ICredentialManager>();
var creds = credMgr.LoadCredentials(allowAuthUrlV2: false);
var actionsRunServer = HostContext.CreateService<IActionsRunServer>();
await actionsRunServer.ConnectAsync(new Uri(settings.ServerUrl), creds);

// Get job message
jobRequestMessage = await actionsRunServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
}
else
{
// Connect
var credMgr = HostContext.GetService<ICredentialManager>();
var credsV2 = credMgr.LoadCredentials(allowAuthUrlV2: true);
var runServer = HostContext.CreateService<IRunServer>();
await runServer.ConnectAsync(new Uri(messageRef.RunServiceUrl), credsV2);

// Get job message
try
{
jobRequestMessage = await runServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageRef.BillingOwnerId, messageQueueLoopTokenSource.Token);
@@ -698,7 +718,10 @@ namespace GitHub.Runner.Listener
}
}

// Dispatch
jobDispatcher.Run(jobRequestMessage, runOnce);

// Run once?
if (runOnce)
{
Trace.Info("One time used runner received job message.");

@@ -11,6 +11,9 @@ namespace GitHub.Runner.Listener
[DataMember(Name = "runner_request_id")]
public string RunnerRequestId { get; set; }

[DataMember(Name = "should_acknowledge")]
public bool ShouldAcknowledge { get; set; }

[DataMember(Name = "run_service_url")]
public string RunServiceUrl { get; set; }

@@ -111,7 +111,7 @@ namespace GitHub.Runner.Worker
{
// Log the error and fail the PrepareActionsAsync Initialization.
Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
executionContext.InfrastructureError(ex.Message);
executionContext.InfrastructureError(ex.Message, category: "resolve_action");
executionContext.Result = TaskResult.Failed;
throw;
}
@@ -119,7 +119,7 @@ namespace GitHub.Runner.Worker
{
// Log the error and fail the PrepareActionsAsync Initialization.
Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
executionContext.InfrastructureError(ex.Message);
executionContext.InfrastructureError(ex.Message, category: "invalid_action_download");
executionContext.Result = TaskResult.Failed;
throw;
}
@@ -378,7 +378,7 @@ namespace GitHub.Runner.Worker
string dockerFileLowerCase = Path.Combine(actionDirectory, "dockerfile");
if (File.Exists(manifestFile) || File.Exists(manifestFileYaml))
{
var manifestManager = HostContext.GetService<IActionManifestManager>();
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
if (File.Exists(manifestFile))
{
definition.Data = manifestManager.Load(executionContext, manifestFile);
@@ -964,7 +964,7 @@ namespace GitHub.Runner.Worker
if (File.Exists(actionManifest) || File.Exists(actionManifestYaml))
{
executionContext.Debug($"action.yml for action: '{actionManifest}'.");
var manifestManager = HostContext.GetService<IActionManifestManager>();
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
ActionDefinitionData actionDefinitionData = null;
if (File.Exists(actionManifest))
{

@@ -2,29 +2,29 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Reflection;
|
||||
using System.Linq;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Reflection;
|
||||
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
||||
using GitHub.DistributedTask.ObjectTemplating.Schema;
|
||||
using GitHub.DistributedTask.ObjectTemplating;
|
||||
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using System.Linq;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.Actions.WorkflowParser;
|
||||
using GitHub.Actions.WorkflowParser.Conversion;
|
||||
using GitHub.Actions.WorkflowParser.ObjectTemplating;
|
||||
using GitHub.Actions.WorkflowParser.ObjectTemplating.Schema;
|
||||
using GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens;
|
||||
using GitHub.Actions.Expressions.Data;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
{
|
||||
[ServiceLocator(Default = typeof(ActionManifestManager))]
|
||||
public interface IActionManifestManager : IRunnerService
|
||||
{
|
||||
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
|
||||
public ActionDefinitionDataNew Load(IExecutionContext executionContext, string manifestFile);
|
||||
|
||||
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
DictionaryExpressionData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, ExpressionData> extraExpressionValues);
|
||||
|
||||
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, ExpressionData> extraExpressionValues);
|
||||
|
||||
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, ExpressionData> extraExpressionValues);
|
||||
|
||||
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
|
||||
}
|
||||
@@ -50,10 +50,10 @@ namespace GitHub.Runner.Worker
|
||||
Trace.Info($"Load schema file with definitions: {StringUtil.ConvertToJson(_actionManifestSchema.Definitions.Keys)}");
|
||||
}
|
||||
|
||||
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
|
||||
public ActionDefinitionDataNew Load(IExecutionContext executionContext, string manifestFile)
|
||||
{
|
||||
var templateContext = CreateTemplateContext(executionContext);
|
||||
ActionDefinitionData actionDefinition = new();
|
||||
ActionDefinitionDataNew actionDefinition = new();
|
||||
|
||||
// Clean up file name real quick
|
||||
// Instead of using Regex which can be computationally expensive,
|
||||
@@ -160,21 +160,21 @@ namespace GitHub.Runner.Worker
|
||||
return actionDefinition;
|
||||
}
|
||||
|
||||
public DictionaryContextData EvaluateCompositeOutputs(
|
||||
public DictionaryExpressionData EvaluateCompositeOutputs(
|
||||
IExecutionContext executionContext,
|
||||
TemplateToken token,
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||
IDictionary<string, ExpressionData> extraExpressionValues)
|
||||
{
|
||||
var result = default(DictionaryContextData);
|
||||
DictionaryExpressionData result = null;
|
||||
|
||||
if (token != null)
|
||||
{
|
||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||
try
|
||||
{
|
||||
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null, omitHeader: true);
|
||||
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null);
|
||||
templateContext.Errors.Check();
|
||||
result = token.ToContextData().AssertDictionary("composite outputs");
|
||||
result = token.ToExpressionData().AssertDictionary("composite outputs");
|
||||
}
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
@@ -184,13 +184,13 @@ namespace GitHub.Runner.Worker
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result ?? new DictionaryContextData();
|
||||
return result ?? new DictionaryExpressionData();
|
||||
}
|
||||
|
||||
public List<string> EvaluateContainerArguments(
|
||||
IExecutionContext executionContext,
|
||||
SequenceToken token,
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||
IDictionary<string, ExpressionData> extraExpressionValues)
|
||||
{
|
||||
var result = new List<string>();
|
||||
|
||||
@@ -199,7 +199,7 @@ namespace GitHub.Runner.Worker
|
||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||
try
|
||||
{
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null, omitHeader: true);
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null);
|
||||
templateContext.Errors.Check();
|
||||
|
||||
Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||
@@ -229,7 +229,7 @@ namespace GitHub.Runner.Worker
|
||||
public Dictionary<string, string> EvaluateContainerEnvironment(
|
||||
IExecutionContext executionContext,
|
||||
MappingToken token,
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||
IDictionary<string, ExpressionData> extraExpressionValues)
|
||||
{
|
||||
var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
@@ -238,7 +238,7 @@ namespace GitHub.Runner.Worker
|
||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||
try
|
||||
{
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null, omitHeader: true);
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null);
|
||||
templateContext.Errors.Check();
|
||||
|
||||
Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||
@@ -281,7 +281,7 @@ namespace GitHub.Runner.Worker
|
||||
var templateContext = CreateTemplateContext(executionContext);
|
||||
try
|
||||
{
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null, omitHeader: true);
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null);
|
||||
templateContext.Errors.Check();
|
||||
|
||||
Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||
@@ -303,7 +303,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
private TemplateContext CreateTemplateContext(
|
||||
IExecutionContext executionContext,
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues = null)
|
||||
IDictionary<string, ExpressionData> extraExpressionValues = null)
|
||||
{
|
||||
var result = new TemplateContext
|
||||
{
|
||||
@@ -314,13 +314,17 @@ namespace GitHub.Runner.Worker
|
||||
maxEvents: 1000000,
|
||||
maxBytes: 10 * 1024 * 1024),
|
||||
Schema = _actionManifestSchema,
|
||||
TraceWriter = executionContext.ToTemplateTraceWriter(),
|
||||
// TODO: Switch to real tracewriter for cutover
|
||||
TraceWriter = new GitHub.Actions.WorkflowParser.ObjectTemplating.EmptyTraceWriter(),
|
||||
};
|
||||
|
||||
// Expression values from execution context
|
||||
foreach (var pair in executionContext.ExpressionValues)
|
||||
{
|
||||
result.ExpressionValues[pair.Key] = pair.Value;
|
||||
// Convert old PipelineContextData to new ExpressionData
|
||||
var json = StringUtil.ConvertToJson(pair.Value, Newtonsoft.Json.Formatting.None);
|
||||
var newValue = StringUtil.ConvertFromJson<GitHub.Actions.Expressions.Data.ExpressionData>(json);
|
||||
result.ExpressionValues[pair.Key] = newValue;
|
||||
}
|
||||
|
||||
// Extra expression values
|
||||
@@ -332,10 +336,19 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
// Expression functions from execution context
|
||||
foreach (var item in executionContext.ExpressionFunctions)
|
||||
// Expression functions
|
||||
foreach (var func in executionContext.ExpressionFunctions)
|
||||
{
|
||||
result.ExpressionFunctions.Add(item);
|
||||
GitHub.Actions.Expressions.IFunctionInfo newFunc = func.Name switch
|
||||
{
|
||||
"always" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewAlwaysFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||
"cancelled" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewCancelledFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||
"failure" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewFailureFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||
"success" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewSuccessFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||
"hashFiles" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewHashFilesFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||
_ => throw new NotSupportedException($"Expression function '{func.Name}' is not supported in ActionManifestManager")
|
||||
};
|
||||
result.ExpressionFunctions.Add(newFunc);
|
||||
}
|
||||
|
||||
// Add the file table from the Execution Context
|
||||
@@ -368,7 +381,7 @@ namespace GitHub.Runner.Worker
|
||||
var postToken = default(StringToken);
|
||||
var postEntrypointToken = default(StringToken);
|
||||
var postIfToken = default(StringToken);
|
||||
var steps = default(List<Pipelines.Step>);
|
||||
var steps = default(List<GitHub.Actions.WorkflowParser.IStep>);
|
||||
|
||||
foreach (var run in runsMapping)
|
||||
{
|
||||
@@ -416,7 +429,7 @@ namespace GitHub.Runner.Worker
|
||||
break;
|
||||
case "steps":
|
||||
var stepsToken = run.Value.AssertSequence("steps");
|
||||
steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
|
||||
steps = WorkflowTemplateConverter.ConvertToSteps(templateContext, stepsToken);
|
||||
templateContext.Errors.Check();
|
||||
break;
|
||||
default:
|
||||
@@ -435,7 +448,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
else
|
||||
{
|
||||
return new ContainerActionExecutionData()
|
||||
return new ContainerActionExecutionDataNew()
|
||||
{
|
||||
Image = imageToken.Value,
|
||||
Arguments = argsToken,
|
||||
@@ -478,11 +491,11 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
else
|
||||
{
|
||||
return new CompositeActionExecutionData()
|
||||
return new CompositeActionExecutionDataNew()
|
||||
{
|
||||
Steps = steps.Cast<Pipelines.ActionStep>().ToList(),
|
||||
PreSteps = new List<Pipelines.ActionStep>(),
|
||||
PostSteps = new Stack<Pipelines.ActionStep>(),
|
||||
Steps = steps,
|
||||
PreSteps = new List<GitHub.Actions.WorkflowParser.IStep>(),
|
||||
PostSteps = new Stack<GitHub.Actions.WorkflowParser.IStep>(),
|
||||
InitCondition = "always()",
|
||||
CleanupCondition = "always()",
|
||||
Outputs = outputs
|
||||
@@ -507,7 +520,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
private void ConvertInputs(
|
||||
TemplateToken inputsToken,
|
||||
ActionDefinitionData actionDefinition)
|
||||
ActionDefinitionDataNew actionDefinition)
|
||||
{
|
||||
actionDefinition.Inputs = new MappingToken(null, null, null);
|
||||
var inputsMapping = inputsToken.AssertMapping("inputs");
|
||||
@@ -542,5 +555,49 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class ActionDefinitionDataNew
|
||||
{
|
||||
public string Name { get; set; }
|
||||
|
||||
public string Description { get; set; }
|
||||
|
||||
public MappingToken Inputs { get; set; }
|
||||
|
||||
public ActionExecutionData Execution { get; set; }
|
||||
|
||||
public Dictionary<String, String> Deprecated { get; set; }
|
||||
}
|
||||
|
||||
public sealed class ContainerActionExecutionDataNew : ActionExecutionData
|
||||
{
|
||||
public override ActionExecutionType ExecutionType => ActionExecutionType.Container;
|
||||
|
||||
public override bool HasPre => !string.IsNullOrEmpty(Pre);
|
||||
public override bool HasPost => !string.IsNullOrEmpty(Post);
|
||||
|
||||
public string Image { get; set; }
|
||||
|
||||
public string EntryPoint { get; set; }
|
||||
|
||||
public SequenceToken Arguments { get; set; }
|
||||
|
||||
public MappingToken Environment { get; set; }
|
||||
|
||||
public string Pre { get; set; }
|
||||
|
||||
public string Post { get; set; }
|
||||
}
|
||||
|
||||
public sealed class CompositeActionExecutionDataNew : ActionExecutionData
|
||||
{
|
||||
public override ActionExecutionType ExecutionType => ActionExecutionType.Composite;
|
||||
public override bool HasPre => PreSteps.Count > 0;
|
||||
public override bool HasPost => PostSteps.Count > 0;
|
||||
public List<GitHub.Actions.WorkflowParser.IStep> PreSteps { get; set; }
|
||||
public List<GitHub.Actions.WorkflowParser.IStep> Steps { get; set; }
|
||||
public Stack<GitHub.Actions.WorkflowParser.IStep> PostSteps { get; set; }
|
||||
public MappingToken Outputs { get; set; }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
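The CreateTemplateContext change above bridges values from the old GitHub.DistributedTask context/token model into the new GitHub.Actions.WorkflowParser and GitHub.Actions.Expressions model by serializing to JSON and deserializing into the target type (StringUtil.ConvertToJson / ConvertFromJson); the ActionManifestManagerWrapper further below uses the same round-trip for tokens and steps. A minimal standalone illustration of that technique follows; Newtonsoft.Json stands in for the runner's StringUtil helpers, and the two DTOs are made up for the example.

using Newtonsoft.Json;

// Hypothetical stand-ins: both type families must serialize to the same JSON shape for the bridge to work.
public sealed class LegacyInput { public string Name { get; set; } public string Default { get; set; } }
public sealed class NewInput { public string Name { get; set; } public string Default { get; set; } }

public static class JsonModelBridge
{
    // Convert an instance of the old model into the new model via a JSON round-trip.
    public static TNew Convert<TOld, TNew>(TOld oldValue)
        where TOld : class
        where TNew : class
    {
        if (oldValue == null)
        {
            return null;
        }
        var json = JsonConvert.SerializeObject(oldValue, Formatting.None);
        return JsonConvert.DeserializeObject<TNew>(json);
    }
}

// Usage: var converted = JsonModelBridge.Convert<LegacyInput, NewInput>(new LegacyInput { Name = "path", Default = "." });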
546  src/Runner.Worker/ActionManifestManagerLegacy.cs  Normal file
@@ -0,0 +1,546 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Reflection;
|
||||
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
||||
using GitHub.DistributedTask.ObjectTemplating.Schema;
|
||||
using GitHub.DistributedTask.ObjectTemplating;
|
||||
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using System.Linq;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
{
|
||||
[ServiceLocator(Default = typeof(ActionManifestManagerLegacy))]
|
||||
public interface IActionManifestManagerLegacy : IRunnerService
|
||||
{
|
||||
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
|
||||
|
||||
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
|
||||
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
|
||||
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
|
||||
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
|
||||
}
|
||||
|
||||
public sealed class ActionManifestManagerLegacy : RunnerService, IActionManifestManagerLegacy
|
||||
{
|
||||
private TemplateSchema _actionManifestSchema;
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
|
||||
var assembly = Assembly.GetExecutingAssembly();
|
||||
var json = default(string);
|
||||
using (var stream = assembly.GetManifestResourceStream("GitHub.Runner.Worker.action_yaml.json"))
|
||||
using (var streamReader = new StreamReader(stream))
|
||||
{
|
||||
json = streamReader.ReadToEnd();
|
||||
}
|
||||
|
||||
var objectReader = new JsonObjectReader(null, json);
|
||||
_actionManifestSchema = TemplateSchema.Load(objectReader);
|
||||
ArgUtil.NotNull(_actionManifestSchema, nameof(_actionManifestSchema));
|
||||
Trace.Info($"Load schema file with definitions: {StringUtil.ConvertToJson(_actionManifestSchema.Definitions.Keys)}");
|
||||
}
|
||||
|
||||
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
|
||||
{
|
||||
var templateContext = CreateTemplateContext(executionContext);
|
||||
ActionDefinitionData actionDefinition = new();
|
||||
|
||||
// Clean up file name real quick
|
||||
// Instead of using Regex which can be computationally expensive,
|
||||
// we can just remove the # of characters from the fileName according to the length of the basePath
|
||||
string basePath = HostContext.GetDirectory(WellKnownDirectory.Actions);
|
||||
string fileRelativePath = manifestFile;
|
||||
if (manifestFile.Contains(basePath))
|
||||
{
|
||||
fileRelativePath = manifestFile.Remove(0, basePath.Length + 1);
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var token = default(TemplateToken);
|
||||
|
||||
// Get the file ID
|
||||
var fileId = templateContext.GetFileId(fileRelativePath);
|
||||
|
||||
// Add this file to the FileTable in executionContext if it hasn't been added already
|
||||
// we use > since fileID is 1 indexed
|
||||
if (fileId > executionContext.Global.FileTable.Count)
|
||||
{
|
||||
executionContext.Global.FileTable.Add(fileRelativePath);
|
||||
}
|
||||
|
||||
// Read the file
|
||||
var fileContent = File.ReadAllText(manifestFile);
|
||||
using (var stringReader = new StringReader(fileContent))
|
||||
{
|
||||
var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
|
||||
token = TemplateReader.Read(templateContext, "action-root", yamlObjectReader, fileId, out _);
|
||||
}
|
||||
|
||||
var actionMapping = token.AssertMapping("action manifest root");
|
||||
var actionOutputs = default(MappingToken);
|
||||
var actionRunValueToken = default(TemplateToken);
|
||||
|
||||
foreach (var actionPair in actionMapping)
|
||||
{
|
||||
var propertyName = actionPair.Key.AssertString($"action.yml property key");
|
||||
|
||||
switch (propertyName.Value)
|
||||
{
|
||||
case "name":
|
||||
actionDefinition.Name = actionPair.Value.AssertString("name").Value;
|
||||
break;
|
||||
|
||||
case "outputs":
|
||||
actionOutputs = actionPair.Value.AssertMapping("outputs");
|
||||
break;
|
||||
|
||||
case "description":
|
||||
actionDefinition.Description = actionPair.Value.AssertString("description").Value;
|
||||
break;
|
||||
|
||||
case "inputs":
|
||||
ConvertInputs(actionPair.Value, actionDefinition);
|
||||
break;
|
||||
|
||||
case "runs":
|
||||
// Defer runs token evaluation to after for loop to ensure that order of outputs doesn't matter.
|
||||
actionRunValueToken = actionPair.Value;
|
||||
break;
|
||||
|
||||
default:
|
||||
Trace.Info($"Ignore action property {propertyName}.");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Evaluate Runs Last
|
||||
if (actionRunValueToken != null)
|
||||
{
|
||||
actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionRunValueToken, fileRelativePath, actionOutputs);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error(ex);
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
if (templateContext.Errors.Count > 0)
|
||||
{
|
||||
foreach (var error in templateContext.Errors)
|
||||
{
|
||||
Trace.Error($"Action.yml load error: {error.Message}");
|
||||
executionContext.Error(error.Message);
|
||||
}
|
||||
|
||||
throw new ArgumentException($"Failed to load {fileRelativePath}");
|
||||
}
|
||||
|
||||
if (actionDefinition.Execution == null)
|
||||
{
|
||||
executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
|
||||
throw new ArgumentException($"Top level 'runs:' section is required for {fileRelativePath}");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
|
||||
}
|
||||
|
||||
return actionDefinition;
|
||||
}
|
||||
|
||||
public DictionaryContextData EvaluateCompositeOutputs(
|
||||
IExecutionContext executionContext,
|
||||
TemplateToken token,
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||
{
|
||||
var result = default(DictionaryContextData);
|
||||
|
||||
if (token != null)
|
||||
{
|
||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||
try
|
||||
{
|
||||
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null, omitHeader: true);
|
||||
templateContext.Errors.Check();
|
||||
result = token.ToContextData().AssertDictionary("composite outputs");
|
||||
}
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result ?? new DictionaryContextData();
|
||||
}
|
||||
|
||||
public List<string> EvaluateContainerArguments(
|
||||
IExecutionContext executionContext,
|
||||
SequenceToken token,
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||
{
|
||||
var result = new List<string>();
|
||||
|
||||
if (token != null)
|
||||
{
|
||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||
try
|
||||
{
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null, omitHeader: true);
|
||||
templateContext.Errors.Check();
|
||||
|
||||
Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||
|
||||
// Sequence
|
||||
var args = evaluateResult.AssertSequence("container args");
|
||||
|
||||
foreach (var arg in args)
|
||||
{
|
||||
var str = arg.AssertString("container arg").Value;
|
||||
result.Add(str);
|
||||
Trace.Info($"Add argument {str}");
|
||||
}
|
||||
}
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
Trace.Error(ex);
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public Dictionary<string, string> EvaluateContainerEnvironment(
|
||||
IExecutionContext executionContext,
|
||||
MappingToken token,
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||
{
|
||||
var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
if (token != null)
|
||||
{
|
||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||
try
|
||||
{
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null, omitHeader: true);
|
||||
templateContext.Errors.Check();
|
||||
|
||||
Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||
|
||||
// Mapping
|
||||
var mapping = evaluateResult.AssertMapping("container env");
|
||||
|
||||
foreach (var pair in mapping)
|
||||
{
|
||||
// Literal key
|
||||
var key = pair.Key.AssertString("container env key");
|
||||
|
||||
// Literal value
|
||||
var value = pair.Value.AssertString("container env value");
|
||||
result[key.Value] = value.Value;
|
||||
|
||||
Trace.Info($"Add env {key} = {value}");
|
||||
}
|
||||
}
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
Trace.Error(ex);
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public string EvaluateDefaultInput(
|
||||
IExecutionContext executionContext,
|
||||
string inputName,
|
||||
TemplateToken token)
|
||||
{
|
||||
string result = "";
|
||||
if (token != null)
|
||||
{
|
||||
var templateContext = CreateTemplateContext(executionContext);
|
||||
try
|
||||
{
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null, omitHeader: true);
|
||||
templateContext.Errors.Check();
|
||||
|
||||
Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||
|
||||
// String
|
||||
result = evaluateResult.AssertString($"default value for input '{inputName}'").Value;
|
||||
}
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
Trace.Error(ex);
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private TemplateContext CreateTemplateContext(
|
||||
IExecutionContext executionContext,
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues = null)
|
||||
{
|
||||
var result = new TemplateContext
|
||||
{
|
||||
CancellationToken = CancellationToken.None,
|
||||
Errors = new TemplateValidationErrors(10, int.MaxValue), // Don't truncate error messages otherwise we might not scrub secrets correctly
|
||||
Memory = new TemplateMemory(
|
||||
maxDepth: 100,
|
||||
maxEvents: 1000000,
|
||||
maxBytes: 10 * 1024 * 1024),
|
||||
Schema = _actionManifestSchema,
|
||||
TraceWriter = executionContext.ToTemplateTraceWriter(),
|
||||
};
|
||||
|
||||
// Expression values from execution context
|
||||
foreach (var pair in executionContext.ExpressionValues)
|
||||
{
|
||||
result.ExpressionValues[pair.Key] = pair.Value;
|
||||
}
|
||||
|
||||
// Extra expression values
|
||||
if (extraExpressionValues?.Count > 0)
|
||||
{
|
||||
foreach (var pair in extraExpressionValues)
|
||||
{
|
||||
result.ExpressionValues[pair.Key] = pair.Value;
|
||||
}
|
||||
}
|
||||
|
||||
// Expression functions from execution context
|
||||
foreach (var item in executionContext.ExpressionFunctions)
|
||||
{
|
||||
result.ExpressionFunctions.Add(item);
|
||||
}
|
||||
|
||||
// Add the file table from the Execution Context
|
||||
for (var i = 0; i < executionContext.Global.FileTable.Count; i++)
|
||||
{
|
||||
result.GetFileId(executionContext.Global.FileTable[i]);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private ActionExecutionData ConvertRuns(
|
||||
IExecutionContext executionContext,
|
||||
TemplateContext templateContext,
|
||||
TemplateToken inputsToken,
|
||||
String fileRelativePath,
|
||||
MappingToken outputs = null)
|
||||
{
|
||||
var runsMapping = inputsToken.AssertMapping("runs");
|
||||
var usingToken = default(StringToken);
|
||||
var imageToken = default(StringToken);
|
||||
var argsToken = default(SequenceToken);
|
||||
var entrypointToken = default(StringToken);
|
||||
var envToken = default(MappingToken);
|
||||
var mainToken = default(StringToken);
|
||||
var pluginToken = default(StringToken);
|
||||
var preToken = default(StringToken);
|
||||
var preEntrypointToken = default(StringToken);
|
||||
var preIfToken = default(StringToken);
|
||||
var postToken = default(StringToken);
|
||||
var postEntrypointToken = default(StringToken);
|
||||
var postIfToken = default(StringToken);
|
||||
var steps = default(List<Pipelines.Step>);
|
||||
|
||||
foreach (var run in runsMapping)
|
||||
{
|
||||
var runsKey = run.Key.AssertString("runs key").Value;
|
||||
switch (runsKey)
|
||||
{
|
||||
case "using":
|
||||
usingToken = run.Value.AssertString("using");
|
||||
break;
|
||||
case "image":
|
||||
imageToken = run.Value.AssertString("image");
|
||||
break;
|
||||
case "args":
|
||||
argsToken = run.Value.AssertSequence("args");
|
||||
break;
|
||||
case "entrypoint":
|
||||
entrypointToken = run.Value.AssertString("entrypoint");
|
||||
break;
|
||||
case "env":
|
||||
envToken = run.Value.AssertMapping("env");
|
||||
break;
|
||||
case "main":
|
||||
mainToken = run.Value.AssertString("main");
|
||||
break;
|
||||
case "plugin":
|
||||
pluginToken = run.Value.AssertString("plugin");
|
||||
break;
|
||||
case "post":
|
||||
postToken = run.Value.AssertString("post");
|
||||
break;
|
||||
case "post-entrypoint":
|
||||
postEntrypointToken = run.Value.AssertString("post-entrypoint");
|
||||
break;
|
||||
case "post-if":
|
||||
postIfToken = run.Value.AssertString("post-if");
|
||||
break;
|
||||
case "pre":
|
||||
preToken = run.Value.AssertString("pre");
|
||||
break;
|
||||
case "pre-entrypoint":
|
||||
preEntrypointToken = run.Value.AssertString("pre-entrypoint");
|
||||
break;
|
||||
case "pre-if":
|
||||
preIfToken = run.Value.AssertString("pre-if");
|
||||
break;
|
||||
case "steps":
|
||||
var stepsToken = run.Value.AssertSequence("steps");
|
||||
steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
|
||||
templateContext.Errors.Check();
|
||||
break;
|
||||
default:
|
||||
Trace.Info($"Ignore run property {runsKey}.");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (usingToken != null)
|
||||
{
|
||||
if (string.Equals(usingToken.Value, "docker", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (string.IsNullOrEmpty(imageToken?.Value))
|
||||
{
|
||||
throw new ArgumentNullException($"You are using a Container Action but an image is not provided in {fileRelativePath}.");
|
||||
}
|
||||
else
|
||||
{
|
||||
return new ContainerActionExecutionData()
|
||||
{
|
||||
Image = imageToken.Value,
|
||||
Arguments = argsToken,
|
||||
EntryPoint = entrypointToken?.Value,
|
||||
Environment = envToken,
|
||||
Pre = preEntrypointToken?.Value,
|
||||
InitCondition = preIfToken?.Value ?? "always()",
|
||||
Post = postEntrypointToken?.Value,
|
||||
CleanupCondition = postIfToken?.Value ?? "always()"
|
||||
};
|
||||
}
|
||||
}
|
||||
else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(usingToken.Value, "node16", StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(usingToken.Value, "node20", StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(usingToken.Value, "node24", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (string.IsNullOrEmpty(mainToken?.Value))
|
||||
{
|
||||
throw new ArgumentNullException($"You are using a JavaScript Action but there is not an entry JavaScript file provided in {fileRelativePath}.");
|
||||
}
|
||||
else
|
||||
{
|
||||
return new NodeJSActionExecutionData()
|
||||
{
|
||||
NodeVersion = usingToken.Value,
|
||||
Script = mainToken.Value,
|
||||
Pre = preToken?.Value,
|
||||
InitCondition = preIfToken?.Value ?? "always()",
|
||||
Post = postToken?.Value,
|
||||
CleanupCondition = postIfToken?.Value ?? "always()"
|
||||
};
|
||||
}
|
||||
}
|
||||
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (steps == null)
|
||||
{
|
||||
throw new ArgumentNullException($"You are using a composite action but there are no steps provided in {fileRelativePath}.");
|
||||
}
|
||||
else
|
||||
{
|
||||
return new CompositeActionExecutionData()
|
||||
{
|
||||
Steps = steps.Cast<Pipelines.ActionStep>().ToList(),
|
||||
PreSteps = new List<Pipelines.ActionStep>(),
|
||||
PostSteps = new Stack<Pipelines.ActionStep>(),
|
||||
InitCondition = "always()",
|
||||
CleanupCondition = "always()",
|
||||
Outputs = outputs
|
||||
};
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker', 'node12', 'node16', 'node20' or 'node24' instead.");
|
||||
}
|
||||
}
|
||||
else if (pluginToken != null)
|
||||
{
|
||||
return new PluginActionExecutionData()
|
||||
{
|
||||
Plugin = pluginToken.Value
|
||||
};
|
||||
}
|
||||
|
||||
throw new NotSupportedException("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16', 'node20' or 'node24'.");
|
||||
}
|
||||
|
||||
private void ConvertInputs(
|
||||
TemplateToken inputsToken,
|
||||
ActionDefinitionData actionDefinition)
|
||||
{
|
||||
actionDefinition.Inputs = new MappingToken(null, null, null);
|
||||
var inputsMapping = inputsToken.AssertMapping("inputs");
|
||||
foreach (var input in inputsMapping)
|
||||
{
|
||||
bool hasDefault = false;
|
||||
var inputName = input.Key.AssertString("input name");
|
||||
var inputMetadata = input.Value.AssertMapping("input metadata");
|
||||
foreach (var metadata in inputMetadata)
|
||||
{
|
||||
var metadataName = metadata.Key.AssertString("input metadata").Value;
|
||||
if (string.Equals(metadataName, "default", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
hasDefault = true;
|
||||
actionDefinition.Inputs.Add(inputName, metadata.Value);
|
||||
}
|
||||
else if (string.Equals(metadataName, "deprecationMessage", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (actionDefinition.Deprecated == null)
|
||||
{
|
||||
actionDefinition.Deprecated = new Dictionary<String, String>();
|
||||
}
|
||||
var message = metadata.Value.AssertString("input deprecationMessage");
|
||||
actionDefinition.Deprecated.Add(inputName.Value, message.Value);
|
||||
}
|
||||
}
|
||||
|
||||
if (!hasDefault)
|
||||
{
|
||||
actionDefinition.Inputs.Add(inputName, new StringToken(null, null, null, string.Empty));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
701  src/Runner.Worker/ActionManifestManagerWrapper.cs  Normal file
@@ -0,0 +1,701 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using GitHub.Actions.WorkflowParser;
|
||||
using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
{
|
||||
[ServiceLocator(Default = typeof(ActionManifestManagerWrapper))]
|
||||
public interface IActionManifestManagerWrapper : IRunnerService
|
||||
{
|
||||
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
|
||||
|
||||
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
|
||||
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
|
||||
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
|
||||
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
|
||||
}
|
||||
|
||||
public sealed class ActionManifestManagerWrapper : RunnerService, IActionManifestManagerWrapper
|
||||
{
|
||||
private IActionManifestManagerLegacy _legacyManager;
|
||||
private IActionManifestManager _newManager;
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_legacyManager = hostContext.GetService<IActionManifestManagerLegacy>();
_newManager = hostContext.GetService<IActionManifestManager>();
}

public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
return EvaluateAndCompare(
executionContext,
"Load",
() => _legacyManager.Load(executionContext, manifestFile),
() => ConvertToLegacyActionDefinitionData(_newManager.Load(executionContext, manifestFile)),
(legacyResult, newResult) => CompareActionDefinition(legacyResult, newResult));
}

public DictionaryContextData EvaluateCompositeOutputs(
IExecutionContext executionContext,
TemplateToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
{
return EvaluateAndCompare(
executionContext,
"EvaluateCompositeOutputs",
() => _legacyManager.EvaluateCompositeOutputs(executionContext, token, extraExpressionValues),
() => ConvertToLegacyContextData<DictionaryContextData>(_newManager.EvaluateCompositeOutputs(executionContext, ConvertToNewToken(token), ConvertToNewExpressionValues(extraExpressionValues))),
(legacyResult, newResult) => CompareDictionaryContextData(legacyResult, newResult));
}

public List<string> EvaluateContainerArguments(
IExecutionContext executionContext,
SequenceToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
{
return EvaluateAndCompare(
executionContext,
"EvaluateContainerArguments",
() => _legacyManager.EvaluateContainerArguments(executionContext, token, extraExpressionValues),
() => _newManager.EvaluateContainerArguments(executionContext, ConvertToNewToken(token) as GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.SequenceToken, ConvertToNewExpressionValues(extraExpressionValues)),
(legacyResult, newResult) => CompareLists(legacyResult, newResult, "ContainerArguments"));
}

public Dictionary<string, string> EvaluateContainerEnvironment(
IExecutionContext executionContext,
MappingToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
{
return EvaluateAndCompare(
executionContext,
"EvaluateContainerEnvironment",
() => _legacyManager.EvaluateContainerEnvironment(executionContext, token, extraExpressionValues),
() => _newManager.EvaluateContainerEnvironment(executionContext, ConvertToNewToken(token) as GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.MappingToken, ConvertToNewExpressionValues(extraExpressionValues)),
(legacyResult, newResult) => {
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
return CompareDictionaries(trace, legacyResult, newResult, "ContainerEnvironment");
});
}

public string EvaluateDefaultInput(
IExecutionContext executionContext,
string inputName,
TemplateToken token)
{
return EvaluateAndCompare(
executionContext,
"EvaluateDefaultInput",
() => _legacyManager.EvaluateDefaultInput(executionContext, inputName, token),
() => _newManager.EvaluateDefaultInput(executionContext, inputName, ConvertToNewToken(token)),
(legacyResult, newResult) => string.Equals(legacyResult, newResult, StringComparison.Ordinal));
}

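// NOTE: Each public method above evaluates with the legacy manifest manager as the source of
// truth; when the comparison feature is enabled it also evaluates with the new
// WorkflowParser-based manager and records telemetry if the two results disagree (see
// EvaluateAndCompare below). The legacy result is always the one returned to callers.
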
// Conversion helper methods
private ActionDefinitionData ConvertToLegacyActionDefinitionData(ActionDefinitionDataNew newData)
{
if (newData == null)
{
return null;
}

return new ActionDefinitionData
{
Name = newData.Name,
Description = newData.Description,
Inputs = ConvertToLegacyToken<MappingToken>(newData.Inputs),
Deprecated = newData.Deprecated,
Execution = ConvertToLegacyExecution(newData.Execution)
};
}

private ActionExecutionData ConvertToLegacyExecution(ActionExecutionData execution)
{
if (execution == null)
{
return null;
}

// Handle different execution types
if (execution is ContainerActionExecutionDataNew containerNew)
{
return new ContainerActionExecutionData
{
Image = containerNew.Image,
EntryPoint = containerNew.EntryPoint,
Arguments = ConvertToLegacyToken<SequenceToken>(containerNew.Arguments),
Environment = ConvertToLegacyToken<MappingToken>(containerNew.Environment),
Pre = containerNew.Pre,
Post = containerNew.Post,
InitCondition = containerNew.InitCondition,
CleanupCondition = containerNew.CleanupCondition
};
}
else if (execution is CompositeActionExecutionDataNew compositeNew)
{
return new CompositeActionExecutionData
{
Steps = ConvertToLegacySteps(compositeNew.Steps),
Outputs = ConvertToLegacyToken<MappingToken>(compositeNew.Outputs)
};
}
else
{
// For NodeJS and Plugin execution, they don't use new token types, so just return as-is
return execution;
}
}

private List<GitHub.DistributedTask.Pipelines.ActionStep> ConvertToLegacySteps(List<GitHub.Actions.WorkflowParser.IStep> newSteps)
{
if (newSteps == null)
{
return null;
}

// Serialize new steps and deserialize to old steps
var json = StringUtil.ConvertToJson(newSteps, Newtonsoft.Json.Formatting.None);
return StringUtil.ConvertFromJson<List<GitHub.DistributedTask.Pipelines.ActionStep>>(json);
}

private T ConvertToLegacyToken<T>(GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken newToken) where T : TemplateToken
{
if (newToken == null)
{
return null;
}

// Serialize and deserialize to convert between token types
var json = StringUtil.ConvertToJson(newToken, Newtonsoft.Json.Formatting.None);
return StringUtil.ConvertFromJson<T>(json);
}

private GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken ConvertToNewToken(TemplateToken legacyToken)
{
if (legacyToken == null)
{
return null;
}

var json = StringUtil.ConvertToJson(legacyToken, Newtonsoft.Json.Formatting.None);
return StringUtil.ConvertFromJson<GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken>(json);
}

private IDictionary<string, GitHub.Actions.Expressions.Data.ExpressionData> ConvertToNewExpressionValues(IDictionary<string, PipelineContextData> legacyValues)
{
if (legacyValues == null)
{
return null;
}

var json = StringUtil.ConvertToJson(legacyValues, Newtonsoft.Json.Formatting.None);
return StringUtil.ConvertFromJson<IDictionary<string, GitHub.Actions.Expressions.Data.ExpressionData>>(json);
}

private T ConvertToLegacyContextData<T>(GitHub.Actions.Expressions.Data.ExpressionData newData) where T : PipelineContextData
{
if (newData == null)
{
return null;
}

var json = StringUtil.ConvertToJson(newData, Newtonsoft.Json.Formatting.None);
return StringUtil.ConvertFromJson<T>(json);
}

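// The conversion helpers above assume the legacy (GitHub.DistributedTask) and new
// (GitHub.Actions.WorkflowParser / GitHub.Actions.Expressions) types share the same JSON shape,
// so a serialize/deserialize round trip is enough to translate between them. Illustrative usage,
// with the helper names defined in this file:
//   var newToken = ConvertToNewToken(legacyToken);                       // legacy -> new
//   var legacyMapping = ConvertToLegacyToken<MappingToken>(newToken);    // new -> legacy
// If the JSON shapes ever diverge, the comparison logic below surfaces that as a mismatch.
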
// Comparison helper methods
private TLegacy EvaluateAndCompare<TLegacy, TNew>(
IExecutionContext context,
string methodName,
Func<TLegacy> legacyEvaluator,
Func<TNew> newEvaluator,
Func<TLegacy, TNew, bool> resultComparer)
{
// Legacy only?
if (!((context.Global.Variables.GetBoolean(Constants.Runner.Features.CompareWorkflowParser) ?? false)
|| StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("ACTIONS_RUNNER_COMPARE_WORKFLOW_PARSER"))))
{
return legacyEvaluator();
}

var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));

// Legacy evaluator
var legacyException = default(Exception);
var legacyResult = default(TLegacy);
try
{
legacyResult = legacyEvaluator();
}
catch (Exception ex)
{
legacyException = ex;
}

// Compare with new evaluator
try
{
ArgUtil.NotNull(context, nameof(context));
trace.Info(methodName);

// New evaluator
var newException = default(Exception);
var newResult = default(TNew);
try
{
newResult = newEvaluator();
}
catch (Exception ex)
{
newException = ex;
}

// Compare results or exceptions
if (legacyException != null || newException != null)
{
// Either one or both threw exceptions - compare them
if (!CompareExceptions(trace, legacyException, newException))
{
trace.Info($"{methodName} exception mismatch");
RecordMismatch(context, $"{methodName}");
}
}
else
{
// Both succeeded - compare results
// Skip comparison if new implementation returns null (not yet implemented)
if (newResult != null && !resultComparer(legacyResult, newResult))
{
trace.Info($"{methodName} mismatch");
RecordMismatch(context, $"{methodName}");
}
}
}
catch (Exception ex)
{
trace.Info($"Comparison failed: {ex.Message}");
RecordComparisonError(context, $"{methodName}: {ex.Message}");
}

// Re-throw legacy exception if any
if (legacyException != null)
{
throw legacyException;
}

return legacyResult;
}

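// Comparison mode is opt-in. It runs only when the CompareWorkflowParser runner feature flag is
// true or the ACTIONS_RUNNER_COMPARE_WORKFLOW_PARSER environment variable is truthy, e.g.
// (illustrative, on a self-hosted runner):
//   export ACTIONS_RUNNER_COMPARE_WORKFLOW_PARSER=true
// Otherwise only the legacy evaluator runs and no comparison overhead is incurred.
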
private void RecordMismatch(IExecutionContext context, string methodName)
{
if (!context.Global.HasActionManifestMismatch)
{
context.Global.HasActionManifestMismatch = true;
var telemetry = new JobTelemetry { Type = JobTelemetryType.General, Message = $"ActionManifestMismatch: {methodName}" };
context.Global.JobTelemetry.Add(telemetry);
}
}

private void RecordComparisonError(IExecutionContext context, string errorDetails)
{
if (!context.Global.HasActionManifestMismatch)
{
context.Global.HasActionManifestMismatch = true;
var telemetry = new JobTelemetry { Type = JobTelemetryType.General, Message = $"ActionManifestComparisonError: {errorDetails}" };
context.Global.JobTelemetry.Add(telemetry);
}
}

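// The Compare* helpers below treat the legacy result as canonical. Scalars are compared with
// ordinal string equality and token/context structures via their JSON serialization; any
// difference is traced and recorded at most once per job as ActionManifestMismatch telemetry.
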
private bool CompareActionDefinition(ActionDefinitionData legacyResult, ActionDefinitionData newResult)
|
||||
{
|
||||
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
|
||||
if (legacyResult == null && newResult == null)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (legacyResult == null || newResult == null)
|
||||
{
|
||||
trace.Info($"CompareActionDefinition mismatch - one result is null (legacy={legacyResult == null}, new={newResult == null})");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(legacyResult.Name, newResult.Name, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareActionDefinition mismatch - Name differs (legacy='{legacyResult.Name}', new='{newResult.Name}')");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(legacyResult.Description, newResult.Description, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareActionDefinition mismatch - Description differs (legacy='{legacyResult.Description}', new='{newResult.Description}')");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Compare Inputs token
|
||||
var legacyInputsJson = legacyResult.Inputs != null ? StringUtil.ConvertToJson(legacyResult.Inputs) : null;
|
||||
var newInputsJson = newResult.Inputs != null ? StringUtil.ConvertToJson(newResult.Inputs) : null;
|
||||
if (!string.Equals(legacyInputsJson, newInputsJson, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareActionDefinition mismatch - Inputs differ");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Compare Deprecated
|
||||
if (!CompareDictionaries(trace, legacyResult.Deprecated, newResult.Deprecated, "Deprecated"))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// Compare Execution
|
||||
if (!CompareExecution(trace, legacyResult.Execution, newResult.Execution))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private bool CompareExecution(Tracing trace, ActionExecutionData legacy, ActionExecutionData newExecution)
|
||||
{
|
||||
if (legacy == null && newExecution == null)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (legacy == null || newExecution == null)
|
||||
{
|
||||
trace.Info($"CompareExecution mismatch - one is null (legacy={legacy == null}, new={newExecution == null})");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (legacy.GetType() != newExecution.GetType())
|
||||
{
|
||||
trace.Info($"CompareExecution mismatch - different types (legacy={legacy.GetType().Name}, new={newExecution.GetType().Name})");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Compare based on type
|
||||
if (legacy is NodeJSActionExecutionData legacyNode && newExecution is NodeJSActionExecutionData newNode)
|
||||
{
|
||||
return CompareNodeJSExecution(trace, legacyNode, newNode);
|
||||
}
|
||||
else if (legacy is ContainerActionExecutionData legacyContainer && newExecution is ContainerActionExecutionData newContainer)
|
||||
{
|
||||
return CompareContainerExecution(trace, legacyContainer, newContainer);
|
||||
}
|
||||
else if (legacy is CompositeActionExecutionData legacyComposite && newExecution is CompositeActionExecutionData newComposite)
|
||||
{
|
||||
return CompareCompositeExecution(trace, legacyComposite, newComposite);
|
||||
}
|
||||
else if (legacy is PluginActionExecutionData legacyPlugin && newExecution is PluginActionExecutionData newPlugin)
|
||||
{
|
||||
return ComparePluginExecution(trace, legacyPlugin, newPlugin);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private bool CompareNodeJSExecution(Tracing trace, NodeJSActionExecutionData legacy, NodeJSActionExecutionData newExecution)
|
||||
{
|
||||
if (!string.Equals(legacy.NodeVersion, newExecution.NodeVersion, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareNodeJSExecution mismatch - NodeVersion differs (legacy='{legacy.NodeVersion}', new='{newExecution.NodeVersion}')");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(legacy.Script, newExecution.Script, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareNodeJSExecution mismatch - Script differs (legacy='{legacy.Script}', new='{newExecution.Script}')");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(legacy.Pre, newExecution.Pre, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareNodeJSExecution mismatch - Pre differs");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(legacy.Post, newExecution.Post, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareNodeJSExecution mismatch - Post differs");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(legacy.InitCondition, newExecution.InitCondition, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareNodeJSExecution mismatch - InitCondition differs");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(legacy.CleanupCondition, newExecution.CleanupCondition, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareNodeJSExecution mismatch - CleanupCondition differs");
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private bool CompareContainerExecution(Tracing trace, ContainerActionExecutionData legacy, ContainerActionExecutionData newExecution)
|
||||
{
|
||||
if (!string.Equals(legacy.Image, newExecution.Image, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareContainerExecution mismatch - Image differs");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(legacy.EntryPoint, newExecution.EntryPoint, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareContainerExecution mismatch - EntryPoint differs");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Compare Arguments token
|
||||
var legacyArgsJson = legacy.Arguments != null ? StringUtil.ConvertToJson(legacy.Arguments) : null;
|
||||
var newArgsJson = newExecution.Arguments != null ? StringUtil.ConvertToJson(newExecution.Arguments) : null;
|
||||
if (!string.Equals(legacyArgsJson, newArgsJson, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareContainerExecution mismatch - Arguments differ");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Compare Environment token
|
||||
var legacyEnvJson = legacy.Environment != null ? StringUtil.ConvertToJson(legacy.Environment) : null;
|
||||
var newEnvJson = newExecution.Environment != null ? StringUtil.ConvertToJson(newExecution.Environment) : null;
|
||||
if (!string.Equals(legacyEnvJson, newEnvJson, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareContainerExecution mismatch - Environment differs");
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private bool CompareCompositeExecution(Tracing trace, CompositeActionExecutionData legacy, CompositeActionExecutionData newExecution)
|
||||
{
|
||||
// Compare Steps
|
||||
if (legacy.Steps?.Count != newExecution.Steps?.Count)
|
||||
{
|
||||
trace.Info($"CompareCompositeExecution mismatch - Steps.Count differs (legacy={legacy.Steps?.Count}, new={newExecution.Steps?.Count})");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Compare Outputs token
|
||||
var legacyOutputsJson = legacy.Outputs != null ? StringUtil.ConvertToJson(legacy.Outputs) : null;
|
||||
var newOutputsJson = newExecution.Outputs != null ? StringUtil.ConvertToJson(newExecution.Outputs) : null;
|
||||
if (!string.Equals(legacyOutputsJson, newOutputsJson, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareCompositeExecution mismatch - Outputs differ");
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private bool ComparePluginExecution(Tracing trace, PluginActionExecutionData legacy, PluginActionExecutionData newExecution)
|
||||
{
|
||||
if (!string.Equals(legacy.Plugin, newExecution.Plugin, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"ComparePluginExecution mismatch - Plugin differs");
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private bool CompareDictionaryContextData(DictionaryContextData legacy, DictionaryContextData newData)
|
||||
{
|
||||
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
|
||||
if (legacy == null && newData == null)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (legacy == null || newData == null)
|
||||
{
|
||||
trace.Info($"CompareDictionaryContextData mismatch - one is null (legacy={legacy == null}, new={newData == null})");
|
||||
return false;
|
||||
}
|
||||
|
||||
var legacyJson = StringUtil.ConvertToJson(legacy);
|
||||
var newJson = StringUtil.ConvertToJson(newData);
|
||||
|
||||
if (!string.Equals(legacyJson, newJson, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareDictionaryContextData mismatch");
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private bool CompareLists(IList<string> legacyList, IList<string> newList, string fieldName)
|
||||
{
|
||||
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
|
||||
if (legacyList == null && newList == null)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (legacyList == null || newList == null)
|
||||
{
|
||||
trace.Info($"CompareLists mismatch - {fieldName} - one is null (legacy={legacyList == null}, new={newList == null})");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (legacyList.Count != newList.Count)
|
||||
{
|
||||
trace.Info($"CompareLists mismatch - {fieldName}.Count differs (legacy={legacyList.Count}, new={newList.Count})");
|
||||
return false;
|
||||
}
|
||||
|
||||
for (int i = 0; i < legacyList.Count; i++)
|
||||
{
|
||||
if (!string.Equals(legacyList[i], newList[i], StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareLists mismatch - {fieldName}[{i}] differs (legacy='{legacyList[i]}', new='{newList[i]}')");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private bool CompareDictionaries(Tracing trace, IDictionary<string, string> legacyDict, IDictionary<string, string> newDict, string fieldName)
|
||||
{
|
||||
if (legacyDict == null && newDict == null)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (legacyDict == null || newDict == null)
|
||||
{
|
||||
trace.Info($"CompareDictionaries mismatch - {fieldName} - one is null (legacy={legacyDict == null}, new={newDict == null})");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (legacyDict is Dictionary<string, string> legacyTypedDict && newDict is Dictionary<string, string> newTypedDict)
|
||||
{
|
||||
if (!object.Equals(legacyTypedDict.Comparer, newTypedDict.Comparer))
|
||||
{
|
||||
trace.Info($"CompareDictionaries mismatch - {fieldName} - different comparers (legacy={legacyTypedDict.Comparer.GetType().Name}, new={newTypedDict.Comparer.GetType().Name})");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (legacyDict.Count != newDict.Count)
|
||||
{
|
||||
trace.Info($"CompareDictionaries mismatch - {fieldName}.Count differs (legacy={legacyDict.Count}, new={newDict.Count})");
|
||||
return false;
|
||||
}
|
||||
|
||||
foreach (var kvp in legacyDict)
|
||||
{
|
||||
if (!newDict.TryGetValue(kvp.Key, out var newValue))
|
||||
{
|
||||
trace.Info($"CompareDictionaries mismatch - {fieldName} - key '{kvp.Key}' missing in new result");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(kvp.Value, newValue, StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareDictionaries mismatch - {fieldName}['{kvp.Key}'] differs (legacy='{kvp.Value}', new='{newValue}')");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private bool CompareExceptions(Tracing trace, Exception legacyException, Exception newException)
|
||||
{
|
||||
if (legacyException == null && newException == null)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (legacyException == null || newException == null)
|
||||
{
|
||||
trace.Info($"CompareExceptions mismatch - one exception is null (legacy={legacyException == null}, new={newException == null})");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Compare exception messages recursively (including inner exceptions)
|
||||
var legacyMessages = GetExceptionMessages(legacyException);
|
||||
var newMessages = GetExceptionMessages(newException);
|
||||
|
||||
if (legacyMessages.Count != newMessages.Count)
|
||||
{
|
||||
trace.Info($"CompareExceptions mismatch - different number of exception messages (legacy={legacyMessages.Count}, new={newMessages.Count})");
|
||||
return false;
|
||||
}
|
||||
|
||||
for (int i = 0; i < legacyMessages.Count; i++)
|
||||
{
|
||||
if (!string.Equals(legacyMessages[i], newMessages[i], StringComparison.Ordinal))
|
||||
{
|
||||
trace.Info($"CompareExceptions mismatch - exception messages differ at level {i} (legacy='{legacyMessages[i]}', new='{newMessages[i]}')");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private IList<string> GetExceptionMessages(Exception ex)
|
||||
{
|
||||
var trace = HostContext.GetTrace(nameof(ActionManifestManagerWrapper));
|
||||
var messages = new List<string>();
|
||||
var toProcess = new Queue<Exception>();
|
||||
toProcess.Enqueue(ex);
|
||||
int count = 0;
|
||||
|
||||
while (toProcess.Count > 0 && count < 50)
|
||||
{
|
||||
var current = toProcess.Dequeue();
|
||||
if (current == null) continue;
|
||||
|
||||
messages.Add(current.Message);
|
||||
count++;
|
||||
|
||||
// Special handling for AggregateException - enqueue all inner exceptions
|
||||
if (current is AggregateException aggregateEx)
|
||||
{
|
||||
foreach (var innerEx in aggregateEx.InnerExceptions)
|
||||
{
|
||||
if (innerEx != null && count < 50)
|
||||
{
|
||||
toProcess.Enqueue(innerEx);
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (current.InnerException != null)
|
||||
{
|
||||
toProcess.Enqueue(current.InnerException);
|
||||
}
|
||||
|
||||
// Failsafe: if we have too many exceptions, stop and return what we have
|
||||
if (count >= 50)
|
||||
{
|
||||
trace.Info("CompareExceptions failsafe triggered - too many exceptions (50+)");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return messages;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -206,7 +206,7 @@ namespace GitHub.Runner.Worker
// Merge the default inputs from the definition
if (definition.Data?.Inputs != null)
{
var manifestManager = HostContext.GetService<IActionManifestManager>();
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
foreach (var input in definition.Data.Inputs)
{
string key = input.Key.AssertString("action input name").Value;

@@ -522,6 +522,10 @@ namespace GitHub.Runner.Worker
if (annotation != null)
{
stepResult.Annotations.Add(annotation.Value);
if (annotation.Value.IsInfrastructureIssue && string.IsNullOrEmpty(Global.InfrastructureFailureCategory))
{
Global.InfrastructureFailureCategory = issue.Category;
}
}
});

@@ -1302,10 +1306,14 @@ namespace GitHub.Runner.Worker
UpdateGlobalStepsContext();
}

internal IPipelineTemplateEvaluator ToPipelineTemplateEvaluatorInternal(ObjectTemplating.ITraceWriter traceWriter = null)
{
return new PipelineTemplateEvaluatorWrapper(HostContext, this, traceWriter);
}

private static void NoOp()
{
}

}

// The Error/Warning/etc methods are created as extension methods to simplify unit testing.

@@ -1335,9 +1343,9 @@ namespace GitHub.Runner.Worker
}

// Do not add a format string overload. See comment on ExecutionContext.Write().
public static void InfrastructureError(this IExecutionContext context, string message)
public static void InfrastructureError(this IExecutionContext context, string message, string category = null)
{
var issue = new Issue() { Type = IssueType.Error, Message = message, IsInfrastructureIssue = true };
var issue = new Issue() { Type = IssueType.Error, Message = message, IsInfrastructureIssue = true, Category = category };
context.AddIssue(issue, ExecutionContextLogOptions.Default);
}

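// The optional 'category' parameter added here flows into Issue.Category; the first
// infrastructure issue's category is captured as Global.InfrastructureFailureCategory and is
// reported to the service via CompleteJobAsync (see the @@ -522 hunk above and the @@ -321 hunk below).
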
@@ -1386,8 +1394,15 @@ namespace GitHub.Runner.Worker
return new[] { new KeyValuePair<string, object>(nameof(IExecutionContext), context) };
}

public static PipelineTemplateEvaluator ToPipelineTemplateEvaluator(this IExecutionContext context, ObjectTemplating.ITraceWriter traceWriter = null)
public static IPipelineTemplateEvaluator ToPipelineTemplateEvaluator(this IExecutionContext context, ObjectTemplating.ITraceWriter traceWriter = null)
{
// Create wrapper?
if ((context.Global.Variables.GetBoolean(Constants.Runner.Features.CompareWorkflowParser) ?? false) || StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("ACTIONS_RUNNER_COMPARE_WORKFLOW_PARSER")))
{
return (context as ExecutionContext).ToPipelineTemplateEvaluatorInternal(traceWriter);
}

// Legacy
if (traceWriter == null)
{
traceWriter = context.ToTemplateTraceWriter();

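// This extension method now returns the IPipelineTemplateEvaluator interface: callers get the
// PipelineTemplateEvaluatorWrapper (legacy plus new evaluation with comparison telemetry) when
// the CompareWorkflowParser feature is enabled, and the plain legacy PipelineTemplateEvaluator
// otherwise.
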
@@ -22,4 +22,13 @@ namespace GitHub.Runner.Worker.Expressions
return true;
}
}

public sealed class NewAlwaysFunction : GitHub.Actions.Expressions.Sdk.Function
{
protected override Object EvaluateCore(GitHub.Actions.Expressions.Sdk.EvaluationContext context, out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
{
resultMemory = null;
return true;
}
}
}

@@ -28,4 +28,18 @@ namespace GitHub.Runner.Worker.Expressions
return jobStatus == ActionResult.Cancelled;
}
}

public sealed class NewCancelledFunction : GitHub.Actions.Expressions.Sdk.Function
{
protected sealed override object EvaluateCore(GitHub.Actions.Expressions.Sdk.EvaluationContext evaluationContext, out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
{
resultMemory = null;
var templateContext = evaluationContext.State as GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateContext;
ArgUtil.NotNull(templateContext, nameof(templateContext));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Cancelled;
}
}
}

@@ -39,4 +39,29 @@ namespace GitHub.Runner.Worker.Expressions
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class NewFailureFunction : GitHub.Actions.Expressions.Sdk.Function
|
||||
{
|
||||
protected sealed override object EvaluateCore(GitHub.Actions.Expressions.Sdk.EvaluationContext evaluationContext, out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
|
||||
{
|
||||
resultMemory = null;
|
||||
var templateContext = evaluationContext.State as GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateContext;
|
||||
ArgUtil.NotNull(templateContext, nameof(templateContext));
|
||||
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
|
||||
ArgUtil.NotNull(executionContext, nameof(executionContext));
|
||||
|
||||
// Decide based on 'action_status' for composite MAIN steps and 'job.status' for pre, post and job-level steps
|
||||
var isCompositeMainStep = executionContext.IsEmbedded && executionContext.Stage == ActionRunStage.Main;
|
||||
if (isCompositeMainStep)
|
||||
{
|
||||
ActionResult actionStatus = EnumUtil.TryParse<ActionResult>(executionContext.GetGitHubContext("action_status")) ?? ActionResult.Success;
|
||||
return actionStatus == ActionResult.Failure;
|
||||
}
|
||||
else
|
||||
{
|
||||
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
|
||||
return jobStatus == ActionResult.Failure;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -143,4 +143,137 @@ namespace GitHub.Runner.Worker.Expressions
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class NewHashFilesFunction : GitHub.Actions.Expressions.Sdk.Function
|
||||
{
|
||||
private const int _hashFileTimeoutSeconds = 120;
|
||||
|
||||
protected sealed override Object EvaluateCore(
|
||||
GitHub.Actions.Expressions.Sdk.EvaluationContext context,
|
||||
out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
|
||||
{
|
||||
resultMemory = null;
|
||||
var templateContext = context.State as GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateContext;
|
||||
ArgUtil.NotNull(templateContext, nameof(templateContext));
|
||||
templateContext.ExpressionValues.TryGetValue(PipelineTemplateConstants.GitHub, out var githubContextData);
|
||||
ArgUtil.NotNull(githubContextData, nameof(githubContextData));
|
||||
var githubContext = githubContextData as GitHub.Actions.Expressions.Data.DictionaryExpressionData;
|
||||
ArgUtil.NotNull(githubContext, nameof(githubContext));
|
||||
|
||||
if (!githubContext.TryGetValue(PipelineTemplateConstants.HostWorkspace, out var workspace))
|
||||
{
|
||||
githubContext.TryGetValue(PipelineTemplateConstants.Workspace, out workspace);
|
||||
}
|
||||
ArgUtil.NotNull(workspace, nameof(workspace));
|
||||
|
||||
var workspaceData = workspace as GitHub.Actions.Expressions.Data.StringExpressionData;
|
||||
ArgUtil.NotNull(workspaceData, nameof(workspaceData));
|
||||
|
||||
string githubWorkspace = workspaceData.Value;
|
||||
|
||||
bool followSymlink = false;
|
||||
List<string> patterns = new();
|
||||
var firstParameter = true;
|
||||
foreach (var parameter in Parameters)
|
||||
{
|
||||
var parameterString = parameter.Evaluate(context).ConvertToString();
|
||||
if (firstParameter)
|
||||
{
|
||||
firstParameter = false;
|
||||
if (parameterString.StartsWith("--"))
|
||||
{
|
||||
if (string.Equals(parameterString, "--follow-symbolic-links", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
followSymlink = true;
|
||||
continue;
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ArgumentOutOfRangeException($"Invalid glob option {parameterString}, avaliable option: '--follow-symbolic-links'.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
patterns.Add(parameterString);
|
||||
}
|
||||
|
||||
context.Trace.Info($"Search root directory: '{githubWorkspace}'");
|
||||
context.Trace.Info($"Search pattern: '{string.Join(", ", patterns)}'");
|
||||
|
||||
string binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
|
||||
string runnerRoot = new DirectoryInfo(binDir).Parent.FullName;
|
||||
|
||||
string node = Path.Combine(runnerRoot, "externals", NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
|
||||
string hashFilesScript = Path.Combine(binDir, "hashFiles");
|
||||
var hashResult = string.Empty;
|
||||
var p = new ProcessInvoker(new NewHashFilesTrace(context.Trace));
|
||||
p.ErrorDataReceived += ((_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
|
||||
{
|
||||
hashResult = data.Data.Substring(10, data.Data.Length - 20);
|
||||
context.Trace.Info($"Hash result: '{hashResult}'");
|
||||
}
|
||||
else
|
||||
{
|
||||
context.Trace.Info(data.Data);
|
||||
}
|
||||
});
|
||||
|
||||
p.OutputDataReceived += ((_, data) =>
|
||||
{
|
||||
context.Trace.Info(data.Data);
|
||||
});
|
||||
|
||||
var env = new Dictionary<string, string>();
|
||||
if (followSymlink)
|
||||
{
|
||||
env["followSymbolicLinks"] = "true";
|
||||
}
|
||||
env["patterns"] = string.Join(Environment.NewLine, patterns);
|
||||
|
||||
using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(_hashFileTimeoutSeconds)))
|
||||
{
|
||||
try
|
||||
{
|
||||
int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
|
||||
fileName: node,
|
||||
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
|
||||
environment: env,
|
||||
requireExitCodeZero: false,
|
||||
cancellationToken: tokenSource.Token).GetAwaiter().GetResult();
|
||||
|
||||
if (exitCode != 0)
|
||||
{
|
||||
throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
|
||||
}
|
||||
}
|
||||
catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
|
||||
{
|
||||
throw new TimeoutException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') couldn't finish within {_hashFileTimeoutSeconds} seconds.");
|
||||
}
|
||||
|
||||
return hashResult;
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class NewHashFilesTrace : ITraceWriter
|
||||
{
|
||||
private GitHub.Actions.Expressions.ITraceWriter _trace;
|
||||
|
||||
public NewHashFilesTrace(GitHub.Actions.Expressions.ITraceWriter trace)
|
||||
{
|
||||
_trace = trace;
|
||||
}
|
||||
public void Info(string message)
|
||||
{
|
||||
_trace.Info(message);
|
||||
}
|
||||
|
||||
public void Verbose(string message)
|
||||
{
|
||||
_trace.Info(message);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -39,4 +39,29 @@ namespace GitHub.Runner.Worker.Expressions
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class NewSuccessFunction : GitHub.Actions.Expressions.Sdk.Function
|
||||
{
|
||||
protected sealed override object EvaluateCore(GitHub.Actions.Expressions.Sdk.EvaluationContext evaluationContext, out GitHub.Actions.Expressions.Sdk.ResultMemory resultMemory)
|
||||
{
|
||||
resultMemory = null;
|
||||
var templateContext = evaluationContext.State as GitHub.Actions.WorkflowParser.ObjectTemplating.TemplateContext;
|
||||
ArgUtil.NotNull(templateContext, nameof(templateContext));
|
||||
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
|
||||
ArgUtil.NotNull(executionContext, nameof(executionContext));
|
||||
|
||||
// Decide based on 'action_status' for composite MAIN steps and 'job.status' for pre, post and job-level steps
|
||||
var isCompositeMainStep = executionContext.IsEmbedded && executionContext.Stage == ActionRunStage.Main;
|
||||
if (isCompositeMainStep)
|
||||
{
|
||||
ActionResult actionStatus = EnumUtil.TryParse<ActionResult>(executionContext.GetGitHubContext("action_status")) ?? ActionResult.Success;
|
||||
return actionStatus == ActionResult.Success;
|
||||
}
|
||||
else
|
||||
{
|
||||
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
|
||||
return jobStatus == ActionResult.Success;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,5 +11,10 @@ namespace GitHub.Runner.Worker
var isContainerHooksPathSet = !string.IsNullOrEmpty(Environment.GetEnvironmentVariable(Constants.Hooks.ContainerHooksPath));
return isContainerHookFeatureFlagSet && isContainerHooksPathSet;
}

public static bool IsContainerActionRunnerTempEnabled(Variables variables)
{
return variables?.GetBoolean(Constants.Runner.Features.ContainerActionRunnerTemp) ?? false;
}
}
}

@@ -27,6 +27,9 @@ namespace GitHub.Runner.Worker
public StepsContext StepsContext { get; set; }
public Variables Variables { get; set; }
public bool WriteDebug { get; set; }
public string InfrastructureFailureCategory { get; set; }
public JObject ContainerHookState { get; set; }
public bool HasTemplateEvaluatorMismatch { get; set; }
public bool HasActionManifestMismatch { get; set; }
}
}

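// The new GlobalContext members are write-once-per-job signals: InfrastructureFailureCategory
// records the category of the first infrastructure error, while HasTemplateEvaluatorMismatch and
// HasActionManifestMismatch let the comparison wrappers emit mismatch telemetry at most once per job.
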
@@ -187,7 +187,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
if (Data.Outputs != null)
|
||||
{
|
||||
// Evaluate the outputs in the steps context to easily retrieve the values
|
||||
var actionManifestManager = HostContext.GetService<IActionManifestManager>();
|
||||
var actionManifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
|
||||
|
||||
// Format ExpressionValues to Dictionary<string, PipelineContextData>
|
||||
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
@@ -135,7 +135,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
var extraExpressionValues = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
|
||||
extraExpressionValues["inputs"] = inputsContext;
|
||||
|
||||
var manifestManager = HostContext.GetService<IActionManifestManager>();
|
||||
var manifestManager = HostContext.GetService<IActionManifestManagerWrapper>();
|
||||
if (Data.Arguments != null)
|
||||
{
|
||||
container.ContainerEntryPointArgs = "";
|
||||
@@ -191,11 +191,19 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
ArgUtil.Directory(tempWorkflowDirectory, nameof(tempWorkflowDirectory));
|
||||
|
||||
container.MountVolumes.Add(new MountVolume("/var/run/docker.sock", "/var/run/docker.sock"));
|
||||
if (FeatureManager.IsContainerActionRunnerTempEnabled(ExecutionContext.Global.Variables))
|
||||
{
|
||||
container.MountVolumes.Add(new MountVolume(tempDirectory, "/github/runner_temp"));
|
||||
}
|
||||
container.MountVolumes.Add(new MountVolume(tempHomeDirectory, "/github/home"));
|
||||
container.MountVolumes.Add(new MountVolume(tempWorkflowDirectory, "/github/workflow"));
|
||||
container.MountVolumes.Add(new MountVolume(tempFileCommandDirectory, "/github/file_commands"));
|
||||
container.MountVolumes.Add(new MountVolume(defaultWorkingDirectory, "/github/workspace"));
|
||||
|
||||
if (FeatureManager.IsContainerActionRunnerTempEnabled(ExecutionContext.Global.Variables))
|
||||
{
|
||||
container.AddPathTranslateMapping(tempDirectory, "/github/runner_temp");
|
||||
}
|
||||
container.AddPathTranslateMapping(tempHomeDirectory, "/github/home");
|
||||
container.AddPathTranslateMapping(tempWorkflowDirectory, "/github/workflow");
|
||||
container.AddPathTranslateMapping(tempFileCommandDirectory, "/github/file_commands");
|
||||
|
||||
@@ -400,6 +400,10 @@ namespace GitHub.Runner.Worker
if (snapshotRequest != null)
{
var snapshotOperationProvider = HostContext.GetService<ISnapshotOperationProvider>();
// Check that the runner is capable of taking a snapshot
snapshotOperationProvider.RunSnapshotPreflightChecks(context);

// Add postjob step to write snapshot file
jobContext.RegisterPostJobStep(new JobExtensionRunner(
runAsync: (executionContext, _) => snapshotOperationProvider.CreateSnapshotRequestAsync(executionContext, snapshotRequest),
condition: snapshotRequest.Condition,

@@ -321,7 +321,7 @@ namespace GitHub.Runner.Worker
{
try
{
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, jobContext.JobOutputs, jobContext.Global.StepsResult, jobContext.Global.JobAnnotations, environmentUrl, telemetry, billingOwnerId: message.BillingOwnerId, default);
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, jobContext.JobOutputs, jobContext.Global.StepsResult, jobContext.Global.JobAnnotations, environmentUrl, telemetry, billingOwnerId: message.BillingOwnerId, infrastructureFailureCategory: jobContext.Global.InfrastructureFailureCategory, default);
return result;
}
catch (VssUnauthorizedException ex)

src/Runner.Worker/PipelineTemplateEvaluatorWrapper.cs (new file, 679 lines)
@@ -0,0 +1,679 @@
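// PipelineTemplateEvaluatorWrapper mirrors the ActionManifestManagerWrapper pattern above: it
// evaluates every template with the legacy PipelineTemplateEvaluator and, side by side, with the
// new WorkflowTemplateEvaluator, returning the legacy result and recording telemetry when the two
// disagree.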
using System;
using System.Collections.Generic;
using GitHub.Actions.WorkflowParser;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;

namespace GitHub.Runner.Worker
{
internal sealed class PipelineTemplateEvaluatorWrapper : IPipelineTemplateEvaluator
{
private PipelineTemplateEvaluator _legacyEvaluator;
private WorkflowTemplateEvaluator _newEvaluator;
private IExecutionContext _context;
private Tracing _trace;

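// Both evaluators are constructed once per wrapper instance: the legacy evaluator with the
// pipeline template schema and the job's file table, and the new evaluator against the same file
// table. MaxErrorMessageLength is left unbounded on both so error messages are not truncated
// before secrets are scrubbed.
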
public PipelineTemplateEvaluatorWrapper(
|
||||
IHostContext hostContext,
|
||||
IExecutionContext context,
|
||||
ObjectTemplating.ITraceWriter traceWriter = null)
|
||||
{
|
||||
ArgUtil.NotNull(hostContext, nameof(hostContext));
|
||||
ArgUtil.NotNull(context, nameof(context));
|
||||
_context = context;
|
||||
_trace = hostContext.GetTrace(nameof(PipelineTemplateEvaluatorWrapper));
|
||||
|
||||
if (traceWriter == null)
|
||||
{
|
||||
traceWriter = context.ToTemplateTraceWriter();
|
||||
}
|
||||
|
||||
// Legacy evaluator
|
||||
var schema = PipelineTemplateSchemaFactory.GetSchema();
|
||||
_legacyEvaluator = new PipelineTemplateEvaluator(traceWriter, schema, context.Global.FileTable)
|
||||
{
|
||||
MaxErrorMessageLength = int.MaxValue, // Don't truncate error messages otherwise we might not scrub secrets correctly
|
||||
};
|
||||
|
||||
// New evaluator
|
||||
var newTraceWriter = new GitHub.Actions.WorkflowParser.ObjectTemplating.EmptyTraceWriter();
|
||||
_newEvaluator = new WorkflowTemplateEvaluator(newTraceWriter, context.Global.FileTable, features: null)
|
||||
{
|
||||
MaxErrorMessageLength = int.MaxValue, // Don't truncate error messages otherwise we might not scrub secrets correctly
|
||||
};
|
||||
}
|
||||
|
||||
public bool EvaluateStepContinueOnError(
|
||||
TemplateToken token,
|
||||
DictionaryContextData contextData,
|
||||
IList<IFunctionInfo> expressionFunctions)
|
||||
{
|
||||
return EvaluateAndCompare(
|
||||
"EvaluateStepContinueOnError",
|
||||
() => _legacyEvaluator.EvaluateStepContinueOnError(token, contextData, expressionFunctions),
|
||||
() => _newEvaluator.EvaluateStepContinueOnError(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
|
||||
(legacyResult, newResult) => legacyResult == newResult);
|
||||
}
|
||||
|
||||
public string EvaluateStepDisplayName(
|
||||
TemplateToken token,
|
||||
DictionaryContextData contextData,
|
||||
IList<IFunctionInfo> expressionFunctions)
|
||||
{
|
||||
return EvaluateAndCompare(
|
||||
"EvaluateStepDisplayName",
|
||||
() => _legacyEvaluator.EvaluateStepDisplayName(token, contextData, expressionFunctions),
|
||||
() => _newEvaluator.EvaluateStepName(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
|
||||
(legacyResult, newResult) => string.Equals(legacyResult, newResult, StringComparison.Ordinal));
|
||||
}
|
||||
|
||||
public Dictionary<string, string> EvaluateStepEnvironment(
|
||||
TemplateToken token,
|
||||
DictionaryContextData contextData,
|
||||
IList<IFunctionInfo> expressionFunctions,
|
||||
StringComparer keyComparer)
|
||||
{
|
||||
return EvaluateAndCompare(
|
||||
"EvaluateStepEnvironment",
|
||||
() => _legacyEvaluator.EvaluateStepEnvironment(token, contextData, expressionFunctions, keyComparer),
|
||||
() => _newEvaluator.EvaluateStepEnvironment(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions), keyComparer),
|
||||
CompareStepEnvironment);
|
||||
}
|
||||
|
||||
public bool EvaluateStepIf(
|
||||
TemplateToken token,
|
||||
DictionaryContextData contextData,
|
||||
IList<IFunctionInfo> expressionFunctions,
|
||||
IEnumerable<KeyValuePair<string, object>> expressionState)
|
||||
{
|
||||
return EvaluateAndCompare(
|
||||
"EvaluateStepIf",
|
||||
() => _legacyEvaluator.EvaluateStepIf(token, contextData, expressionFunctions, expressionState),
|
||||
() => _newEvaluator.EvaluateStepIf(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions), expressionState),
|
||||
(legacyResult, newResult) => legacyResult == newResult);
|
||||
}
|
||||
|
||||
public Dictionary<string, string> EvaluateStepInputs(
|
||||
TemplateToken token,
|
||||
DictionaryContextData contextData,
|
||||
IList<IFunctionInfo> expressionFunctions)
|
||||
{
|
||||
return EvaluateAndCompare(
|
||||
"EvaluateStepInputs",
|
||||
() => _legacyEvaluator.EvaluateStepInputs(token, contextData, expressionFunctions),
|
||||
() => _newEvaluator.EvaluateStepInputs(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
|
||||
(legacyResult, newResult) => CompareDictionaries(legacyResult, newResult, "StepInputs"));
|
||||
}
|
||||
|
||||
public int EvaluateStepTimeout(
|
||||
TemplateToken token,
|
||||
DictionaryContextData contextData,
|
||||
IList<IFunctionInfo> expressionFunctions)
|
||||
{
|
||||
return EvaluateAndCompare(
|
||||
"EvaluateStepTimeout",
|
||||
() => _legacyEvaluator.EvaluateStepTimeout(token, contextData, expressionFunctions),
|
||||
() => _newEvaluator.EvaluateStepTimeout(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
|
||||
(legacyResult, newResult) => legacyResult == newResult);
|
||||
}
|
||||
|
||||
public GitHub.DistributedTask.Pipelines.JobContainer EvaluateJobContainer(
|
||||
TemplateToken token,
|
||||
DictionaryContextData contextData,
|
||||
IList<IFunctionInfo> expressionFunctions)
|
||||
{
|
||||
return EvaluateAndCompare(
|
||||
"EvaluateJobContainer",
|
||||
() => _legacyEvaluator.EvaluateJobContainer(token, contextData, expressionFunctions),
|
||||
() => _newEvaluator.EvaluateJobContainer(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
|
||||
CompareJobContainer);
|
||||
}
|
||||
|
||||
public Dictionary<string, string> EvaluateJobOutput(
|
||||
TemplateToken token,
|
||||
DictionaryContextData contextData,
|
||||
IList<IFunctionInfo> expressionFunctions)
|
||||
{
|
||||
return EvaluateAndCompare(
|
||||
"EvaluateJobOutput",
|
||||
() => _legacyEvaluator.EvaluateJobOutput(token, contextData, expressionFunctions),
|
||||
() => _newEvaluator.EvaluateJobOutputs(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
|
||||
(legacyResult, newResult) => CompareDictionaries(legacyResult, newResult, "JobOutput"));
|
||||
}
|
||||
|
||||
public TemplateToken EvaluateEnvironmentUrl(
|
||||
TemplateToken token,
|
||||
DictionaryContextData contextData,
|
||||
IList<IFunctionInfo> expressionFunctions)
|
||||
{
|
||||
return EvaluateAndCompare(
|
||||
"EvaluateEnvironmentUrl",
|
||||
() => _legacyEvaluator.EvaluateEnvironmentUrl(token, contextData, expressionFunctions),
|
||||
() => _newEvaluator.EvaluateJobEnvironmentUrl(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
|
||||
CompareEnvironmentUrl);
|
||||
}
|
||||
|
||||
public Dictionary<string, string> EvaluateJobDefaultsRun(
|
||||
TemplateToken token,
|
||||
DictionaryContextData contextData,
|
||||
IList<IFunctionInfo> expressionFunctions)
|
||||
{
|
||||
return EvaluateAndCompare(
|
||||
"EvaluateJobDefaultsRun",
|
||||
() => _legacyEvaluator.EvaluateJobDefaultsRun(token, contextData, expressionFunctions),
|
||||
() => _newEvaluator.EvaluateJobDefaultsRun(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
|
||||
(legacyResult, newResult) => CompareDictionaries(legacyResult, newResult, "JobDefaultsRun"));
|
||||
}
|
||||
|
||||
public IList<KeyValuePair<string, GitHub.DistributedTask.Pipelines.JobContainer>> EvaluateJobServiceContainers(
|
||||
TemplateToken token,
|
||||
DictionaryContextData contextData,
|
||||
IList<IFunctionInfo> expressionFunctions)
|
||||
{
|
||||
return EvaluateAndCompare(
|
||||
"EvaluateJobServiceContainers",
|
||||
() => _legacyEvaluator.EvaluateJobServiceContainers(token, contextData, expressionFunctions),
|
||||
() => _newEvaluator.EvaluateJobServiceContainers(ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
|
||||
(legacyResult, newResult) => CompareJobServiceContainers(legacyResult, newResult));
|
||||
}
|
||||
|
||||
public GitHub.DistributedTask.Pipelines.Snapshot EvaluateJobSnapshotRequest(
|
||||
TemplateToken token,
|
||||
DictionaryContextData contextData,
|
||||
IList<IFunctionInfo> expressionFunctions)
|
||||
{
|
||||
return EvaluateAndCompare(
|
||||
"EvaluateJobSnapshotRequest",
|
||||
() => _legacyEvaluator.EvaluateJobSnapshotRequest(token, contextData, expressionFunctions),
|
||||
() => _newEvaluator.EvaluateSnapshot(string.Empty, ConvertToken(token), ConvertData(contextData), ConvertFunctions(expressionFunctions)),
|
||||
CompareSnapshot);
|
||||
}
|
||||
|
||||
private void RecordMismatch(string methodName)
|
||||
{
|
||||
if (!_context.Global.HasTemplateEvaluatorMismatch)
|
||||
{
|
||||
_context.Global.HasTemplateEvaluatorMismatch = true;
|
||||
var telemetry = new JobTelemetry { Type = JobTelemetryType.General, Message = $"TemplateEvaluatorMismatch: {methodName}" };
|
||||
_context.Global.JobTelemetry.Add(telemetry);
|
||||
}
|
||||
}
|
||||
|
||||
private void RecordComparisonError(string errorDetails)
|
||||
{
|
||||
if (!_context.Global.HasTemplateEvaluatorMismatch)
|
||||
{
|
||||
_context.Global.HasTemplateEvaluatorMismatch = true;
|
||||
var telemetry = new JobTelemetry { Type = JobTelemetryType.General, Message = $"TemplateEvaluatorComparisonError: {errorDetails}" };
|
||||
_context.Global.JobTelemetry.Add(telemetry);
|
||||
}
|
||||
}
|
||||
|
||||
private TLegacy EvaluateAndCompare<TLegacy, TNew>(
|
||||
string methodName,
|
||||
Func<TLegacy> legacyEvaluator,
|
||||
Func<TNew> newEvaluator,
|
||||
Func<TLegacy, TNew, bool> resultComparer)
|
||||
{
|
||||
// Legacy evaluator
|
||||
var legacyException = default(Exception);
|
||||
var legacyResult = default(TLegacy);
|
||||
try
|
||||
{
|
||||
legacyResult = legacyEvaluator();
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
legacyException = ex;
|
||||
}
|
||||
|
||||
// Compare with new evaluator
|
||||
try
|
||||
{
|
||||
ArgUtil.NotNull(_context, nameof(_context));
|
||||
ArgUtil.NotNull(_newEvaluator, nameof(_newEvaluator));
|
||||
_trace.Info(methodName);
|
||||
|
||||
// New evaluator
|
||||
var newException = default(Exception);
|
||||
var newResult = default(TNew);
|
||||
try
|
||||
{
|
||||
newResult = newEvaluator();
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
newException = ex;
|
||||
}
|
||||
|
||||
// Compare results or exceptions
|
||||
if (legacyException != null || newException != null)
|
||||
{
|
||||
// Either one or both threw exceptions - compare them
|
||||
if (!CompareExceptions(legacyException, newException))
|
||||
{
|
||||
_trace.Info($"{methodName} exception mismatch");
|
||||
RecordMismatch($"{methodName}");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Both succeeded - compare results
|
||||
if (!resultComparer(legacyResult, newResult))
|
||||
{
|
||||
_trace.Info($"{methodName} mismatch");
|
||||
RecordMismatch($"{methodName}");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_trace.Info($"Comparison failed: {ex.Message}");
|
||||
RecordComparisonError($"{methodName}: {ex.Message}");
|
||||
}
|
||||
|
||||
// Re-throw legacy exception if any
|
||||
if (legacyException != null)
|
||||
{
|
||||
throw legacyException;
|
||||
}
|
||||
|
||||
return legacyResult;
|
||||
}
|
||||
|
||||
private GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken ConvertToken(
|
||||
GitHub.DistributedTask.ObjectTemplating.Tokens.TemplateToken token)
|
||||
{
|
||||
if (token == null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var json = StringUtil.ConvertToJson(token, Newtonsoft.Json.Formatting.None);
|
||||
return StringUtil.ConvertFromJson<GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken>(json);
|
||||
}
|
||||
|
||||
private GitHub.Actions.Expressions.Data.DictionaryExpressionData ConvertData(
|
||||
GitHub.DistributedTask.Pipelines.ContextData.DictionaryContextData contextData)
|
||||
{
|
||||
if (contextData == null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var json = StringUtil.ConvertToJson(contextData, Newtonsoft.Json.Formatting.None);
|
||||
return StringUtil.ConvertFromJson<GitHub.Actions.Expressions.Data.DictionaryExpressionData>(json);
|
||||
}
|
||||
|
||||
private IList<GitHub.Actions.Expressions.IFunctionInfo> ConvertFunctions(
|
||||
IList<GitHub.DistributedTask.Expressions2.IFunctionInfo> expressionFunctions)
|
||||
{
|
||||
if (expressionFunctions == null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var result = new List<GitHub.Actions.Expressions.IFunctionInfo>();
|
||||
foreach (var func in expressionFunctions)
|
||||
{
|
||||
GitHub.Actions.Expressions.IFunctionInfo newFunc = func.Name switch
|
||||
{
|
||||
"always" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewAlwaysFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||
"cancelled" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewCancelledFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||
"failure" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewFailureFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||
"success" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewSuccessFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||
"hashFiles" => new GitHub.Actions.Expressions.FunctionInfo<Expressions.NewHashFilesFunction>(func.Name, func.MinParameters, func.MaxParameters),
|
||||
_ => throw new NotSupportedException($"Expression function '{func.Name}' is not supported for conversion")
|
||||
};
|
||||
result.Add(newFunc);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private bool CompareStepEnvironment(
|
||||
Dictionary<string, string> legacyResult,
|
||||
Dictionary<string, string> newResult)
|
||||
{
|
||||
return CompareDictionaries(legacyResult, newResult, "StepEnvironment");
|
||||
}
|
||||
private bool CompareEnvironmentUrl(
TemplateToken legacyResult,
GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens.TemplateToken newResult)
{
var legacyJson = legacyResult != null ? Newtonsoft.Json.JsonConvert.SerializeObject(legacyResult, Newtonsoft.Json.Formatting.None) : null;
var newJson = newResult != null ? Newtonsoft.Json.JsonConvert.SerializeObject(newResult, Newtonsoft.Json.Formatting.None) : null;
return legacyJson == newJson;
}

private bool CompareJobContainer(
GitHub.DistributedTask.Pipelines.JobContainer legacyResult,
GitHub.Actions.WorkflowParser.JobContainer newResult)
{
if (legacyResult == null && newResult == null)
{
return true;
}

if (legacyResult == null || newResult == null)
{
_trace.Info($"CompareJobContainer mismatch - one result is null (legacy={legacyResult == null}, new={newResult == null})");
return false;
}

if (!string.Equals(legacyResult.Image, newResult.Image, StringComparison.Ordinal))
{
_trace.Info($"CompareJobContainer mismatch - Image differs (legacy='{legacyResult.Image}', new='{newResult.Image}')");
return false;
}

if (!string.Equals(legacyResult.Options, newResult.Options, StringComparison.Ordinal))
{
_trace.Info($"CompareJobContainer mismatch - Options differs (legacy='{legacyResult.Options}', new='{newResult.Options}')");
return false;
}

if (!CompareDictionaries(legacyResult.Environment, newResult.Environment, "Environment"))
{
return false;
}

if (!CompareLists(legacyResult.Volumes, newResult.Volumes, "Volumes"))
{
return false;
}

if (!CompareLists(legacyResult.Ports, newResult.Ports, "Ports"))
{
return false;
}

if (!CompareCredentials(legacyResult.Credentials, newResult.Credentials))
{
return false;
}

return true;
}

private bool CompareCredentials(
GitHub.DistributedTask.Pipelines.ContainerRegistryCredentials legacyCreds,
GitHub.Actions.WorkflowParser.ContainerRegistryCredentials newCreds)
{
if (legacyCreds == null && newCreds == null)
{
return true;
}

if (legacyCreds == null || newCreds == null)
{
_trace.Info($"CompareCredentials mismatch - one is null (legacy={legacyCreds == null}, new={newCreds == null})");
return false;
}

if (!string.Equals(legacyCreds.Username, newCreds.Username, StringComparison.Ordinal))
{
_trace.Info($"CompareCredentials mismatch - Credentials.Username differs (legacy='{legacyCreds.Username}', new='{newCreds.Username}')");
return false;
}

if (!string.Equals(legacyCreds.Password, newCreds.Password, StringComparison.Ordinal))
{
_trace.Info($"CompareCredentials mismatch - Credentials.Password differs");
return false;
}

return true;
}

private bool CompareLists(IList<string> legacyList, IList<string> newList, string fieldName)
{
if (legacyList == null && newList == null)
{
return true;
}

if (legacyList == null || newList == null)
{
_trace.Info($"CompareLists mismatch - {fieldName} - one is null (legacy={legacyList == null}, new={newList == null})");
return false;
}

if (legacyList.Count != newList.Count)
{
_trace.Info($"CompareLists mismatch - {fieldName}.Count differs (legacy={legacyList.Count}, new={newList.Count})");
return false;
}

for (int i = 0; i < legacyList.Count; i++)
{
if (!string.Equals(legacyList[i], newList[i], StringComparison.Ordinal))
{
_trace.Info($"CompareLists mismatch - {fieldName}[{i}] differs (legacy='{legacyList[i]}', new='{newList[i]}')");
return false;
}
}

return true;
}

private bool CompareDictionaries(IDictionary<string, string> legacyDict, IDictionary<string, string> newDict, string fieldName)
{
if (legacyDict == null && newDict == null)
{
return true;
}

if (legacyDict == null || newDict == null)
{
_trace.Info($"CompareDictionaries mismatch - {fieldName} - one is null (legacy={legacyDict == null}, new={newDict == null})");
return false;
}

if (legacyDict is Dictionary<String, String> legacyTypedDict && newDict is Dictionary<String, String> newTypedDict)
{
if (!object.Equals(legacyTypedDict.Comparer, newTypedDict.Comparer))
{
_trace.Info($"CompareDictionaries mismatch - {fieldName} - different comparers (legacy={legacyTypedDict.Comparer.GetType().Name}, new={newTypedDict.Comparer.GetType().Name})");
return false;
}
}

if (legacyDict.Count != newDict.Count)
{
_trace.Info($"CompareDictionaries mismatch - {fieldName}.Count differs (legacy={legacyDict.Count}, new={newDict.Count})");
return false;
}

foreach (var kvp in legacyDict)
{
if (!newDict.TryGetValue(kvp.Key, out var newValue))
{
_trace.Info($"CompareDictionaries mismatch - {fieldName} - key '{kvp.Key}' missing in new result");
return false;
}

if (!string.Equals(kvp.Value, newValue, StringComparison.Ordinal))
{
_trace.Info($"CompareDictionaries mismatch - {fieldName}['{kvp.Key}'] differs (legacy='{kvp.Value}', new='{newValue}')");
return false;
}
}

return true;
}

private bool CompareJobServiceContainers(
IList<KeyValuePair<string, GitHub.DistributedTask.Pipelines.JobContainer>> legacyResult,
IList<KeyValuePair<string, GitHub.Actions.WorkflowParser.JobContainer>> newResult)
{
if (legacyResult == null && newResult == null)
{
return true;
}

if (legacyResult == null || newResult == null)
{
_trace.Info($"CompareJobServiceContainers mismatch - one result is null (legacy={legacyResult == null}, new={newResult == null})");
return false;
}

if (legacyResult.Count != newResult.Count)
{
_trace.Info($"CompareJobServiceContainers mismatch - ServiceContainers.Count differs (legacy={legacyResult.Count}, new={newResult.Count})");
return false;
}

for (int i = 0; i < legacyResult.Count; i++)
{
var legacyKvp = legacyResult[i];
var newKvp = newResult[i];

if (!string.Equals(legacyKvp.Key, newKvp.Key, StringComparison.Ordinal))
{
_trace.Info($"CompareJobServiceContainers mismatch - ServiceContainers[{i}].Key differs (legacy='{legacyKvp.Key}', new='{newKvp.Key}')");
return false;
}

if (!CompareJobContainer(legacyKvp.Value, newKvp.Value))
{
_trace.Info($"CompareJobServiceContainers mismatch - ServiceContainers['{legacyKvp.Key}']");
return false;
}
}

return true;
}

private bool CompareSnapshot(
GitHub.DistributedTask.Pipelines.Snapshot legacyResult,
GitHub.Actions.WorkflowParser.Snapshot newResult)
{
if (legacyResult == null && newResult == null)
{
return true;
}

if (legacyResult == null || newResult == null)
{
_trace.Info($"CompareSnapshot mismatch - one is null (legacy={legacyResult == null}, new={newResult == null})");
return false;
}

if (!string.Equals(legacyResult.ImageName, newResult.ImageName, StringComparison.Ordinal))
{
_trace.Info($"CompareSnapshot mismatch - Snapshot.ImageName differs (legacy='{legacyResult.ImageName}', new='{newResult.ImageName}')");
return false;
}

if (!string.Equals(legacyResult.Version, newResult.Version, StringComparison.Ordinal))
{
_trace.Info($"CompareSnapshot mismatch - Snapshot.Version differs (legacy='{legacyResult.Version}', new='{newResult.Version}')");
return false;
}

// Compare Condition (legacy) vs If (new)
// Legacy has Condition as string, new has If as BasicExpressionToken
// For comparison, we'll serialize the If token and compare with Condition
var newIfValue = newResult.If != null ? Newtonsoft.Json.JsonConvert.SerializeObject(newResult.If, Newtonsoft.Json.Formatting.None) : null;

// Legacy Condition is a string expression like "success()"
// New If is a BasicExpressionToken that needs to be serialized
// We'll do a basic comparison - if both are null/empty or both exist
var legacyHasCondition = !string.IsNullOrEmpty(legacyResult.Condition);
var newHasIf = newResult.If != null;

if (legacyHasCondition != newHasIf)
{
_trace.Info($"CompareSnapshot mismatch - condition/if presence differs (legacy has condition={legacyHasCondition}, new has if={newHasIf})");
return false;
}

return true;
}

private bool CompareExceptions(Exception legacyException, Exception newException)
{
if (legacyException == null && newException == null)
{
return true;
}

if (legacyException == null || newException == null)
{
_trace.Info($"CompareExceptions mismatch - one exception is null (legacy={legacyException == null}, new={newException == null})");
return false;
}

// Compare exception messages recursively (including inner exceptions)
var legacyMessages = GetExceptionMessages(legacyException);
var newMessages = GetExceptionMessages(newException);

if (legacyMessages.Count != newMessages.Count)
{
_trace.Info($"CompareExceptions mismatch - different number of exception messages (legacy={legacyMessages.Count}, new={newMessages.Count})");
return false;
}

for (int i = 0; i < legacyMessages.Count; i++)
{
if (!string.Equals(legacyMessages[i], newMessages[i], StringComparison.Ordinal))
{
_trace.Info($"CompareExceptions mismatch - exception messages differ at level {i} (legacy='{legacyMessages[i]}', new='{newMessages[i]}')");
return false;
}
}

return true;
}

private IList<string> GetExceptionMessages(Exception ex)
{
var messages = new List<string>();
var toProcess = new Queue<Exception>();
toProcess.Enqueue(ex);
int count = 0;

while (toProcess.Count > 0 && count < 50)
{
var current = toProcess.Dequeue();
if (current == null) continue;

messages.Add(current.Message);
count++;

// Special handling for AggregateException - enqueue all inner exceptions
if (current is AggregateException aggregateEx)
{
foreach (var innerEx in aggregateEx.InnerExceptions)
{
if (innerEx != null && count < 50)
{
toProcess.Enqueue(innerEx);
}
}
}
else if (current.InnerException != null)
{
toProcess.Enqueue(current.InnerException);
}

// Failsafe: if we have too many exceptions, stop and return what we have
if (count >= 50)
{
_trace.Info("CompareExceptions failsafe triggered - too many exceptions (50+)");
break;
}
}

return messages;
}
}
}
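The helpers above only decide whether two results match; a minimal, hedged sketch of the general "shadow comparison" pattern they support is below. The wrapper type and its parameter names are illustrative assumptions, not the runner's actual wiring: it evaluates the legacy path, additionally evaluates the new path, traces any mismatch, and always returns the legacy result.

using System;

// Illustrative only: a generic shadow-comparison wrapper in the spirit of the
// comparison helpers above. Not part of the runner code base.
public static class ShadowCompare
{
    public static T Run<T>(
        Func<T> legacyEvaluate,
        Func<T> newEvaluate,
        Func<T, T, bool> resultsMatch,
        Action<string> traceInfo,
        string name)
    {
        // The legacy result is always authoritative.
        var legacyResult = legacyEvaluate();
        try
        {
            var newResult = newEvaluate();
            if (!resultsMatch(legacyResult, newResult))
            {
                traceInfo($"{name} mismatch between legacy and new evaluation");
            }
        }
        catch (Exception ex)
        {
            // A throwing new path is logged, never surfaced to the job.
            traceInfo($"{name} new evaluation threw: {ex.Message}");
        }
        return legacyResult;
    }
}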
@@ -1,15 +1,19 @@
#nullable enable
using System;
using System.IO;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Handlers;
namespace GitHub.Runner.Worker;

[ServiceLocator(Default = typeof(SnapshotOperationProvider))]
public interface ISnapshotOperationProvider : IRunnerService
{
Task CreateSnapshotRequestAsync(IExecutionContext executionContext, Snapshot snapshotRequest);
void RunSnapshotPreflightChecks(IExecutionContext jobContext);
}

public class SnapshotOperationProvider : RunnerService, ISnapshotOperationProvider
@@ -24,9 +28,32 @@ public class SnapshotOperationProvider : RunnerService, ISnapshotOperationProvid
}

IOUtil.SaveObject(snapshotRequest, snapshotRequestFilePath);
executionContext.Output($"Image Name: {snapshotRequest.ImageName} Version: {snapshotRequest.Version}");
executionContext.Output($"Request written to: {snapshotRequestFilePath}");
executionContext.Output("This request will be processed after the job completes. You will not receive any feedback on the snapshot process within the workflow logs of this job.");
executionContext.Output("If the snapshot process is successful, you should see a new image with the requested name in the list of available custom images when creating a new GitHub-hosted Runner.");
return Task.CompletedTask;
}

public void RunSnapshotPreflightChecks(IExecutionContext context)
{
var shouldCheckRunnerEnvironment = context.Global.Variables.GetBoolean(Constants.Runner.Features.SnapshotPreflightHostedRunnerCheck) ?? false;
if (shouldCheckRunnerEnvironment &&
context.Global.Variables.TryGetValue(WellKnownDistributedTaskVariables.RunnerEnvironment, out var runnerEnvironment) &&
!string.IsNullOrEmpty(runnerEnvironment))
{
context.Debug($"Snapshot: RUNNER_ENVIRONMENT={runnerEnvironment}");
if (!string.Equals(runnerEnvironment, "github-hosted", StringComparison.OrdinalIgnoreCase))
{
throw new ArgumentException("Snapshot workflows must be run on a GitHub Hosted Runner");
}
}
var imageGenEnabled = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_IMAGE_GEN_ENABLED"));
context.Debug($"Snapshot: GITHUB_ACTIONS_IMAGE_GEN_ENABLED={imageGenEnabled}");
var shouldCheckImageGenPool = context.Global.Variables.GetBoolean(Constants.Runner.Features.SnapshotPreflightImageGenPoolCheck) ?? false;
if (shouldCheckImageGenPool && !imageGenEnabled)
{
throw new ArgumentException("Snapshot workflows must be run a hosted runner with Image Generation enabled");
}
}
}

@@ -1,14 +1,14 @@
using GitHub.Services.Common.Diagnostics;
using System;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Linq;
using GitHub.Services.Common.Diagnostics;
using GitHub.Services.Common.Internal;

namespace GitHub.Services.Common

@@ -146,6 +146,7 @@ namespace GitHub.Services.Common
sockEx.SocketErrorCode == SocketError.TimedOut ||
sockEx.SocketErrorCode == SocketError.HostDown ||
sockEx.SocketErrorCode == SocketError.HostUnreachable ||
sockEx.SocketErrorCode == SocketError.HostNotFound ||
sockEx.SocketErrorCode == SocketError.TryAgain)
{
return true;

@@ -0,0 +1,76 @@
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;

namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Evaluates parts of the workflow DOM. For example, a job strategy or step inputs.
/// </summary>
public interface IPipelineTemplateEvaluator
{
Boolean EvaluateStepContinueOnError(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);

String EvaluateStepDisplayName(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);

Dictionary<String, String> EvaluateStepEnvironment(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
StringComparer keyComparer);

Boolean EvaluateStepIf(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState);

Dictionary<String, String> EvaluateStepInputs(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);

Int32 EvaluateStepTimeout(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);

JobContainer EvaluateJobContainer(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);

Dictionary<String, String> EvaluateJobOutput(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);

TemplateToken EvaluateEnvironmentUrl(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);

Dictionary<String, String> EvaluateJobDefaultsRun(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);

IList<KeyValuePair<String, JobContainer>> EvaluateJobServiceContainers(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);

Snapshot EvaluateJobSnapshotRequest(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions);
}
}
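A hedged sketch of consuming the new interface rather than the concrete evaluator follows; the resolver class and its member names are assumptions for illustration only, using the EvaluateStepDisplayName signature declared above.

using System.Collections.Generic;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;

// Illustrative consumer: code that previously required the concrete
// PipelineTemplateEvaluator can depend on the interface instead, which also
// makes the legacy/new comparison logic easier to exercise in tests.
public sealed class StepDisplayNameResolver
{
    private readonly IPipelineTemplateEvaluator _evaluator;

    public StepDisplayNameResolver(IPipelineTemplateEvaluator evaluator)
    {
        _evaluator = evaluator;
    }

    public string Resolve(TemplateToken displayNameToken, DictionaryContextData contextData)
    {
        // No extra expression functions are needed for this call.
        var functions = new List<IFunctionInfo>();
        return _evaluator.EvaluateStepDisplayName(displayNameToken, contextData, functions);
    }
}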
@@ -18,7 +18,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
/// Evaluates parts of the workflow DOM. For example, a job strategy or step inputs.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public class PipelineTemplateEvaluator
public class PipelineTemplateEvaluator : IPipelineTemplateEvaluator
{
public PipelineTemplateEvaluator(
ITraceWriter trace,

@@ -18,6 +18,16 @@ namespace GitHub.DistributedTask.WebApi
internal set;
}

/// <summary>
/// The url to refresh tokens with legacy service
/// </summary>
[JsonProperty("legacy_authorization_url")]
public Uri LegacyAuthorizationUrl
{
get;
internal set;
}

/// <summary>
/// The url to connect to poll for messages
/// </summary>

111 src/Sdk/Expressions/Data/ArrayExpressionData.cs Normal file
@@ -0,0 +1,111 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references

using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

namespace GitHub.Actions.Expressions.Data
{
[DataContract]
[JsonObject]
public sealed class ArrayExpressionData : ExpressionData, IEnumerable<ExpressionData>, IReadOnlyArray
{
public ArrayExpressionData()
: base(ExpressionDataType.Array)
{
}

[IgnoreDataMember]
public Int32 Count => m_items?.Count ?? 0;

public ExpressionData this[Int32 index] => m_items[index];

Object IReadOnlyArray.this[Int32 index] => m_items[index];

public void Add(ExpressionData item)
{
if (m_items == null)
{
m_items = new List<ExpressionData>();
}

m_items.Add(item);
}

public override ExpressionData Clone()
{
var result = new ArrayExpressionData();
if (m_items?.Count > 0)
{
result.m_items = new List<ExpressionData>(m_items.Count);
foreach (var item in m_items)
{
result.m_items.Add(item);
}
}
return result;
}

public override JToken ToJToken()
{
var result = new JArray();
if (m_items?.Count > 0)
{
foreach (var item in m_items)
{
result.Add(item?.ToJToken() ?? JValue.CreateNull());
}
}
return result;
}

public IEnumerator<ExpressionData> GetEnumerator()
{
if (m_items?.Count > 0)
{
foreach (var item in m_items)
{
yield return item;
}
}
}

IEnumerator IEnumerable.GetEnumerator()
{
if (m_items?.Count > 0)
{
foreach (var item in m_items)
{
yield return item;
}
}
}

IEnumerator IReadOnlyArray.GetEnumerator()
{
if (m_items?.Count > 0)
{
foreach (var item in m_items)
{
yield return item;
}
}
}

[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_items?.Count == 0)
{
m_items = null;
}
}

[DataMember(Name = "a", EmitDefaultValue = false)]
private List<ExpressionData> m_items;
}
}
58 src/Sdk/Expressions/Data/BooleanExpressionData.cs Normal file
@@ -0,0 +1,58 @@
using System;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
using Newtonsoft.Json.Linq;

namespace GitHub.Actions.Expressions.Data
{
[DataContract]
public sealed class BooleanExpressionData : ExpressionData, IBoolean
{
public BooleanExpressionData(Boolean value)
: base(ExpressionDataType.Boolean)
{
m_value = value;
}

public Boolean Value
{
get
{
return m_value;
}
}

public override ExpressionData Clone()
{
return new BooleanExpressionData(m_value);
}

public override JToken ToJToken()
{
return (JToken)m_value;
}

public override String ToString()
{
return m_value ? "true" : "false";
}

Boolean IBoolean.GetBoolean()
{
return Value;
}

public static implicit operator Boolean(BooleanExpressionData data)
{
return data.Value;
}

public static implicit operator BooleanExpressionData(Boolean data)
{
return new BooleanExpressionData(data);
}

[DataMember(Name = "b", EmitDefaultValue = false)]
private Boolean m_value;
}
}
@@ -0,0 +1,289 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references

using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Actions.Expressions.Sdk;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

namespace GitHub.Actions.Expressions.Data
{
[DataContract]
[JsonObject]
public class CaseSensitiveDictionaryExpressionData : ExpressionData, IEnumerable<KeyValuePair<String, ExpressionData>>, IReadOnlyObject
{
public CaseSensitiveDictionaryExpressionData()
: base(ExpressionDataType.CaseSensitiveDictionary)
{
}

[IgnoreDataMember]
public Int32 Count => m_list?.Count ?? 0;

[IgnoreDataMember]
public IEnumerable<String> Keys
{
get
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return pair.Key;
}
}
}
}

[IgnoreDataMember]
public IEnumerable<ExpressionData> Values
{
get
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return pair.Value;
}
}
}
}

IEnumerable<Object> IReadOnlyObject.Values
{
get
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return pair.Value;
}
}
}
}

private Dictionary<String, Int32> IndexLookup
{
get
{
if (m_indexLookup == null)
{
m_indexLookup = new Dictionary<String, Int32>(StringComparer.Ordinal);
if (m_list?.Count > 0)
{
for (var i = 0; i < m_list.Count; i++)
{
var pair = m_list[i];
m_indexLookup.Add(pair.Key, i);
}
}
}

return m_indexLookup;
}
}

private List<DictionaryExpressionDataPair> List
{
get
{
if (m_list == null)
{
m_list = new List<DictionaryExpressionDataPair>();
}

return m_list;
}
}

public ExpressionData this[String key]
{
get
{
var index = IndexLookup[key];
return m_list[index].Value;
}

set
{
// Existing
if (IndexLookup.TryGetValue(key, out var index))
{
key = m_list[index].Key; // preserve casing
m_list[index] = new DictionaryExpressionDataPair(key, value);
}
// New
else
{
Add(key, value);
}
}
}

Object IReadOnlyObject.this[String key]
{
get
{
var index = IndexLookup[key];
return m_list[index].Value;
}
}

internal KeyValuePair<String, ExpressionData> this[Int32 index]
{
get
{
var pair = m_list[index];
return new KeyValuePair<String, ExpressionData>(pair.Key, pair.Value);
}
}

public void Add(IEnumerable<KeyValuePair<String, ExpressionData>> pairs)
{
foreach (var pair in pairs)
{
Add(pair.Key, pair.Value);
}
}

public void Add(
String key,
ExpressionData value)
{
IndexLookup.Add(key, m_list?.Count ?? 0);
List.Add(new DictionaryExpressionDataPair(key, value));
}

public override ExpressionData Clone()
{
var result = new CaseSensitiveDictionaryExpressionData();

if (m_list?.Count > 0)
{
result.m_list = new List<DictionaryExpressionDataPair>(m_list.Count);
foreach (var item in m_list)
{
result.m_list.Add(new DictionaryExpressionDataPair(item.Key, item.Value?.Clone()));
}
}

return result;
}

public override JToken ToJToken()
{
var json = new JObject();
if (m_list?.Count > 0)
{
foreach (var item in m_list)
{
json.Add(item.Key, item.Value?.ToJToken() ?? JValue.CreateNull());
}
}
return json;
}

public Boolean ContainsKey(String key)
{
return TryGetValue(key, out _);
}

public IEnumerator<KeyValuePair<String, ExpressionData>> GetEnumerator()
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return new KeyValuePair<String, ExpressionData>(pair.Key, pair.Value);
}
}
}

IEnumerator IEnumerable.GetEnumerator()
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return new KeyValuePair<String, ExpressionData>(pair.Key, pair.Value);
}
}
}

IEnumerator IReadOnlyObject.GetEnumerator()
{
if (m_list?.Count > 0)
{
foreach (var pair in m_list)
{
yield return new KeyValuePair<String, Object>(pair.Key, pair.Value);
}
}
}

public Boolean TryGetValue(
String key,
out ExpressionData value)
{
if (m_list?.Count > 0 &&
IndexLookup.TryGetValue(key, out var index))
{
value = m_list[index].Value;
return true;
}

value = null;
return false;
}

Boolean IReadOnlyObject.TryGetValue(
String key,
out Object value)
{
if (TryGetValue(key, out ExpressionData data))
{
value = data;
return true;
}

value = null;
return false;
}

[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (m_list?.Count == 0)
{
m_list = null;
}
}

[DataContract]
private sealed class DictionaryExpressionDataPair
{
public DictionaryExpressionDataPair(
String key,
ExpressionData value)
{
Key = key;
Value = value;
}

[DataMember(Name = "k")]
public readonly String Key;

[DataMember(Name = "v")]
public readonly ExpressionData Value;
}

private Dictionary<String, Int32> m_indexLookup;

[DataMember(Name = "d", EmitDefaultValue = false)]
private List<DictionaryExpressionDataPair> m_list;
}
}
||||
289
src/Sdk/Expressions/Data/DictionaryExpressionData.cs
Normal file
289
src/Sdk/Expressions/Data/DictionaryExpressionData.cs
Normal file
@@ -0,0 +1,289 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.Serialization;
|
||||
using GitHub.Actions.Expressions.Sdk;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
|
||||
namespace GitHub.Actions.Expressions.Data
|
||||
{
|
||||
[DataContract]
|
||||
[JsonObject]
|
||||
public class DictionaryExpressionData : ExpressionData, IEnumerable<KeyValuePair<String, ExpressionData>>, IReadOnlyObject
|
||||
{
|
||||
public DictionaryExpressionData()
|
||||
: base(ExpressionDataType.Dictionary)
|
||||
{
|
||||
}
|
||||
|
||||
[IgnoreDataMember]
|
||||
public Int32 Count => m_list?.Count ?? 0;
|
||||
|
||||
[IgnoreDataMember]
|
||||
public IEnumerable<String> Keys
|
||||
{
|
||||
get
|
||||
{
|
||||
if (m_list?.Count > 0)
|
||||
{
|
||||
foreach (var pair in m_list)
|
||||
{
|
||||
yield return pair.Key;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[IgnoreDataMember]
|
||||
public IEnumerable<ExpressionData> Values
|
||||
{
|
||||
get
|
||||
{
|
||||
if (m_list?.Count > 0)
|
||||
{
|
||||
foreach (var pair in m_list)
|
||||
{
|
||||
yield return pair.Value;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
IEnumerable<Object> IReadOnlyObject.Values
|
||||
{
|
||||
get
|
||||
{
|
||||
if (m_list?.Count > 0)
|
||||
{
|
||||
foreach (var pair in m_list)
|
||||
{
|
||||
yield return pair.Value;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Dictionary<String, Int32> IndexLookup
|
||||
{
|
||||
get
|
||||
{
|
||||
if (m_indexLookup == null)
|
||||
{
|
||||
m_indexLookup = new Dictionary<String, Int32>(StringComparer.OrdinalIgnoreCase);
|
||||
if (m_list?.Count > 0)
|
||||
{
|
||||
for (var i = 0; i < m_list.Count; i++)
|
||||
{
|
||||
var pair = m_list[i];
|
||||
m_indexLookup.Add(pair.Key, i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return m_indexLookup;
|
||||
}
|
||||
}
|
||||
|
||||
private List<DictionaryExpressionDataPair> List
|
||||
{
|
||||
get
|
||||
{
|
||||
if (m_list == null)
|
||||
{
|
||||
m_list = new List<DictionaryExpressionDataPair>();
|
||||
}
|
||||
|
||||
return m_list;
|
||||
}
|
||||
}
|
||||
|
||||
public ExpressionData this[String key]
|
||||
{
|
||||
get
|
||||
{
|
||||
var index = IndexLookup[key];
|
||||
return m_list[index].Value;
|
||||
}
|
||||
|
||||
set
|
||||
{
|
||||
// Existing
|
||||
if (IndexLookup.TryGetValue(key, out var index))
|
||||
{
|
||||
key = m_list[index].Key; // preserve casing
|
||||
m_list[index] = new DictionaryExpressionDataPair(key, value);
|
||||
}
|
||||
// New
|
||||
else
|
||||
{
|
||||
Add(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Object IReadOnlyObject.this[String key]
|
||||
{
|
||||
get
|
||||
{
|
||||
var index = IndexLookup[key];
|
||||
return m_list[index].Value;
|
||||
}
|
||||
}
|
||||
|
||||
internal KeyValuePair<String, ExpressionData> this[Int32 index]
|
||||
{
|
||||
get
|
||||
{
|
||||
var pair = m_list[index];
|
||||
return new KeyValuePair<String, ExpressionData>(pair.Key, pair.Value);
|
||||
}
|
||||
}
|
||||
|
||||
public void Add(IEnumerable<KeyValuePair<String, ExpressionData>> pairs)
|
||||
{
|
||||
foreach (var pair in pairs)
|
||||
{
|
||||
Add(pair.Key, pair.Value);
|
||||
}
|
||||
}
|
||||
|
||||
public void Add(
|
||||
String key,
|
||||
ExpressionData value)
|
||||
{
|
||||
IndexLookup.Add(key, m_list?.Count ?? 0);
|
||||
List.Add(new DictionaryExpressionDataPair(key, value));
|
||||
}
|
||||
|
||||
public override ExpressionData Clone()
|
||||
{
|
||||
var result = new DictionaryExpressionData();
|
||||
|
||||
if (m_list?.Count > 0)
|
||||
{
|
||||
result.m_list = new List<DictionaryExpressionDataPair>(m_list.Count);
|
||||
foreach (var item in m_list)
|
||||
{
|
||||
result.m_list.Add(new DictionaryExpressionDataPair(item.Key, item.Value?.Clone()));
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public override JToken ToJToken()
|
||||
{
|
||||
var json = new JObject();
|
||||
if (m_list?.Count > 0)
|
||||
{
|
||||
foreach (var item in m_list)
|
||||
{
|
||||
json.Add(item.Key, item.Value?.ToJToken() ?? JValue.CreateNull());
|
||||
}
|
||||
}
|
||||
return json;
|
||||
}
|
||||
|
||||
public Boolean ContainsKey(String key)
|
||||
{
|
||||
return TryGetValue(key, out _);
|
||||
}
|
||||
|
||||
public IEnumerator<KeyValuePair<String, ExpressionData>> GetEnumerator()
|
||||
{
|
||||
if (m_list?.Count > 0)
|
||||
{
|
||||
foreach (var pair in m_list)
|
||||
{
|
||||
yield return new KeyValuePair<String, ExpressionData>(pair.Key, pair.Value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
IEnumerator IEnumerable.GetEnumerator()
|
||||
{
|
||||
if (m_list?.Count > 0)
|
||||
{
|
||||
foreach (var pair in m_list)
|
||||
{
|
||||
yield return new KeyValuePair<String, ExpressionData>(pair.Key, pair.Value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
IEnumerator IReadOnlyObject.GetEnumerator()
|
||||
{
|
||||
if (m_list?.Count > 0)
|
||||
{
|
||||
foreach (var pair in m_list)
|
||||
{
|
||||
yield return new KeyValuePair<String, Object>(pair.Key, pair.Value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Boolean TryGetValue(
|
||||
String key,
|
||||
out ExpressionData value)
|
||||
{
|
||||
if (m_list?.Count > 0 &&
|
||||
IndexLookup.TryGetValue(key, out var index))
|
||||
{
|
||||
value = m_list[index].Value;
|
||||
return true;
|
||||
}
|
||||
|
||||
value = null;
|
||||
return false;
|
||||
}
|
||||
|
||||
Boolean IReadOnlyObject.TryGetValue(
|
||||
String key,
|
||||
out Object value)
|
||||
{
|
||||
if (TryGetValue(key, out ExpressionData data))
|
||||
{
|
||||
value = data;
|
||||
return true;
|
||||
}
|
||||
|
||||
value = null;
|
||||
return false;
|
||||
}
|
||||
|
||||
[OnSerializing]
|
||||
private void OnSerializing(StreamingContext context)
|
||||
{
|
||||
if (m_list?.Count == 0)
|
||||
{
|
||||
m_list = null;
|
||||
}
|
||||
}
|
||||
|
||||
[DataContract]
|
||||
private sealed class DictionaryExpressionDataPair
|
||||
{
|
||||
public DictionaryExpressionDataPair(
|
||||
String key,
|
||||
ExpressionData value)
|
||||
{
|
||||
Key = key;
|
||||
Value = value;
|
||||
}
|
||||
|
||||
[DataMember(Name = "k")]
|
||||
public readonly String Key;
|
||||
|
||||
[DataMember(Name = "v")]
|
||||
public readonly ExpressionData Value;
|
||||
}
|
||||
|
||||
private Dictionary<String, Int32> m_indexLookup;
|
||||
|
||||
[DataMember(Name = "d", EmitDefaultValue = false)]
|
||||
private List<DictionaryExpressionDataPair> m_list;
|
||||
}
|
||||
}
|
||||
27
src/Sdk/Expressions/Data/ExpressionData.cs
Normal file
27
src/Sdk/Expressions/Data/ExpressionData.cs
Normal file
@@ -0,0 +1,27 @@
|
||||
using System;
|
||||
using System.Runtime.Serialization;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
|
||||
namespace GitHub.Actions.Expressions.Data
|
||||
{
|
||||
/// <summary>
|
||||
/// Base class for all template tokens
|
||||
/// </summary>
|
||||
[DataContract]
|
||||
[JsonConverter(typeof(ExpressionDataJsonConverter))]
|
||||
public abstract class ExpressionData
|
||||
{
|
||||
protected ExpressionData(Int32 type)
|
||||
{
|
||||
Type = type;
|
||||
}
|
||||
|
||||
[DataMember(Name = "t", EmitDefaultValue = false)]
|
||||
internal Int32 Type { get; }
|
||||
|
||||
public abstract ExpressionData Clone();
|
||||
|
||||
public abstract JToken ToJToken();
|
||||
}
|
||||
}
|
||||
156
src/Sdk/Expressions/Data/ExpressionDataExtensions.cs
Normal file
156
src/Sdk/Expressions/Data/ExpressionDataExtensions.cs
Normal file
@@ -0,0 +1,156 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace GitHub.Actions.Expressions.Data
|
||||
{
|
||||
public static class ExpressionDataExtensions
|
||||
{
|
||||
public static ArrayExpressionData AssertArray(
|
||||
this ExpressionData value,
|
||||
String objectDescription)
|
||||
{
|
||||
if (value is ArrayExpressionData array)
|
||||
{
|
||||
return array;
|
||||
}
|
||||
|
||||
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(ArrayExpressionData)}' was expected.");
|
||||
}
|
||||
|
||||
public static DictionaryExpressionData AssertDictionary(
|
||||
this ExpressionData value,
|
||||
String objectDescription)
|
||||
{
|
||||
if (value is DictionaryExpressionData dictionary)
|
||||
{
|
||||
return dictionary;
|
||||
}
|
||||
|
||||
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(DictionaryExpressionData)}' was expected.");
|
||||
}
|
||||
|
||||
public static StringExpressionData AssertString(
|
||||
this ExpressionData value,
|
||||
String objectDescription)
|
||||
{
|
||||
if (value is StringExpressionData str)
|
||||
{
|
||||
return str;
|
||||
}
|
||||
|
||||
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(StringExpressionData)}' was expected.");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns all context data objects (depth first)
|
||||
/// </summary>
|
||||
public static IEnumerable<ExpressionData> Traverse(this ExpressionData value)
|
||||
{
|
||||
return Traverse(value, omitKeys: false);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns all context data objects (depth first)
|
||||
/// </summary>
|
||||
/// <param name="omitKeys">If true, dictionary keys are omitted</param>
|
||||
public static IEnumerable<ExpressionData> Traverse(
|
||||
this ExpressionData value,
|
||||
Boolean omitKeys)
|
||||
{
|
||||
yield return value;
|
||||
|
||||
if (value is ArrayExpressionData || value is DictionaryExpressionData)
|
||||
{
|
||||
var state = new TraversalState(null, value);
|
||||
while (state != null)
|
||||
{
|
||||
if (state.MoveNext(omitKeys))
|
||||
{
|
||||
value = state.Current;
|
||||
yield return value;
|
||||
|
||||
if (value is ArrayExpressionData || value is DictionaryExpressionData)
|
||||
{
|
||||
state = new TraversalState(state, value);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
state = state.Parent;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class TraversalState
|
||||
{
|
||||
public TraversalState(
|
||||
TraversalState parent,
|
||||
ExpressionData data)
|
||||
{
|
||||
Parent = parent;
|
||||
m_data = data;
|
||||
}
|
||||
|
||||
public Boolean MoveNext(Boolean omitKeys)
|
||||
{
|
||||
switch (m_data.Type)
|
||||
{
|
||||
case ExpressionDataType.Array:
|
||||
var array = m_data.AssertArray("array");
|
||||
if (++m_index < array.Count)
|
||||
{
|
||||
Current = array[m_index];
|
||||
return true;
|
||||
}
|
||||
else
|
||||
{
|
||||
Current = null;
|
||||
return false;
|
||||
}
|
||||
|
||||
case ExpressionDataType.Dictionary:
|
||||
var dictionary = m_data.AssertDictionary("dictionary");
|
||||
|
||||
// Return the value
|
||||
if (m_isKey)
|
||||
{
|
||||
m_isKey = false;
|
||||
Current = dictionary[m_index].Value;
|
||||
return true;
|
||||
}
|
||||
|
||||
if (++m_index < dictionary.Count)
|
||||
{
|
||||
// Skip the key, return the value
|
||||
if (omitKeys)
|
||||
{
|
||||
m_isKey = false;
|
||||
Current = dictionary[m_index].Value;
|
||||
return true;
|
||||
}
|
||||
|
||||
// Return the key
|
||||
m_isKey = true;
|
||||
Current = new StringExpressionData(dictionary[m_index].Key);
|
||||
return true;
|
||||
}
|
||||
|
||||
Current = null;
|
||||
return false;
|
||||
|
||||
default:
|
||||
throw new NotSupportedException($"Unexpected {nameof(ExpressionData)} type '{m_data.Type}'");
|
||||
}
|
||||
}
|
||||
|
||||
private ExpressionData m_data;
|
||||
private Int32 m_index = -1;
|
||||
private Boolean m_isKey;
|
||||
public ExpressionData Current;
|
||||
public TraversalState Parent;
|
||||
}
|
||||
}
|
||||
}
|
||||
199
src/Sdk/Expressions/Data/ExpressionDataJsonConverter.cs
Normal file
199
src/Sdk/Expressions/Data/ExpressionDataJsonConverter.cs
Normal file
@@ -0,0 +1,199 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Reflection;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
|
||||
namespace GitHub.Actions.Expressions.Data
|
||||
{
|
||||
/// <summary>
|
||||
/// JSON serializer for ExpressionData objects
|
||||
/// </summary>
|
||||
internal sealed class ExpressionDataJsonConverter : JsonConverter
|
||||
{
|
||||
public override Boolean CanWrite
|
||||
{
|
||||
get
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
public override Boolean CanConvert(Type objectType)
|
||||
{
|
||||
return typeof(ExpressionData).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo());
|
||||
}
|
||||
|
||||
public override Object ReadJson(
|
||||
JsonReader reader,
|
||||
Type objectType,
|
||||
Object existingValue,
|
||||
JsonSerializer serializer)
|
||||
{
|
||||
switch (reader.TokenType)
|
||||
{
|
||||
case JsonToken.String:
|
||||
return new StringExpressionData(reader.Value.ToString());
|
||||
|
||||
case JsonToken.Boolean:
|
||||
return new BooleanExpressionData((Boolean)reader.Value);
|
||||
|
||||
case JsonToken.Float:
|
||||
return new NumberExpressionData((Double)reader.Value);
|
||||
|
||||
case JsonToken.Integer:
|
||||
return new NumberExpressionData((Double)(Int64)reader.Value);
|
||||
|
||||
case JsonToken.StartObject:
|
||||
break;
|
||||
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
|
||||
Int32? type = null;
|
||||
JObject value = JObject.Load(reader);
|
||||
if (!value.TryGetValue("t", StringComparison.OrdinalIgnoreCase, out JToken typeValue))
|
||||
{
|
||||
type = ExpressionDataType.String;
|
||||
}
|
||||
else if (typeValue.Type == JTokenType.Integer)
|
||||
{
|
||||
type = (Int32)typeValue;
|
||||
}
|
||||
else
|
||||
{
|
||||
return existingValue;
|
||||
}
|
||||
|
||||
Object newValue = null;
|
||||
switch (type)
|
||||
{
|
||||
case ExpressionDataType.String:
|
||||
newValue = new StringExpressionData(null);
|
||||
break;
|
||||
|
||||
case ExpressionDataType.Array:
|
||||
newValue = new ArrayExpressionData();
|
||||
break;
|
||||
|
||||
case ExpressionDataType.Dictionary:
|
||||
newValue = new DictionaryExpressionData();
|
||||
break;
|
||||
|
||||
case ExpressionDataType.Boolean:
|
||||
newValue = new BooleanExpressionData(false);
|
||||
break;
|
||||
|
||||
case ExpressionDataType.Number:
|
||||
newValue = new NumberExpressionData(0);
|
||||
break;
|
||||
|
||||
case ExpressionDataType.CaseSensitiveDictionary:
|
||||
newValue = new CaseSensitiveDictionaryExpressionData();
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new NotSupportedException($"Unexpected {nameof(ExpressionDataType)} '{type}'");
|
||||
}
|
||||
|
||||
if (value != null)
|
||||
{
|
||||
using JsonReader objectReader = value.CreateReader();
|
||||
serializer.Populate(objectReader, newValue);
|
||||
}
|
||||
|
||||
return newValue;
|
||||
}
|
||||
|
||||
public override void WriteJson(
|
||||
JsonWriter writer,
|
||||
Object value,
|
||||
JsonSerializer serializer)
|
||||
{
|
||||
if (Object.ReferenceEquals(value, null))
|
||||
{
|
||||
writer.WriteNull();
|
||||
}
|
||||
else if (value is StringExpressionData stringData)
|
||||
{
|
||||
writer.WriteValue(stringData.Value);
|
||||
}
|
||||
else if (value is BooleanExpressionData boolData)
|
||||
{
|
||||
writer.WriteValue(boolData.Value);
|
||||
}
|
||||
else if (value is NumberExpressionData numberData)
|
||||
{
|
||||
writer.WriteValue(numberData.Value);
|
||||
}
|
||||
else if (value is ArrayExpressionData arrayData)
|
||||
{
|
||||
writer.WriteStartObject();
|
||||
writer.WritePropertyName("t");
|
||||
writer.WriteValue(ExpressionDataType.Array);
|
||||
if (arrayData.Count > 0)
|
||||
{
|
||||
writer.WritePropertyName("a");
|
||||
writer.WriteStartArray();
|
||||
foreach (var item in arrayData)
|
||||
{
|
||||
serializer.Serialize(writer, item);
|
||||
}
|
||||
writer.WriteEndArray();
|
||||
}
|
||||
writer.WriteEndObject();
|
||||
}
|
||||
else if (value is DictionaryExpressionData dictionaryData)
|
||||
{
|
||||
writer.WriteStartObject();
|
||||
writer.WritePropertyName("t");
|
||||
writer.WriteValue(ExpressionDataType.Dictionary);
|
||||
if (dictionaryData.Count > 0)
|
||||
{
|
||||
writer.WritePropertyName("d");
|
||||
writer.WriteStartArray();
|
||||
foreach (var pair in dictionaryData)
|
||||
{
|
||||
writer.WriteStartObject();
|
||||
writer.WritePropertyName("k");
|
||||
writer.WriteValue(pair.Key);
|
||||
writer.WritePropertyName("v");
|
||||
serializer.Serialize(writer, pair.Value);
|
||||
writer.WriteEndObject();
|
||||
}
|
||||
writer.WriteEndArray();
|
||||
}
|
||||
writer.WriteEndObject();
|
||||
}
|
||||
else if (value is CaseSensitiveDictionaryExpressionData caseSensitiveDictionaryData)
|
||||
{
|
||||
writer.WriteStartObject();
|
||||
writer.WritePropertyName("t");
|
||||
writer.WriteValue(ExpressionDataType.CaseSensitiveDictionary);
|
||||
if (caseSensitiveDictionaryData.Count > 0)
|
||||
{
|
||||
writer.WritePropertyName("d");
|
||||
writer.WriteStartArray();
|
||||
foreach (var pair in caseSensitiveDictionaryData)
|
||||
{
|
||||
writer.WriteStartObject();
|
||||
writer.WritePropertyName("k");
|
||||
writer.WriteValue(pair.Key);
|
||||
writer.WritePropertyName("v");
|
||||
serializer.Serialize(writer, pair.Value);
|
||||
writer.WriteEndObject();
|
||||
}
|
||||
writer.WriteEndArray();
|
||||
}
|
||||
writer.WriteEndObject();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new NotSupportedException($"Unexpected type '{value.GetType().Name}'");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
19
src/Sdk/Expressions/Data/ExpressionDataType.cs
Normal file
19
src/Sdk/Expressions/Data/ExpressionDataType.cs
Normal file
@@ -0,0 +1,19 @@
|
||||
using System;
|
||||
|
||||
namespace GitHub.Actions.Expressions.Data
|
||||
{
|
||||
internal static class ExpressionDataType
|
||||
{
|
||||
internal const Int32 String = 0;
|
||||
|
||||
internal const Int32 Array = 1;
|
||||
|
||||
internal const Int32 Dictionary = 2;
|
||||
|
||||
internal const Int32 Boolean = 3;
|
||||
|
||||
internal const Int32 Number = 4;
|
||||
|
||||
internal const Int32 CaseSensitiveDictionary = 5;
|
||||
}
|
||||
}
|
64 src/Sdk/Expressions/Data/JTokenExtensions.cs Normal file
@@ -0,0 +1,64 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references

using System;
using Newtonsoft.Json.Linq;

namespace GitHub.Actions.Expressions.Data
{
public static class JTokenExtensions
{
public static ExpressionData ToExpressionData(this JToken value)
{
return value.ToExpressionData(1, 100);
}

public static ExpressionData ToExpressionData(
this JToken value,
Int32 depth,
Int32 maxDepth)
{
if (depth < maxDepth)
{
if (value.Type == JTokenType.String)
{
return new StringExpressionData((String)value);
}
else if (value.Type == JTokenType.Boolean)
{
return new BooleanExpressionData((Boolean)value);
}
else if (value.Type == JTokenType.Float || value.Type == JTokenType.Integer)
{
return new NumberExpressionData((Double)value);
}
else if (value.Type == JTokenType.Object)
{
var subContext = new DictionaryExpressionData();
var obj = (JObject)value;
foreach (var property in obj.Properties())
{
subContext[property.Name] = ToExpressionData(property.Value, depth + 1, maxDepth);
}
return subContext;
}
else if (value.Type == JTokenType.Array)
{
var arrayContext = new ArrayExpressionData();
var arr = (JArray)value;
foreach (var element in arr)
{
arrayContext.Add(ToExpressionData(element, depth + 1, maxDepth));
}
return arrayContext;
}
else if (value.Type == JTokenType.Null)
{
return null;
}
}

// We don't understand the type or have reached our max, return as string
return new StringExpressionData(value.ToString());
}
}
}
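A minimal sketch of the round trip these extensions enable, assuming the GitHub.Actions.Expressions.Data types shown above are referenced; the sample JSON payload is arbitrary.

using GitHub.Actions.Expressions.Data;
using Newtonsoft.Json.Linq;

class JTokenConversionExample
{
    static void Main()
    {
        // Parse arbitrary JSON and convert it to ExpressionData (depth-limited to 100 by default).
        var token = JToken.Parse("{\"name\": \"runner\", \"ports\": [80, 443], \"enabled\": true}");
        ExpressionData data = token.ToExpressionData();

        // ToJToken reverses the conversion; whole-number values come back as integers.
        JToken roundTripped = data.ToJToken();
        System.Console.WriteLine(roundTripped.ToString(Newtonsoft.Json.Formatting.None));
    }
}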
||||
78
src/Sdk/Expressions/Data/NumberExpressionData.cs
Normal file
78
src/Sdk/Expressions/Data/NumberExpressionData.cs
Normal file
@@ -0,0 +1,78 @@
|
||||
using System;
|
||||
using System.Globalization;
|
||||
using System.Runtime.Serialization;
|
||||
using GitHub.Actions.Expressions.Sdk;
|
||||
using Newtonsoft.Json.Linq;
|
||||
|
||||
namespace GitHub.Actions.Expressions.Data
|
||||
{
|
||||
[DataContract]
|
||||
public sealed class NumberExpressionData : ExpressionData, INumber
|
||||
{
|
||||
public NumberExpressionData(Double value)
|
||||
: base(ExpressionDataType.Number)
|
||||
{
|
||||
m_value = value;
|
||||
}
|
||||
|
||||
public Double Value
|
||||
{
|
||||
get
|
||||
{
|
||||
return m_value;
|
||||
}
|
||||
}
|
||||
|
||||
public override ExpressionData Clone()
|
||||
{
|
||||
return new NumberExpressionData(m_value);
|
||||
}
|
||||
|
||||
public override JToken ToJToken()
|
||||
{
|
||||
if (Double.IsNaN(m_value) || m_value == Double.PositiveInfinity || m_value == Double.NegativeInfinity)
|
||||
{
|
||||
return (JToken)m_value;
|
||||
}
|
||||
|
||||
var floored = Math.Floor(m_value);
|
||||
if (m_value == floored && m_value <= (Double)Int32.MaxValue && m_value >= (Double)Int32.MinValue)
|
||||
{
|
||||
var flooredInt = (Int32)floored;
|
||||
return (JToken)flooredInt;
|
||||
}
|
||||
else if (m_value == floored && m_value <= (Double)Int64.MaxValue && m_value >= (Double)Int64.MinValue)
|
||||
{
|
||||
var flooredInt = (Int64)floored;
|
||||
return (JToken)flooredInt;
|
||||
}
|
||||
else
|
||||
{
|
||||
return (JToken)m_value;
|
||||
}
|
||||
}
|
||||
|
||||
public override String ToString()
|
||||
{
|
||||
return m_value.ToString("G15", CultureInfo.InvariantCulture);
|
||||
}
|
||||
|
||||
Double INumber.GetNumber()
|
||||
{
|
||||
return Value;
|
||||
}
|
||||
|
||||
public static implicit operator Double(NumberExpressionData data)
|
||||
{
|
||||
return data.Value;
|
||||
}
|
||||
|
||||
public static implicit operator NumberExpressionData(Double data)
|
||||
{
|
||||
return new NumberExpressionData(data);
|
||||
}
|
||||
|
||||
[DataMember(Name = "n", EmitDefaultValue = false)]
|
||||
private Double m_value;
|
||||
}
|
||||
}
|
||||
74
src/Sdk/Expressions/Data/StringExpressionData.cs
Normal file
74
src/Sdk/Expressions/Data/StringExpressionData.cs
Normal file
@@ -0,0 +1,74 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Runtime.Serialization;
|
||||
using GitHub.Actions.Expressions.Sdk;
|
||||
using Newtonsoft.Json.Linq;
|
||||
|
||||
namespace GitHub.Actions.Expressions.Data
|
||||
{
|
||||
[DataContract]
|
||||
public sealed class StringExpressionData : ExpressionData, IString
|
||||
{
|
||||
public StringExpressionData(String value)
|
||||
: base(ExpressionDataType.String)
|
||||
{
|
||||
m_value = value;
|
||||
}
|
||||
|
||||
public String Value
|
||||
{
|
||||
get
|
||||
{
|
||||
if (m_value == null)
|
||||
{
|
||||
m_value = String.Empty;
|
||||
}
|
||||
|
||||
return m_value;
|
||||
}
|
||||
}
|
||||
|
||||
public override ExpressionData Clone()
|
||||
{
|
||||
return new StringExpressionData(m_value);
|
||||
}
|
||||
|
||||
public override JToken ToJToken()
|
||||
{
|
||||
return (JToken)m_value;
|
||||
}
|
||||
|
||||
String IString.GetString()
|
||||
{
|
||||
return Value;
|
||||
}
|
||||
|
||||
public override String ToString()
|
||||
{
|
||||
return Value;
|
||||
}
|
||||
|
||||
public static implicit operator String(StringExpressionData data)
|
||||
{
|
||||
return data.Value;
|
||||
}
|
||||
|
||||
public static implicit operator StringExpressionData(String data)
|
||||
{
|
||||
return new StringExpressionData(data);
|
||||
}
|
||||
|
||||
[OnSerializing]
|
||||
private void OnSerializing(StreamingContext context)
|
||||
{
|
||||
if (m_value?.Length == 0)
|
||||
{
|
||||
m_value = null;
|
||||
}
|
||||
}
|
||||
|
||||
[DataMember(Name = "s", EmitDefaultValue = false)]
|
||||
private String m_value;
|
||||
}
|
||||
}
|
50 src/Sdk/Expressions/EvaluationOptions.cs Normal file
@@ -0,0 +1,50 @@
using System;

namespace GitHub.Actions.Expressions
{
public sealed class EvaluationOptions
{
public EvaluationOptions()
{
}

public EvaluationOptions(EvaluationOptions copy)
{
if (copy != null)
{
MaxMemory = copy.MaxMemory;
MaxCacheMemory = copy.MaxCacheMemory;
StrictJsonParsing = copy.StrictJsonParsing;
AlwaysTraceExpanded = copy.AlwaysTraceExpanded;
}
}

/// <summary>
/// Maximum memory (in bytes) allowed during expression evaluation.
/// Memory is tracked across the entire expression tree evaluation to protect against DOS attacks.
/// Default is 1 MB (1048576 bytes) if not specified.
/// </summary>
public Int32 MaxMemory { get; set; }

/// <summary>
/// Maximum memory (in bytes) allowed for caching expanded expression results during tracing.
/// When exceeded, the cache is cleared and expressions may not be fully expanded in trace output.
/// Default is 1 MB (1048576 bytes) if not specified.
/// </summary>
public Int32 MaxCacheMemory { get; set; }

/// <summary>
/// Whether to enforce strict JSON parsing in the fromJson function.
/// When true, rejects JSON with comments, trailing commas, single quotes, and other non-standard features.
/// Default is false if not specified.
/// </summary>
public Boolean StrictJsonParsing { get; set; }

/// <summary>
/// Whether to always include the expanded expression in trace output.
/// When true, the expanded expression is always traced even if it matches the original expression or result.
/// Default is false if not specified.
/// </summary>
public Boolean AlwaysTraceExpanded { get; set; }
}
}
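A short, hedged sketch of constructing the options above; the limits shown restate the documented defaults explicitly and are not required in practice.

using GitHub.Actions.Expressions;

class EvaluationOptionsExample
{
    static void Main()
    {
        // Spell out the documented defaults (1 MB memory caps, lenient JSON parsing).
        var options = new EvaluationOptions
        {
            MaxMemory = 1048576,
            MaxCacheMemory = 1048576,
            StrictJsonParsing = false,
            AlwaysTraceExpanded = false,
        };

        // The copy constructor clones an existing configuration.
        var copy = new EvaluationOptions(options);
        System.Console.WriteLine(copy.MaxMemory);
    }
}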
||||
459
src/Sdk/Expressions/EvaluationResult.cs
Normal file
459
src/Sdk/Expressions/EvaluationResult.cs
Normal file
@@ -0,0 +1,459 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Globalization;
|
||||
using System.Linq;
|
||||
using GitHub.Actions.Expressions.Sdk;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
public sealed class EvaluationResult
|
||||
{
|
||||
internal EvaluationResult(
|
||||
EvaluationContext context,
|
||||
Int32 level,
|
||||
Object val,
|
||||
ValueKind kind,
|
||||
Object raw)
|
||||
: this(context, level, val, kind, raw, false)
|
||||
{
|
||||
}
|
||||
|
||||
internal EvaluationResult(
|
||||
EvaluationContext context,
|
||||
Int32 level,
|
||||
Object val,
|
||||
ValueKind kind,
|
||||
Object raw,
|
||||
Boolean omitTracing)
|
||||
{
|
||||
m_level = level;
|
||||
Value = val;
|
||||
Kind = kind;
|
||||
Raw = raw;
|
||||
m_omitTracing = omitTracing;
|
||||
|
||||
if (!omitTracing)
|
||||
{
|
||||
TraceValue(context);
|
||||
}
|
||||
}
|
||||
|
||||
public ValueKind Kind { get; }
|
||||
|
||||
/// <summary>
|
||||
/// When an interface converter is applied to the node result, raw contains the original value
|
||||
/// </summary>
|
||||
public Object Raw { get; }
|
||||
|
||||
public Object Value { get; }
|
||||
|
||||
public Boolean IsFalsy
|
||||
{
|
||||
get
|
||||
{
|
||||
switch (Kind)
|
||||
{
|
||||
case ValueKind.Null:
|
||||
return true;
|
||||
case ValueKind.Boolean:
|
||||
var boolean = (Boolean)Value;
|
||||
return !boolean;
|
||||
case ValueKind.Number:
|
||||
var number = (Double)Value;
|
||||
return number == 0d || Double.IsNaN(number);
|
||||
case ValueKind.String:
|
||||
var str = (String)Value;
|
||||
return String.Equals(str, String.Empty, StringComparison.Ordinal);
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Boolean IsPrimitive => ExpressionUtility.IsPrimitive(Kind);
|
||||
|
||||
public Boolean IsTruthy => !IsFalsy;
|
||||
|
||||
/// <summary>
|
||||
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
|
||||
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
|
||||
/// </summary>
|
||||
public Boolean AbstractEqual(EvaluationResult right)
|
||||
{
|
||||
return AbstractEqual(Value, right.Value);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
|
||||
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
|
||||
/// </summary>
|
||||
public Boolean AbstractGreaterThan(EvaluationResult right)
|
||||
{
|
||||
return AbstractGreaterThan(Value, right.Value);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
|
||||
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
|
||||
/// </summary>
|
||||
public Boolean AbstractGreaterThanOrEqual(EvaluationResult right)
|
||||
{
|
||||
return AbstractEqual(Value, right.Value) || AbstractGreaterThan(Value, right.Value);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
|
||||
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
|
||||
/// </summary>
|
||||
public Boolean AbstractLessThan(EvaluationResult right)
|
||||
{
|
||||
return AbstractLessThan(Value, right.Value);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
|
||||
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
|
||||
/// </summary>
|
||||
public Boolean AbstractLessThanOrEqual(EvaluationResult right)
|
||||
{
|
||||
return AbstractEqual(Value, right.Value) || AbstractLessThan(Value, right.Value);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
|
||||
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
|
||||
/// </summary>
|
||||
public Boolean AbstractNotEqual(EvaluationResult right)
|
||||
{
|
||||
return !AbstractEqual(Value, right.Value);
|
||||
}
|
||||
|
||||
public Double ConvertToNumber()
|
||||
{
|
||||
return ConvertToNumber(Value);
|
||||
}
|
||||
|
||||
public String ConvertToString()
|
||||
{
|
||||
switch (Kind)
|
||||
{
|
||||
case ValueKind.Null:
|
||||
return String.Empty;
|
||||
|
||||
case ValueKind.Boolean:
|
||||
return ((Boolean)Value) ? ExpressionConstants.True : ExpressionConstants.False;
|
||||
|
||||
case ValueKind.Number:
|
||||
if ((Double)Value == -0)
|
||||
{
|
||||
// .NET Core 3.0 now prints negative zero as -0, so we need this to keep out behavior consistent
|
||||
return ((Double)0).ToString(ExpressionConstants.NumberFormat, CultureInfo.InvariantCulture);
|
||||
}
|
||||
return ((Double)Value).ToString(ExpressionConstants.NumberFormat, CultureInfo.InvariantCulture);
|
||||
|
||||
case ValueKind.String:
|
||||
return Value as String;
|
||||
|
||||
default:
|
||||
return Kind.ToString();
|
||||
}
|
||||
}
|
||||
|
||||
public Boolean TryGetCollectionInterface(out Object collection)
|
||||
{
|
||||
if ((Kind == ValueKind.Object || Kind == ValueKind.Array))
|
||||
{
|
||||
var obj = Value;
|
||||
if (obj is IReadOnlyObject)
|
||||
{
|
||||
collection = obj;
|
||||
return true;
|
||||
}
|
||||
else if (obj is IReadOnlyArray)
|
||||
{
|
||||
collection = obj;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
collection = null;
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Useful for working with values that are not the direct evaluation result of a parameter.
|
||||
/// This allows ExpressionNode authors to leverage the coercion and comparison functions
|
||||
/// for any values.
|
||||
///
|
||||
/// Also note, the value will be canonicalized (for example numeric types converted to double) and any
|
||||
/// matching interfaces applied.
|
||||
/// </summary>
|
||||
public static EvaluationResult CreateIntermediateResult(
|
||||
EvaluationContext context,
|
||||
Object obj)
|
||||
{
|
||||
var val = ExpressionUtility.ConvertToCanonicalValue(obj, out ValueKind kind, out Object raw);
|
||||
return new EvaluationResult(context, 0, val, kind, raw, omitTracing: true);
|
||||
}
|
||||
|
||||
private void TraceValue(EvaluationContext context)
|
||||
{
|
||||
if (!m_omitTracing)
|
||||
{
|
||||
TraceValue(context, Value, Kind);
|
||||
}
|
||||
}
|
||||
|
||||
private void TraceValue(
|
||||
EvaluationContext context,
|
||||
Object val,
|
||||
ValueKind kind)
|
||||
{
|
||||
if (!m_omitTracing)
|
||||
{
|
||||
TraceVerbose(context, String.Concat("=> ", ExpressionUtility.FormatValue(context?.SecretMasker, val, kind)));
|
||||
}
|
||||
}
|
||||
|
||||
private void TraceVerbose(
|
||||
EvaluationContext context,
|
||||
String message)
|
||||
{
|
||||
if (!m_omitTracing)
|
||||
{
|
||||
context?.Trace.Verbose(String.Empty.PadLeft(m_level * 2, '.') + (message ?? String.Empty));
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
|
||||
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
|
||||
/// </summary>
|
||||
private static Boolean AbstractEqual(
|
||||
Object canonicalLeftValue,
|
||||
Object canonicalRightValue)
|
||||
{
|
||||
CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out var leftKind, out var rightKind);
|
||||
|
||||
// Same kind
|
||||
if (leftKind == rightKind)
|
||||
{
|
||||
switch (leftKind)
|
||||
{
|
||||
// Null, Null
|
||||
case ValueKind.Null:
|
||||
return true;
|
||||
|
||||
// Number, Number
|
||||
case ValueKind.Number:
|
||||
var leftDouble = (Double)canonicalLeftValue;
|
||||
var rightDouble = (Double)canonicalRightValue;
|
||||
if (Double.IsNaN(leftDouble) || Double.IsNaN(rightDouble))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return leftDouble == rightDouble;
|
||||
|
||||
// String, String
|
||||
case ValueKind.String:
|
||||
var leftString = (String)canonicalLeftValue;
|
||||
var rightString = (String)canonicalRightValue;
|
||||
return String.Equals(leftString, rightString, StringComparison.OrdinalIgnoreCase);
|
||||
|
||||
// Boolean, Boolean
|
||||
case ValueKind.Boolean:
|
||||
var leftBoolean = (Boolean)canonicalLeftValue;
|
||||
var rightBoolean = (Boolean)canonicalRightValue;
|
||||
return leftBoolean == rightBoolean;
|
||||
|
||||
// Object, Object
|
||||
case ValueKind.Object:
|
||||
case ValueKind.Array:
|
||||
return Object.ReferenceEquals(canonicalLeftValue, canonicalRightValue);
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
|
||||
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
|
||||
/// </summary>
|
||||
private static Boolean AbstractGreaterThan(
|
||||
Object canonicalLeftValue,
|
||||
Object canonicalRightValue)
|
||||
{
|
||||
CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out var leftKind, out var rightKind);
|
||||
|
||||
// Same kind
|
||||
if (leftKind == rightKind)
|
||||
{
|
||||
switch (leftKind)
|
||||
{
|
||||
// Number, Number
|
||||
case ValueKind.Number:
|
||||
var leftDouble = (Double)canonicalLeftValue;
|
||||
var rightDouble = (Double)canonicalRightValue;
|
||||
if (Double.IsNaN(leftDouble) || Double.IsNaN(rightDouble))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return leftDouble > rightDouble;
|
||||
|
||||
// String, String
|
||||
case ValueKind.String:
|
||||
var leftString = (String)canonicalLeftValue;
|
||||
var rightString = (String)canonicalRightValue;
|
||||
return String.Compare(leftString, rightString, StringComparison.OrdinalIgnoreCase) > 0;
|
||||
|
||||
// Boolean, Boolean
|
||||
case ValueKind.Boolean:
|
||||
var leftBoolean = (Boolean)canonicalLeftValue;
|
||||
var rightBoolean = (Boolean)canonicalRightValue;
|
||||
return leftBoolean && !rightBoolean;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
|
||||
/// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives.
|
||||
/// </summary>
|
||||
private static Boolean AbstractLessThan(
|
||||
Object canonicalLeftValue,
|
||||
Object canonicalRightValue)
|
||||
{
|
||||
CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out var leftKind, out var rightKind);
|
||||
|
||||
// Same kind
|
||||
if (leftKind == rightKind)
|
||||
{
|
||||
switch (leftKind)
|
||||
{
|
||||
// Number, Number
|
||||
case ValueKind.Number:
|
||||
var leftDouble = (Double)canonicalLeftValue;
|
||||
var rightDouble = (Double)canonicalRightValue;
|
||||
if (Double.IsNaN(leftDouble) || Double.IsNaN(rightDouble))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return leftDouble < rightDouble;
|
||||
|
||||
// String, String
|
||||
case ValueKind.String:
|
||||
var leftString = (String)canonicalLeftValue;
|
||||
var rightString = (String)canonicalRightValue;
|
||||
return String.Compare(leftString, rightString, StringComparison.OrdinalIgnoreCase) < 0;
|
||||
|
||||
// Boolean, Boolean
|
||||
case ValueKind.Boolean:
|
||||
var leftBoolean = (Boolean)canonicalLeftValue;
|
||||
var rightBoolean = (Boolean)canonicalRightValue;
|
||||
return !leftBoolean && rightBoolean;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3.
|
||||
/// Except objects are not coerced to primitives.
|
||||
private static void CoerceTypes(
|
||||
ref Object canonicalLeftValue,
|
||||
ref Object canonicalRightValue,
|
||||
out ValueKind leftKind,
|
||||
out ValueKind rightKind)
|
||||
{
|
||||
leftKind = GetKind(canonicalLeftValue);
|
||||
rightKind = GetKind(canonicalRightValue);
|
||||
|
||||
// Same kind
|
||||
if (leftKind == rightKind)
|
||||
{
|
||||
}
|
||||
// Number, String
|
||||
else if (leftKind == ValueKind.Number && rightKind == ValueKind.String)
|
||||
{
|
||||
canonicalRightValue = ConvertToNumber(canonicalRightValue);
|
||||
rightKind = ValueKind.Number;
|
||||
}
|
||||
// String, Number
|
||||
else if (leftKind == ValueKind.String && rightKind == ValueKind.Number)
|
||||
{
|
||||
canonicalLeftValue = ConvertToNumber(canonicalLeftValue);
|
||||
leftKind = ValueKind.Number;
|
||||
}
|
||||
// Boolean|Null, Any
|
||||
else if (leftKind == ValueKind.Boolean || leftKind == ValueKind.Null)
|
||||
{
|
||||
canonicalLeftValue = ConvertToNumber(canonicalLeftValue);
|
||||
CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out leftKind, out rightKind);
|
||||
}
|
||||
// Any, Boolean|Null
|
||||
else if (rightKind == ValueKind.Boolean || rightKind == ValueKind.Null)
|
||||
{
|
||||
canonicalRightValue = ConvertToNumber(canonicalRightValue);
|
||||
CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out leftKind, out rightKind);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// For primitives, follows the Javascript rules (the Number function in Javascript). Otherwise NaN.
|
||||
/// </summary>
|
||||
private static Double ConvertToNumber(Object canonicalValue)
|
||||
{
|
||||
var kind = GetKind(canonicalValue);
|
||||
switch (kind)
|
||||
{
|
||||
case ValueKind.Null:
|
||||
return 0d;
|
||||
case ValueKind.Boolean:
|
||||
return (Boolean)canonicalValue ? 1d : 0d;
|
||||
case ValueKind.Number:
|
||||
return (Double)canonicalValue;
|
||||
case ValueKind.String:
|
||||
return ExpressionUtility.ParseNumber(canonicalValue as String);
|
||||
}
|
||||
|
||||
return Double.NaN;
|
||||
}
|
||||
|
||||
private static ValueKind GetKind(Object canonicalValue)
|
||||
{
|
||||
if (Object.ReferenceEquals(canonicalValue, null))
|
||||
{
|
||||
return ValueKind.Null;
|
||||
}
|
||||
else if (canonicalValue is Boolean)
|
||||
{
|
||||
return ValueKind.Boolean;
|
||||
}
|
||||
else if (canonicalValue is Double)
|
||||
{
|
||||
return ValueKind.Number;
|
||||
}
|
||||
else if (canonicalValue is String)
|
||||
{
|
||||
return ValueKind.String;
|
||||
}
|
||||
else if (canonicalValue is IReadOnlyObject)
|
||||
{
|
||||
return ValueKind.Object;
|
||||
}
|
||||
else if (canonicalValue is IReadOnlyArray)
|
||||
{
|
||||
return ValueKind.Array;
|
||||
}
|
||||
|
||||
return ValueKind.Object;
|
||||
}
|
||||
|
||||
private readonly Int32 m_level;
|
||||
private readonly Boolean m_omitTracing;
|
||||
}
|
||||
}
|
||||
62
src/Sdk/Expressions/ExpressionConstants.cs
Normal file
62
src/Sdk/Expressions/ExpressionConstants.cs
Normal file
@@ -0,0 +1,62 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using GitHub.Actions.Expressions.Sdk;
|
||||
using GitHub.Actions.Expressions.Sdk.Functions;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
public static class ExpressionConstants
|
||||
{
|
||||
static ExpressionConstants()
|
||||
{
|
||||
AddFunction<Contains>("contains", 2, 2);
|
||||
AddFunction<EndsWith>("endsWith", 2, 2);
|
||||
AddFunction<Format>("format", 1, Byte.MaxValue);
|
||||
AddFunction<Join>("join", 1, 2);
|
||||
AddFunction<StartsWith>("startsWith", 2, 2);
|
||||
AddFunction<ToJson>("toJson", 1, 1);
|
||||
AddFunction<FromJson>("fromJson", 1, 1);
|
||||
}
|
||||
|
||||
private static void AddFunction<T>(String name, Int32 minParameters, Int32 maxParameters)
|
||||
where T : Function, new()
|
||||
{
|
||||
s_wellKnownFunctions.Add(name, new FunctionInfo<T>(name, minParameters, maxParameters));
|
||||
}
|
||||
|
||||
internal static readonly String False = "false";
|
||||
internal static readonly String Infinity = "Infinity";
|
||||
internal static readonly Int32 MaxDepth = 50;
|
||||
internal static readonly Int32 MaxLength = 21000; // Under 85,000 large object heap threshold, even if .NET switches to UTF-32
|
||||
internal static readonly String NaN = "NaN";
|
||||
internal static readonly String NegativeInfinity = "-Infinity";
|
||||
public static readonly String Null = "null";
|
||||
internal static readonly String NumberFormat = "G15";
|
||||
internal static readonly String True = "true";
|
||||
private static readonly Dictionary<String, IFunctionInfo> s_wellKnownFunctions = new Dictionary<String, IFunctionInfo>(StringComparer.OrdinalIgnoreCase);
|
||||
public static readonly IReadOnlyDictionary<String, IFunctionInfo> WellKnownFunctions = new ReadOnlyDictionary<String, IFunctionInfo>(s_wellKnownFunctions);
|
||||
|
||||
// Punctuation
|
||||
internal const Char StartGroup = '('; // logical grouping
|
||||
internal const Char StartIndex = '[';
|
||||
public static readonly Char StartParameter = '('; // function call
|
||||
internal const Char EndGroup = ')'; // logical grouping
|
||||
internal const Char EndIndex = ']';
|
||||
public static readonly Char EndParameter = ')'; // function calll
|
||||
internal const Char Separator = ',';
|
||||
internal const Char Dereference = '.';
|
||||
internal const Char Wildcard = '*';
|
||||
|
||||
// Operators
|
||||
internal const String Not = "!";
|
||||
internal const String NotEqual = "!=";
|
||||
internal const String GreaterThan = ">";
|
||||
internal const String GreaterThanOrEqual = ">=";
|
||||
internal const String LessThan = "<";
|
||||
internal const String LessThanOrEqual = "<=";
|
||||
internal const String Equal = "==";
|
||||
internal const String And = "&&";
|
||||
internal const String Or = "||";
|
||||
}
|
||||
}
|
||||
21
src/Sdk/Expressions/ExpressionException.cs
Normal file
21
src/Sdk/Expressions/ExpressionException.cs
Normal file
@@ -0,0 +1,21 @@
|
||||
using System;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
public class ExpressionException : Exception
|
||||
{
|
||||
internal ExpressionException(ISecretMasker secretMasker, String message)
|
||||
{
|
||||
if (secretMasker != null)
|
||||
{
|
||||
message = secretMasker.MaskSecrets(message);
|
||||
}
|
||||
|
||||
m_message = message;
|
||||
}
|
||||
|
||||
public override String Message => m_message;
|
||||
|
||||
private readonly String m_message;
|
||||
}
|
||||
}
|
||||
471
src/Sdk/Expressions/ExpressionParser.cs
Normal file
471
src/Sdk/Expressions/ExpressionParser.cs
Normal file
@@ -0,0 +1,471 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using GitHub.Actions.Expressions.Sdk.Operators;
|
||||
using GitHub.Actions.Expressions.Tokens;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
using GitHub.Actions.Expressions.Sdk;
|
||||
using GitHub.Actions.Expressions.Sdk.Functions;
|
||||
|
||||
public sealed class ExpressionParser
|
||||
{
|
||||
public IExpressionNode CreateTree(
|
||||
String expression,
|
||||
ITraceWriter trace,
|
||||
IEnumerable<INamedValueInfo> namedValues,
|
||||
IEnumerable<IFunctionInfo> functions)
|
||||
{
|
||||
var context = new ParseContext(expression, trace, namedValues, functions);
|
||||
context.Trace.Info($"Parsing expression: <{expression}>");
|
||||
return CreateTree(context);
|
||||
}
|
||||
|
||||
public IExpressionNode ValidateSyntax(
|
||||
String expression,
|
||||
ITraceWriter trace)
|
||||
{
|
||||
var context = new ParseContext(expression, trace, namedValues: null, functions: null, allowUnknownKeywords: true);
|
||||
context.Trace.Info($"Validating expression syntax: <{expression}>");
|
||||
return CreateTree(context);
|
||||
}
|
||||
|
||||
private static IExpressionNode CreateTree(ParseContext context)
|
||||
{
|
||||
// Push the tokens
|
||||
while (context.LexicalAnalyzer.TryGetNextToken(ref context.Token))
|
||||
{
|
||||
// Unexpected
|
||||
if (context.Token.Kind == TokenKind.Unexpected)
|
||||
{
|
||||
throw new ParseException(ParseExceptionKind.UnexpectedSymbol, context.Token, context.Expression);
|
||||
}
|
||||
// Operator
|
||||
else if (context.Token.IsOperator)
|
||||
{
|
||||
PushOperator(context);
|
||||
}
|
||||
// Operand
|
||||
else
|
||||
{
|
||||
PushOperand(context);
|
||||
}
|
||||
|
||||
context.LastToken = context.Token;
|
||||
}
|
||||
|
||||
// No tokens
|
||||
if (context.LastToken == null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Check unexpected end of expression
|
||||
if (context.Operators.Count > 0)
|
||||
{
|
||||
var unexpectedLastToken = false;
|
||||
switch (context.LastToken.Kind)
|
||||
{
|
||||
case TokenKind.EndGroup: // ")" logical grouping
|
||||
case TokenKind.EndIndex: // "]"
|
||||
case TokenKind.EndParameters: // ")" function call
|
||||
// Legal
|
||||
break;
|
||||
case TokenKind.Function:
|
||||
// Illegal
|
||||
unexpectedLastToken = true;
|
||||
break;
|
||||
default:
|
||||
unexpectedLastToken = context.LastToken.IsOperator;
|
||||
break;
|
||||
}
|
||||
|
||||
if (unexpectedLastToken || context.LexicalAnalyzer.UnclosedTokens.Any())
|
||||
{
|
||||
throw new ParseException(ParseExceptionKind.UnexpectedEndOfExpression, context.LastToken, context.Expression);
|
||||
}
|
||||
}
|
||||
|
||||
// Flush operators
|
||||
while (context.Operators.Count > 0)
|
||||
{
|
||||
FlushTopOperator(context);
|
||||
}
|
||||
|
||||
// Check max depth
|
||||
var result = context.Operands.Single();
|
||||
CheckMaxDepth(context, result);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private static void PushOperand(ParseContext context)
|
||||
{
|
||||
// Create the node
|
||||
var node = default(ExpressionNode);
|
||||
switch (context.Token.Kind)
|
||||
{
|
||||
// Function
|
||||
case TokenKind.Function:
|
||||
var function = context.Token.RawValue;
|
||||
if (TryGetFunctionInfo(context, function, out var functionInfo))
|
||||
{
|
||||
node = functionInfo.CreateNode();
|
||||
node.Name = function;
|
||||
}
|
||||
else if (context.AllowUnknownKeywords)
|
||||
{
|
||||
node = new NoOperation();
|
||||
node.Name = function;
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ParseException(ParseExceptionKind.UnrecognizedFunction, context.Token, context.Expression);
|
||||
}
|
||||
break;
|
||||
|
||||
// Named-value
|
||||
case TokenKind.NamedValue:
|
||||
var name = context.Token.RawValue;
|
||||
if (context.ExtensionNamedValues.TryGetValue(name, out var namedValueInfo))
|
||||
{
|
||||
node = namedValueInfo.CreateNode();
|
||||
node.Name = name;
|
||||
|
||||
}
|
||||
else if (context.AllowUnknownKeywords)
|
||||
{
|
||||
node = new NoOperationNamedValue();
|
||||
node.Name = name;
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ParseException(ParseExceptionKind.UnrecognizedNamedValue, context.Token, context.Expression);
|
||||
}
|
||||
break;
|
||||
|
||||
// Otherwise simple
|
||||
default:
|
||||
node = context.Token.ToNode();
|
||||
break;
|
||||
}
|
||||
|
||||
// Push the operand
|
||||
context.Operands.Push(node);
|
||||
}
|
||||
|
||||
private static void PushOperator(ParseContext context)
|
||||
{
|
||||
// Flush higher or equal precedence
|
||||
if (context.Token.Associativity == Associativity.LeftToRight)
|
||||
{
|
||||
var precedence = context.Token.Precedence;
|
||||
while (context.Operators.Count > 0)
|
||||
{
|
||||
var topOperator = context.Operators.Peek();
|
||||
if (precedence <= topOperator.Precedence &&
|
||||
topOperator.Kind != TokenKind.StartGroup && // Unless top is "(" logical grouping
|
||||
topOperator.Kind != TokenKind.StartIndex && // or unless top is "["
|
||||
topOperator.Kind != TokenKind.StartParameters &&// or unless top is "(" function call
|
||||
topOperator.Kind != TokenKind.Separator) // or unless top is ","
|
||||
{
|
||||
FlushTopOperator(context);
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Push the operator
|
||||
context.Operators.Push(context.Token);
|
||||
|
||||
// Process closing operators now, since context.LastToken is required
|
||||
// to accurately process TokenKind.EndParameters
|
||||
switch (context.Token.Kind)
|
||||
{
|
||||
case TokenKind.EndGroup: // ")" logical grouping
|
||||
case TokenKind.EndIndex: // "]"
|
||||
case TokenKind.EndParameters: // ")" function call
|
||||
FlushTopOperator(context);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private static void FlushTopOperator(ParseContext context)
|
||||
{
|
||||
// Special handling for closing operators
|
||||
switch (context.Operators.Peek().Kind)
|
||||
{
|
||||
case TokenKind.EndIndex: // "]"
|
||||
FlushTopEndIndex(context);
|
||||
return;
|
||||
|
||||
case TokenKind.EndGroup: // ")" logical grouping
|
||||
FlushTopEndGroup(context);
|
||||
return;
|
||||
|
||||
case TokenKind.EndParameters: // ")" function call
|
||||
FlushTopEndParameters(context);
|
||||
return;
|
||||
}
|
||||
|
||||
// Pop the operator
|
||||
var @operator = context.Operators.Pop();
|
||||
|
||||
// Create the node
|
||||
var node = (Container)@operator.ToNode();
|
||||
|
||||
// Pop the operands, add to the node
|
||||
var operands = PopOperands(context, @operator.OperandCount);
|
||||
foreach (var operand in operands)
|
||||
{
|
||||
// Flatten nested And
|
||||
if (node is And)
|
||||
{
|
||||
if (operand is And nestedAnd)
|
||||
{
|
||||
foreach (var nestedParameter in nestedAnd.Parameters)
|
||||
{
|
||||
node.AddParameter(nestedParameter);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// Flatten nested Or
|
||||
else if (node is Or)
|
||||
{
|
||||
if (operand is Or nestedOr)
|
||||
{
|
||||
foreach (var nestedParameter in nestedOr.Parameters)
|
||||
{
|
||||
node.AddParameter(nestedParameter);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
node.AddParameter(operand);
|
||||
}
|
||||
|
||||
// Push the node to the operand stack
|
||||
context.Operands.Push(node);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Flushes the ")" logical grouping operator
|
||||
/// </summary>
|
||||
private static void FlushTopEndGroup(ParseContext context)
|
||||
{
|
||||
// Pop the operators
|
||||
PopOperator(context, TokenKind.EndGroup); // ")" logical grouping
|
||||
PopOperator(context, TokenKind.StartGroup); // "(" logical grouping
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Flushes the "]" operator
|
||||
/// </summary>
|
||||
private static void FlushTopEndIndex(ParseContext context)
|
||||
{
|
||||
// Pop the operators
|
||||
PopOperator(context, TokenKind.EndIndex); // "]"
|
||||
var @operator = PopOperator(context, TokenKind.StartIndex); // "["
|
||||
|
||||
// Create the node
|
||||
var node = (Container)@operator.ToNode();
|
||||
|
||||
// Pop the operands, add to the node
|
||||
var operands = PopOperands(context, @operator.OperandCount);
|
||||
foreach (var operand in operands)
|
||||
{
|
||||
node.AddParameter(operand);
|
||||
}
|
||||
|
||||
// Push the node to the operand stack
|
||||
context.Operands.Push(node);
|
||||
}
|
||||
|
||||
// ")" function call
|
||||
private static void FlushTopEndParameters(ParseContext context)
|
||||
{
|
||||
// Pop the operator
|
||||
var @operator = PopOperator(context, TokenKind.EndParameters); // ")" function call
|
||||
|
||||
// Sanity check top operator is the current token
|
||||
if (!Object.ReferenceEquals(@operator, context.Token))
|
||||
{
|
||||
throw new InvalidOperationException("Expected the operator to be the current token");
|
||||
}
|
||||
|
||||
var function = default(Function);
|
||||
|
||||
// No parameters
|
||||
if (context.LastToken.Kind == TokenKind.StartParameters)
|
||||
{
|
||||
// Node already exists on the operand stack
|
||||
function = (Function)context.Operands.Peek();
|
||||
}
|
||||
// Has parameters
|
||||
else
|
||||
{
|
||||
// Pop the operands
|
||||
var parameterCount = 1;
|
||||
while (context.Operators.Peek().Kind == TokenKind.Separator)
|
||||
{
|
||||
parameterCount++;
|
||||
context.Operators.Pop();
|
||||
}
|
||||
var functionOperands = PopOperands(context, parameterCount);
|
||||
|
||||
// Node already exists on the operand stack
|
||||
function = (Function)context.Operands.Peek();
|
||||
|
||||
// Add the operands to the node
|
||||
foreach (var operand in functionOperands)
|
||||
{
|
||||
function.AddParameter(operand);
|
||||
}
|
||||
}
|
||||
|
||||
// Pop the "(" operator too
|
||||
@operator = PopOperator(context, TokenKind.StartParameters);
|
||||
|
||||
// Check min/max parameter count
|
||||
TryGetFunctionInfo(context, function.Name, out var functionInfo);
|
||||
if (functionInfo == null && context.AllowUnknownKeywords)
|
||||
{
|
||||
// Don't check min/max
|
||||
}
|
||||
else if (function.Parameters.Count < functionInfo.MinParameters)
|
||||
{
|
||||
throw new ParseException(ParseExceptionKind.TooFewParameters, token: @operator, expression: context.Expression);
|
||||
}
|
||||
else if (function.Parameters.Count > functionInfo.MaxParameters)
|
||||
{
|
||||
throw new ParseException(ParseExceptionKind.TooManyParameters, token: @operator, expression: context.Expression);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Pops N operands from the operand stack. The operands are returned
|
||||
/// in their natural listed order, i.e. not last-in-first-out.
|
||||
/// </summary>
|
||||
private static List<ExpressionNode> PopOperands(
|
||||
ParseContext context,
|
||||
Int32 count)
|
||||
{
|
||||
var result = new List<ExpressionNode>();
|
||||
while (count-- > 0)
|
||||
{
|
||||
result.Add(context.Operands.Pop());
|
||||
}
|
||||
|
||||
result.Reverse();
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Pops an operator and asserts it is the expected kind.
|
||||
/// </summary>
|
||||
private static Token PopOperator(
|
||||
ParseContext context,
|
||||
TokenKind expected)
|
||||
{
|
||||
var token = context.Operators.Pop();
|
||||
if (token.Kind != expected)
|
||||
{
|
||||
throw new NotSupportedException($"Expected operator '{expected}' to be popped. Actual '{token.Kind}'.");
|
||||
}
|
||||
return token;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Checks the max depth of the expression tree
|
||||
/// </summary>
|
||||
private static void CheckMaxDepth(
|
||||
ParseContext context,
|
||||
ExpressionNode node,
|
||||
Int32 depth = 1)
|
||||
{
|
||||
if (depth > ExpressionConstants.MaxDepth)
|
||||
{
|
||||
throw new ParseException(ParseExceptionKind.ExceededMaxDepth, token: null, expression: context.Expression);
|
||||
}
|
||||
|
||||
if (node is Container container)
|
||||
{
|
||||
foreach (var parameter in container.Parameters)
|
||||
{
|
||||
CheckMaxDepth(context, parameter, depth + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static Boolean TryGetFunctionInfo(
|
||||
ParseContext context,
|
||||
String name,
|
||||
out IFunctionInfo functionInfo)
|
||||
{
|
||||
return ExpressionConstants.WellKnownFunctions.TryGetValue(name, out functionInfo) ||
|
||||
context.ExtensionFunctions.TryGetValue(name, out functionInfo);
|
||||
}
|
||||
|
||||
private sealed class ParseContext
|
||||
{
|
||||
public Boolean AllowUnknownKeywords;
|
||||
public readonly String Expression;
|
||||
public readonly Dictionary<String, IFunctionInfo> ExtensionFunctions = new Dictionary<String, IFunctionInfo>(StringComparer.OrdinalIgnoreCase);
|
||||
public readonly Dictionary<String, INamedValueInfo> ExtensionNamedValues = new Dictionary<String, INamedValueInfo>(StringComparer.OrdinalIgnoreCase);
|
||||
public readonly LexicalAnalyzer LexicalAnalyzer;
|
||||
public readonly Stack<ExpressionNode> Operands = new Stack<ExpressionNode>();
|
||||
public readonly Stack<Token> Operators = new Stack<Token>();
|
||||
public readonly ITraceWriter Trace;
|
||||
public Token Token;
|
||||
public Token LastToken;
|
||||
|
||||
public ParseContext(
|
||||
String expression,
|
||||
ITraceWriter trace,
|
||||
IEnumerable<INamedValueInfo> namedValues,
|
||||
IEnumerable<IFunctionInfo> functions,
|
||||
Boolean allowUnknownKeywords = false)
|
||||
{
|
||||
Expression = expression ?? String.Empty;
|
||||
if (Expression.Length > ExpressionConstants.MaxLength)
|
||||
{
|
||||
throw new ParseException(ParseExceptionKind.ExceededMaxLength, token: null, expression: Expression);
|
||||
}
|
||||
|
||||
Trace = trace ?? new NoOperationTraceWriter();
|
||||
foreach (var namedValueInfo in (namedValues ?? new INamedValueInfo[0]))
|
||||
{
|
||||
ExtensionNamedValues.Add(namedValueInfo.Name, namedValueInfo);
|
||||
}
|
||||
|
||||
foreach (var functionInfo in (functions ?? new IFunctionInfo[0]))
|
||||
{
|
||||
ExtensionFunctions.Add(functionInfo.Name, functionInfo);
|
||||
}
|
||||
|
||||
LexicalAnalyzer = new LexicalAnalyzer(Expression);
|
||||
AllowUnknownKeywords = allowUnknownKeywords;
|
||||
}
|
||||
|
||||
private class NoOperationTraceWriter : ITraceWriter
|
||||
{
|
||||
public void Info(String message)
|
||||
{
|
||||
}
|
||||
|
||||
public void Verbose(String message)
|
||||
{
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
27
src/Sdk/Expressions/FunctionInfo.cs
Normal file
27
src/Sdk/Expressions/FunctionInfo.cs
Normal file
@@ -0,0 +1,27 @@
|
||||
using System;
|
||||
using GitHub.Actions.Expressions.Sdk;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
public class FunctionInfo<T> : IFunctionInfo
|
||||
where T : Function, new()
|
||||
{
|
||||
public FunctionInfo(String name, Int32 minParameters, Int32 maxParameters)
|
||||
{
|
||||
Name = name;
|
||||
MinParameters = minParameters;
|
||||
MaxParameters = maxParameters;
|
||||
}
|
||||
|
||||
public String Name { get; }
|
||||
|
||||
public Int32 MinParameters { get; }
|
||||
|
||||
public Int32 MaxParameters { get; }
|
||||
|
||||
public Function CreateNode()
|
||||
{
|
||||
return new T();
|
||||
}
|
||||
}
|
||||
}
|
||||
24
src/Sdk/Expressions/IExpressionNode.cs
Normal file
24
src/Sdk/Expressions/IExpressionNode.cs
Normal file
@@ -0,0 +1,24 @@
|
||||
#nullable enable
|
||||
|
||||
using System;
|
||||
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
public interface IExpressionNode
|
||||
{
|
||||
/// <summary>
|
||||
/// Evaluates the expression and returns the result, wrapped in a helper
|
||||
/// for converting, comparing, and traversing objects.
|
||||
/// </summary>
|
||||
/// <param name="trace">Optional trace writer</param>
|
||||
/// <param name="secretMasker">Optional secret masker</param>
|
||||
/// <param name="state">State object for custom evaluation function nodes and custom named-value nodes</param>
|
||||
/// <param name="options">Evaluation options</param>
|
||||
EvaluationResult Evaluate(
|
||||
ITraceWriter trace,
|
||||
ISecretMasker? secretMasker,
|
||||
Object state,
|
||||
EvaluationOptions options);
|
||||
}
|
||||
}
|
||||
320
src/Sdk/Expressions/IExpressionNodeExtensions.cs
Normal file
320
src/Sdk/Expressions/IExpressionNodeExtensions.cs
Normal file
@@ -0,0 +1,320 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using GitHub.Actions.Expressions.Sdk;
|
||||
using Index = GitHub.Actions.Expressions.Sdk.Operators.Index;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
public static class IExpressionNodeExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Returns the node and all descendant nodes
|
||||
/// </summary>
|
||||
public static IEnumerable<IExpressionNode> Traverse(this IExpressionNode node)
|
||||
{
|
||||
yield return node;
|
||||
|
||||
if (node is Container container && container.Parameters.Count > 0)
|
||||
{
|
||||
foreach (var parameter in container.Parameters)
|
||||
{
|
||||
foreach (var descendant in parameter.Traverse())
|
||||
{
|
||||
yield return descendant;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Checks whether specific contexts or sub-properties of contexts are referenced.
|
||||
/// If a conclusive determination cannot be made, then the pattern is considered matched.
|
||||
/// For example, the expression "toJson(github)" matches the pattern "github.event" because
|
||||
/// the value is passed to a function. Not enough information is known to determine whether
|
||||
/// the function requires the sub-property. Therefore, assume it is required.
|
||||
///
|
||||
/// Patterns may contain wildcards to match any literal. For example, the pattern
|
||||
/// "needs.*.outputs" will produce a match for the expression "needs.my-job.outputs.my-output".
|
||||
/// </summary>
|
||||
public static Boolean[] CheckReferencesContext(
|
||||
this IExpressionNode tree,
|
||||
params String[] patterns)
|
||||
{
|
||||
// The result is an array of booleans, one per pattern
|
||||
var result = new Boolean[patterns.Length];
|
||||
|
||||
// Stores the match segments for each pattern. For example
|
||||
// the patterns [ "github.event", "needs.*.outputs" ] would
|
||||
// be stored as:
|
||||
// [
|
||||
// [
|
||||
// NamedValue:github
|
||||
// Literal:"event"
|
||||
// ],
|
||||
// [
|
||||
// NamedValue:needs
|
||||
// Wildcard:*
|
||||
// Literal:"outputs"
|
||||
// ]
|
||||
// ]
|
||||
var segmentedPatterns = default(Stack<IExpressionNode>[]);
|
||||
|
||||
// Walk the expression tree
|
||||
var stack = new Stack<IExpressionNode>();
|
||||
stack.Push(tree);
|
||||
while (stack.Count > 0)
|
||||
{
|
||||
var node = stack.Pop();
|
||||
|
||||
// Attempt to match a named-value or index operator.
|
||||
// Note, when entering this block, descendant nodes are only pushed
|
||||
// to the stack for further processing under special conditions.
|
||||
if (node is NamedValue || node is Index)
|
||||
{
|
||||
// Lazy initialize the pattern segments
|
||||
if (segmentedPatterns is null)
|
||||
{
|
||||
segmentedPatterns = new Stack<IExpressionNode>[patterns.Length];
|
||||
var parser = new ExpressionParser();
|
||||
for (var i = 0; i < patterns.Length; i++)
|
||||
{
|
||||
var pattern = patterns[i];
|
||||
var patternTree = parser.ValidateSyntax(pattern, null);
|
||||
var patternSegments = GetMatchSegments(patternTree, out _);
|
||||
if (patternSegments.Count == 0)
|
||||
{
|
||||
throw new InvalidOperationException($"Invalid context-match-pattern '{pattern}'");
|
||||
}
|
||||
segmentedPatterns[i] = patternSegments;
|
||||
}
|
||||
}
|
||||
|
||||
// Match
|
||||
Match(node, segmentedPatterns, result, out var needsFurtherAnalysis);
|
||||
|
||||
// Push nested nodes that need further analysis
|
||||
if (needsFurtherAnalysis?.Count > 0)
|
||||
{
|
||||
foreach (var nestedNode in needsFurtherAnalysis)
|
||||
{
|
||||
stack.Push(nestedNode);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Push children of any other container node.
|
||||
else if (node is Container container && container.Parameters.Count > 0)
|
||||
{
|
||||
foreach (var child in container.Parameters)
|
||||
{
|
||||
stack.Push(child);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Attempts to match a node within a user-provided-expression against a set of patterns.
|
||||
//
|
||||
// For example consider the user-provided-expression "github.event.base_ref || github.event.before"
|
||||
// The Match method would be called twice, once for the sub-expression "github.event.base_ref" and
|
||||
// once for the sub-expression "github.event.before".
|
||||
private static void Match(
|
||||
IExpressionNode node,
|
||||
Stack<IExpressionNode>[] patterns,
|
||||
Boolean[] result,
|
||||
out List<ExpressionNode> needsFurtherAnalysis)
|
||||
{
|
||||
var nodeSegments = GetMatchSegments(node, out needsFurtherAnalysis);
|
||||
|
||||
if (nodeSegments.Count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var nodeNamedValue = nodeSegments.Peek() as NamedValue;
|
||||
var originalNodeSegments = nodeSegments;
|
||||
|
||||
for (var i = 0; i < patterns.Length; i++)
|
||||
{
|
||||
var patternSegments = patterns[i];
|
||||
var patternNamedValue = patternSegments.Peek() as NamedValue;
|
||||
|
||||
// Compare the named-value
|
||||
if (String.Equals(nodeNamedValue.Name, patternNamedValue.Name, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
// Clone the stacks before mutating
|
||||
nodeSegments = new Stack<IExpressionNode>(originalNodeSegments.Reverse()); // Push reverse to preserve order
|
||||
nodeSegments.Pop();
|
||||
patternSegments = new Stack<IExpressionNode>(patternSegments.Reverse()); // Push reverse to preserve order
|
||||
patternSegments.Pop();
|
||||
|
||||
// Walk the stacks
|
||||
while (true)
|
||||
{
|
||||
// Every pattern segment was matched
|
||||
if (patternSegments.Count == 0)
|
||||
{
|
||||
result[i] = true;
|
||||
break;
|
||||
}
|
||||
// Every node segment was matched. Treat the pattern as matched. There is not
|
||||
// enough information to determine whether the property is required; assume it is.
|
||||
// For example, consider the pattern "github.event" and the expression "toJson(github)".
|
||||
// In this example the function requires the full structure of the named-value.
|
||||
else if (nodeSegments.Count == 0)
|
||||
{
|
||||
result[i] = true;
|
||||
break;
|
||||
}
|
||||
|
||||
var nodeSegment = nodeSegments.Pop();
|
||||
var patternSegment = patternSegments.Pop();
|
||||
|
||||
// The behavior of a wildcard varies depending on whether the left operand
|
||||
// is an array or an object. For simplicity, treat the pattern as matched.
|
||||
if (nodeSegment is Wildcard)
|
||||
{
|
||||
result[i] = true;
|
||||
break;
|
||||
}
|
||||
// Treat a wildcard pattern segment as matching any literal segment
|
||||
else if (patternSegment is Wildcard)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Convert literals to string and compare
|
||||
var nodeLiteral = nodeSegment as Literal;
|
||||
var nodeEvaluationResult = EvaluationResult.CreateIntermediateResult(null, nodeLiteral.Value);
|
||||
var nodeString = nodeEvaluationResult.ConvertToString();
|
||||
var patternLiteral = patternSegment as Literal;
|
||||
var patternEvaluationResult = EvaluationResult.CreateIntermediateResult(null, patternLiteral.Value);
|
||||
var patternString = patternEvaluationResult.ConvertToString();
|
||||
if (String.Equals(nodeString, patternString, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Convert to number and compare
|
||||
var nodeNumber = nodeEvaluationResult.ConvertToNumber();
|
||||
if (!Double.IsNaN(nodeNumber) && nodeNumber >= 0d && nodeNumber <= (Double)Int32.MaxValue)
|
||||
{
|
||||
var patternNumber = patternEvaluationResult.ConvertToNumber();
|
||||
if (!Double.IsNaN(patternNumber) && patternNumber >= 0 && patternNumber <= (Double)Int32.MaxValue)
|
||||
{
|
||||
nodeNumber = Math.Floor(nodeNumber);
|
||||
patternNumber = Math.Floor(patternNumber);
|
||||
if (nodeNumber == patternNumber)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Not matched
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This function is used to convert a pattern or a user-provided-expression into a
|
||||
// consistent structure for easy comparison. The result is a stack containing only
|
||||
// nodes of type NamedValue, Literal, or Wildcard. All Index nodes are discarded.
|
||||
//
|
||||
// For example, consider the pattern "needs.*.outputs". The expression tree looks like:
|
||||
// Index(
|
||||
// Index(
|
||||
// NamedValue:needs,
|
||||
// Wildcard:*
|
||||
// ),
|
||||
// Literal:"outputs"
|
||||
// )
|
||||
// The result would be:
|
||||
// [
|
||||
// NamedValue:needs
|
||||
// Wildcard:*
|
||||
// Literal:"outputs"
|
||||
// ]
|
||||
//
|
||||
// Any nested expression trees that require further analysis, are returned separately.
|
||||
// For example, consider the expression "needs.build.outputs[github.event.base_ref]"
|
||||
// The result would be:
|
||||
// [
|
||||
// NamedValue:needs
|
||||
// Literal:"build"
|
||||
// Literal:"outputs"
|
||||
// ]
|
||||
// And the nested expression tree "github.event.base_ref" would be tracked as needing
|
||||
// further analysis.
|
||||
private static Stack<IExpressionNode> GetMatchSegments(
|
||||
IExpressionNode node,
|
||||
out List<ExpressionNode> needsFurtherAnalysis)
|
||||
{
|
||||
var result = new Stack<IExpressionNode>();
|
||||
needsFurtherAnalysis = new List<ExpressionNode>();
|
||||
|
||||
// Node is a named-value
|
||||
if (node is NamedValue)
|
||||
{
|
||||
result.Push(node);
|
||||
}
|
||||
// Node is an index
|
||||
else if (node is Index index)
|
||||
{
|
||||
while (true)
|
||||
{
|
||||
//
|
||||
// Parameter 1
|
||||
//
|
||||
var parameter1 = index.Parameters[1];
|
||||
|
||||
// Treat anything other than literal as a wildcard
|
||||
result.Push(parameter1 is Literal ? parameter1 : new Wildcard());
|
||||
|
||||
// Further analysis required by the caller if parameter 1 is a Function/Operator/NamedValue
|
||||
if (parameter1 is Container || parameter1 is NamedValue)
|
||||
{
|
||||
needsFurtherAnalysis.Add(parameter1);
|
||||
}
|
||||
|
||||
//
|
||||
// Parameter 0
|
||||
//
|
||||
var parameter0 = index.Parameters[0];
|
||||
|
||||
// Parameter 0 is a named-value
|
||||
if (parameter0 is NamedValue)
|
||||
{
|
||||
result.Push(parameter0);
|
||||
break;
|
||||
}
|
||||
// Parameter 0 is an index
|
||||
else if (parameter0 is Index index2)
|
||||
{
|
||||
index = index2;
|
||||
}
|
||||
// Otherwise clear
|
||||
else
|
||||
{
|
||||
result.Clear();
|
||||
|
||||
// Further analysis required by the caller if parameter 0 is a Function/Operator
|
||||
if (parameter0 is Container)
|
||||
{
|
||||
needsFurtherAnalysis.Add(parameter0);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
13
src/Sdk/Expressions/IFunctionInfo.cs
Normal file
13
src/Sdk/Expressions/IFunctionInfo.cs
Normal file
@@ -0,0 +1,13 @@
|
||||
using System;
|
||||
using GitHub.Actions.Expressions.Sdk;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
public interface IFunctionInfo
|
||||
{
|
||||
String Name { get; }
|
||||
Int32 MinParameters { get; }
|
||||
Int32 MaxParameters { get; }
|
||||
Function CreateNode();
|
||||
}
|
||||
}
|
||||
11
src/Sdk/Expressions/INamedValueInfo.cs
Normal file
11
src/Sdk/Expressions/INamedValueInfo.cs
Normal file
@@ -0,0 +1,11 @@
|
||||
using System;
|
||||
using GitHub.Actions.Expressions.Sdk;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
public interface INamedValueInfo
|
||||
{
|
||||
String Name { get; }
|
||||
NamedValue CreateNode();
|
||||
}
|
||||
}
|
||||
12
src/Sdk/Expressions/ISecretMasker.cs
Normal file
12
src/Sdk/Expressions/ISecretMasker.cs
Normal file
@@ -0,0 +1,12 @@
|
||||
using System;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
/// <summary>
|
||||
/// Used to mask secrets from trace messages and exception messages
|
||||
/// </summary>
|
||||
public interface ISecretMasker
|
||||
{
|
||||
String MaskSecrets(String input);
|
||||
}
|
||||
}
|
||||
10
src/Sdk/Expressions/ITraceWriter.cs
Normal file
10
src/Sdk/Expressions/ITraceWriter.cs
Normal file
@@ -0,0 +1,10 @@
|
||||
using System;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
public interface ITraceWriter
|
||||
{
|
||||
void Info(String message);
|
||||
void Verbose(String message);
|
||||
}
|
||||
}
|
||||
21
src/Sdk/Expressions/NamedValueInfo.cs
Normal file
21
src/Sdk/Expressions/NamedValueInfo.cs
Normal file
@@ -0,0 +1,21 @@
|
||||
using System;
|
||||
using GitHub.Actions.Expressions.Sdk;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
public class NamedValueInfo<T> : INamedValueInfo
|
||||
where T : NamedValue, new()
|
||||
{
|
||||
public NamedValueInfo(String name)
|
||||
{
|
||||
Name = name;
|
||||
}
|
||||
|
||||
public String Name { get; }
|
||||
|
||||
public NamedValue CreateNode()
|
||||
{
|
||||
return new T();
|
||||
}
|
||||
}
|
||||
}
|
||||
12
src/Sdk/Expressions/NoOpSecretMasker.cs
Normal file
12
src/Sdk/Expressions/NoOpSecretMasker.cs
Normal file
@@ -0,0 +1,12 @@
|
||||
using System;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
internal sealed class NoOpSecretMasker : ISecretMasker
|
||||
{
|
||||
public String MaskSecrets(String input)
|
||||
{
|
||||
return input;
|
||||
}
|
||||
}
|
||||
}
|
||||
68
src/Sdk/Expressions/ParseException.cs
Normal file
68
src/Sdk/Expressions/ParseException.cs
Normal file
@@ -0,0 +1,68 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using GitHub.Actions.Expressions.Tokens;
|
||||
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
public sealed class ParseException : ExpressionException
|
||||
{
|
||||
internal ParseException(ParseExceptionKind kind, Token token, String expression)
|
||||
: base(secretMasker: null, message: String.Empty)
|
||||
{
|
||||
Expression = expression;
|
||||
Kind = kind;
|
||||
RawToken = token?.RawValue;
|
||||
TokenIndex = token?.Index ?? 0;
|
||||
String description;
|
||||
switch (kind)
|
||||
{
|
||||
case ParseExceptionKind.ExceededMaxDepth:
|
||||
description = $"Exceeded max expression depth {ExpressionConstants.MaxDepth}";
|
||||
break;
|
||||
case ParseExceptionKind.ExceededMaxLength:
|
||||
description = $"Exceeded max expression length {ExpressionConstants.MaxLength}";
|
||||
break;
|
||||
case ParseExceptionKind.TooFewParameters:
|
||||
description = "Too few parameters supplied";
|
||||
break;
|
||||
case ParseExceptionKind.TooManyParameters:
|
||||
description = "Too many parameters supplied";
|
||||
break;
|
||||
case ParseExceptionKind.UnexpectedEndOfExpression:
|
||||
description = "Unexpected end of expression";
|
||||
break;
|
||||
case ParseExceptionKind.UnexpectedSymbol:
|
||||
description = "Unexpected symbol";
|
||||
break;
|
||||
case ParseExceptionKind.UnrecognizedFunction:
|
||||
description = "Unrecognized function";
|
||||
break;
|
||||
case ParseExceptionKind.UnrecognizedNamedValue:
|
||||
description = "Unrecognized named-value";
|
||||
break;
|
||||
default: // Should never reach here.
|
||||
throw new Exception($"Unexpected parse exception kind '{kind}'.");
|
||||
}
|
||||
|
||||
if (token == null)
|
||||
{
|
||||
Message = description;
|
||||
}
|
||||
else
|
||||
{
|
||||
Message = $"{description}: '{RawToken}'. Located at position {TokenIndex + 1} within expression: {Expression}";
|
||||
}
|
||||
}
|
||||
|
||||
internal String Expression { get; }
|
||||
|
||||
internal ParseExceptionKind Kind { get; }
|
||||
|
||||
internal String RawToken { get; }
|
||||
|
||||
internal Int32 TokenIndex { get; }
|
||||
|
||||
public sealed override String Message { get; }
|
||||
}
|
||||
}
|
||||
14
src/Sdk/Expressions/ParseExceptionKind.cs
Normal file
14
src/Sdk/Expressions/ParseExceptionKind.cs
Normal file
@@ -0,0 +1,14 @@
|
||||
namespace GitHub.Actions.Expressions
|
||||
{
|
||||
internal enum ParseExceptionKind
|
||||
{
|
||||
ExceededMaxDepth,
|
||||
ExceededMaxLength,
|
||||
TooFewParameters,
|
||||
TooManyParameters,
|
||||
UnexpectedEndOfExpression,
|
||||
UnexpectedSymbol,
|
||||
UnrecognizedFunction,
|
||||
UnrecognizedNamedValue,
|
||||
}
|
||||
}
|
||||
277
src/Sdk/Expressions/Resources/ExpressionResources.cs
Normal file
277
src/Sdk/Expressions/Resources/ExpressionResources.cs
Normal file
@@ -0,0 +1,277 @@
|
||||
// <auto-generated/>
|
||||
// *** AUTOMATICALLY GENERATED BY GenResourceClass -- DO NOT EDIT!!! ***
|
||||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.ComponentModel;
|
||||
using System.Globalization;
|
||||
using System.Reflection;
|
||||
using System.Resources;
|
||||
|
||||
namespace GitHub.Actions.Expressions {
|
||||
|
||||
|
||||
internal static class ExpressionResources
|
||||
{
|
||||
|
||||
|
||||
//********************************************************************************************
|
||||
/// Creates the resource manager instance.
|
||||
//********************************************************************************************
|
||||
static ExpressionResources()
|
||||
{
|
||||
s_resMgr = new ResourceManager("GitHub.Actions.Expressions.ExpressionResources", typeof(ExpressionResources).GetTypeInfo().Assembly);
|
||||
}
|
||||
|
||||
public static ResourceManager Manager
|
||||
{
|
||||
get
|
||||
{
|
||||
return s_resMgr;
|
||||
}
|
||||
}
|
||||
|
||||
//********************************************************************************************
|
||||
/// Returns a localized string given a resource string name.
|
||||
//********************************************************************************************
|
||||
public static String Get(
|
||||
String resourceName)
|
||||
{
|
||||
return s_resMgr.GetString(resourceName, CultureInfo.CurrentUICulture);
|
||||
}
|
||||
|
||||
//********************************************************************************************
|
||||
/// Returns a localized integer given a resource string name.
|
||||
//********************************************************************************************
|
||||
public static int GetInt(
|
||||
String resourceName)
|
||||
{
|
||||
return (int)s_resMgr.GetObject(resourceName, CultureInfo.CurrentUICulture);
|
||||
}
|
||||
|
||||
//********************************************************************************************
|
||||
/// Returns a localized string given a resource string name.
|
||||
//********************************************************************************************
|
||||
public static bool GetBool(
|
||||
String resourceName)
|
||||
{
|
||||
return (bool)s_resMgr.GetObject(resourceName, CultureInfo.CurrentUICulture);
|
||||
}
|
||||
|
||||
|
||||
//********************************************************************************************
|
||||
/// A little helper function to alleviate some typing associated with loading resources and
|
||||
/// formatting the strings. In DEBUG builds, it also asserts that the number of format
|
||||
/// arguments and the length of args match.
|
||||
//********************************************************************************************
|
||||
private static String Format( // The formatted resource string.
|
||||
String resourceName, // The name of the resource.
|
||||
params Object[] args) // Arguments to format.
|
||||
{
|
||||
String resource = Get(resourceName);
|
||||
|
||||
#if DEBUG
|
||||
// Check to make sure that the number of format string arguments matches the number of
|
||||
// arguments passed in.
|
||||
int formatArgCount = 0;
|
||||
bool[] argSeen = new bool[100];
|
||||
for (int i = 0; i < resource.Length; i++)
|
||||
{
|
||||
if (resource[i] == '{')
|
||||
{
|
||||
if (i + 1 < resource.Length &&
|
||||
resource[i + 1] == '{')
|
||||
{
|
||||
i++; // Skip the escaped curly braces.
|
||||
}
|
||||
else
|
||||
{
|
||||
// Move past the curly brace and leading whitespace.
|
||||
i++;
|
||||
while (Char.IsWhiteSpace(resource[i]))
|
||||
{
|
||||
i++;
|
||||
}
|
||||
|
||||
// Get the argument number.
|
||||
int length = 0;
|
||||
while (i + length < resource.Length && Char.IsDigit(resource[i + length]))
|
||||
{
|
||||
length++;
|
||||
}
|
||||
|
||||
// Record it if it hasn't already been seen.
|
||||
                        int argNumber = int.Parse(resource.Substring(i, length), CultureInfo.InvariantCulture);
                        if (!argSeen[argNumber])
                        {
                            formatArgCount++; // Count it as a formatting argument.
                            argSeen[argNumber] = true;
                        }
                    }
                }
            }

            Debug.Assert(args != null || formatArgCount == 0,
                String.Format(CultureInfo.InvariantCulture, "The number of format arguments is {0}, but the args parameter is null.", formatArgCount));
            Debug.Assert(args == null || formatArgCount == args.Length,
                String.Format(CultureInfo.InvariantCulture, "Coding error using resource \"{0}\": The number of format arguments {1} != number of args {2}",
                    resourceName, formatArgCount, args != null ? args.Length : 0));
#endif // DEBUG

            if (args == null)
            {
                return resource;
            }

            // If there are any DateTime structs in the arguments, we need to bracket them
            // to make sure they are within the supported range of the current calendar.
            for (int i = 0; i < args.Length; i++)
            {
                // DateTime is a struct, we cannot use the as operator and null check.
                if (args[i] is DateTime)
                {
                    DateTime dateTime = (DateTime)args[i];

                    // We need to fetch the calendar on each Format call since it may change.
                    // Since we don't have more than one DateTime for resource, do not
                    // bother to cache this for the duration of the for loop.
                    Calendar calendar = DateTimeFormatInfo.CurrentInfo.Calendar;
                    if (dateTime > calendar.MaxSupportedDateTime)
                    {
                        args[i] = calendar.MaxSupportedDateTime;
                    }
                    else if (dateTime < calendar.MinSupportedDateTime)
                    {
                        args[i] = calendar.MinSupportedDateTime;
                    }
                }
            }

            return String.Format(CultureInfo.CurrentCulture, resource, args);
        }

        // According to the documentation for the ResourceManager class, it should be sufficient to
        // create a single static instance. The following is an excerpt from the 1.1 documentation.
        // Using the methods of ResourceManager, a caller can access the resources for a particular
        // culture using the GetObject and GetString methods. By default, these methods return the
        // resource for the culture determined by the current cultural settings of the thread that made
        // the call.
        private static ResourceManager s_resMgr;

        /// <summary>
        /// The maximum allowed memory size was exceeded while evaluating the following expression: {0}
        /// </summary>
        public static String ExceededAllowedMemory(object arg0) { return Format("ExceededAllowedMemory", arg0); }

        /// <summary>
        /// 0 is replaced with a number.
        ///
        /// Exceeded max expression depth {0}
        /// </summary>
        public static String ExceededMaxExpressionDepth(object arg0) { return Format("ExceededMaxExpressionDepth", arg0); }

        /// <summary>
        /// 0 is replaced with a number.
        ///
        /// Exceeded max expression length {0}
        /// </summary>
        public static String ExceededMaxExpressionLength(object arg0) { return Format("ExceededMaxExpressionLength", arg0); }

        /// <summary>
        /// Expected a property name to follow the dereference operator '.'
        /// </summary>
        public static String ExpectedPropertyName() { return Get("ExpectedPropertyName"); }

        /// <summary>
        /// Expected '(' to follow a function
        /// </summary>
        public static String ExpectedStartParameter() { return Get("ExpectedStartParameter"); }

        /// <summary>
        /// The following format string references more arguments than were supplied: {0}
        /// </summary>
        public static String InvalidFormatArgIndex(object arg0) { return Format("InvalidFormatArgIndex", arg0); }

        /// <summary>
        /// The format specifiers '{0}' are not valid for objects of type '{1}'
        /// </summary>
        public static String InvalidFormatSpecifiers(object arg0, object arg1) { return Format("InvalidFormatSpecifiers", arg0, arg1); }

        /// <summary>
        /// The following format string is invalid: {0}
        /// </summary>
        public static String InvalidFormatString(object arg0) { return Format("InvalidFormatString", arg0); }

        /// <summary>
        /// Key not found '{0}'
        /// </summary>
        public static String KeyNotFound(object arg0) { return Format("KeyNotFound", arg0); }

        /// <summary>
        /// 0 is replaced with the error message
        ///
        /// {0}.
        /// </summary>
        public static String ParseErrorWithFwlink(object arg0) { return Format("ParseErrorWithFwlink", arg0); }

        /// <summary>
        /// 0 is replaced with the parse error message
        /// 1 is replaced with the token
        /// 2 is replaced with the character position within the string
        /// 3 is replaced with the full statement
        ///
        /// {0}: '{1}'. Located at position {2} within expression: {3}.
        /// </summary>
        public static String ParseErrorWithTokenInfo(object arg0, object arg1, object arg2, object arg3) { return Format("ParseErrorWithTokenInfo", arg0, arg1, arg2, arg3); }

        /// <summary>
        /// 0 is replaced with the from-type.
        /// 1 is replaced with the to-type.
        /// 2 is replaced with the value.
        ///
        /// Unable to convert from {0} to {1}. Value: {2}
        /// </summary>
        public static String TypeCastError(object arg0, object arg1, object arg2) { return Format("TypeCastError", arg0, arg1, arg2); }

        /// <summary>
        /// 0 is replaced with the from-type.
        /// 1 is replaced with the to-type.
        ///
        /// Unable to convert from {0} to {1}.
        /// </summary>
        public static String TypeCastErrorNoValue(object arg0, object arg1) { return Format("TypeCastErrorNoValue", arg0, arg1); }

        /// <summary>
        /// 0 is replaced with the from-type.
        /// 1 is replaced with the to-type.
        /// 2 is replaced with the value.
        /// 3 is replaced with the error message.
        ///
        /// Unable to convert from {0} to {1}. Value: {2}. Error: {3}
        /// </summary>
        public static String TypeCastErrorWithError(object arg0, object arg1, object arg2, object arg3) { return Format("TypeCastErrorWithError", arg0, arg1, arg2, arg3); }

        /// <summary>
        /// Unclosed function
        /// </summary>
        public static String UnclosedFunction() { return Get("UnclosedFunction"); }

        /// <summary>
        /// Unclosed indexer
        /// </summary>
        public static String UnclosedIndexer() { return Get("UnclosedIndexer"); }

        /// <summary>
        /// Unexpected symbol
        /// </summary>
        public static String UnexpectedSymbol() { return Get("UnexpectedSymbol"); }

        /// <summary>
        /// Unrecognized value
        /// </summary>
        public static String UnrecognizedValue() { return Get("UnrecognizedValue"); }
    }
} // namespace
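
As an aside for readers of the diff: the calendar bracketing that Format performs above can be seen in isolation with a minimal standalone sketch (hypothetical demo code, not a file in this change). It clamps an out-of-range DateTime argument into the current calendar's supported range before formatting; the point is that some calendars cannot format dates outside that range, so formatting would otherwise fail.

using System;
using System.Globalization;

class CalendarClampDemo
{
    static void Main()
    {
        object[] args = { DateTime.MaxValue };

        // Mirror of the loop above: clamp DateTime args into the range the
        // current culture's calendar can actually format.
        Calendar calendar = DateTimeFormatInfo.CurrentInfo.Calendar;
        for (int i = 0; i < args.Length; i++)
        {
            if (args[i] is DateTime dateTime)
            {
                if (dateTime > calendar.MaxSupportedDateTime)
                {
                    args[i] = calendar.MaxSupportedDateTime;
                }
                else if (dateTime < calendar.MinSupportedDateTime)
                {
                    args[i] = calendar.MinSupportedDateTime;
                }
            }
        }

        Console.WriteLine(String.Format(CultureInfo.CurrentCulture, "Clamped to: {0}", args[0]));
    }
}
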

190  src/Sdk/Expressions/Resources/ExpressionResources.resx  Normal file
@@ -0,0 +1,190 @@
<?xml version="1.0" encoding="utf-8"?>
<root>
  <!--
    Microsoft ResX Schema

    Version 2.0

    The primary goals of this format is to allow a simple XML format
    that is mostly human readable. The generation and parsing of the
    various data types are done through the TypeConverter classes
    associated with the data types.

    Example:

    ... ado.net/XML headers & schema ...
    <resheader name="resmimetype">text/microsoft-resx</resheader>
    <resheader name="version">2.0</resheader>
    <resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
    <resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
    <data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
    <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
    <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
        <value>[base64 mime encoded serialized .NET Framework object]</value>
    </data>
    <data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
        <value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
        <comment>This is a comment</comment>
    </data>

    There are any number of "resheader" rows that contain simple
    name/value pairs.

    Each data row contains a name, and value. The row also contains a
    type or mimetype. Type corresponds to a .NET class that support
    text/value conversion through the TypeConverter architecture.
    Classes that don't support this are serialized and stored with the
    mimetype set.

    The mimetype is used for serialized objects, and tells the
    ResXResourceReader how to depersist the object. This is currently not
    extensible. For a given mimetype the value must be set accordingly:

    Note - application/x-microsoft.net.object.binary.base64 is the format
    that the ResXResourceWriter will generate, however the reader can
    read any of the formats listed below.

    mimetype: application/x-microsoft.net.object.binary.base64
    value   : The object must be serialized with
            : System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
            : and then encoded with base64 encoding.

    mimetype: application/x-microsoft.net.object.soap.base64
    value   : The object must be serialized with
            : System.Runtime.Serialization.Formatters.Soap.SoapFormatter
            : and then encoded with base64 encoding.

    mimetype: application/x-microsoft.net.object.bytearray.base64
    value   : The object must be serialized into a byte array
            : using a System.ComponentModel.TypeConverter
            : and then encoded with base64 encoding.
    -->
  <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
    <xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
    <xsd:element name="root" msdata:IsDataSet="true">
      <xsd:complexType>
        <xsd:choice maxOccurs="unbounded">
          <xsd:element name="metadata">
            <xsd:complexType>
              <xsd:sequence>
                <xsd:element name="value" type="xsd:string" minOccurs="0" />
              </xsd:sequence>
              <xsd:attribute name="name" use="required" type="xsd:string" />
              <xsd:attribute name="type" type="xsd:string" />
              <xsd:attribute name="mimetype" type="xsd:string" />
              <xsd:attribute ref="xml:space" />
            </xsd:complexType>
          </xsd:element>
          <xsd:element name="assembly">
            <xsd:complexType>
              <xsd:attribute name="alias" type="xsd:string" />
              <xsd:attribute name="name" type="xsd:string" />
            </xsd:complexType>
          </xsd:element>
          <xsd:element name="data">
            <xsd:complexType>
              <xsd:sequence>
                <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
                <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
              </xsd:sequence>
              <xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
              <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
              <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
              <xsd:attribute ref="xml:space" />
            </xsd:complexType>
          </xsd:element>
          <xsd:element name="resheader">
            <xsd:complexType>
              <xsd:sequence>
                <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
              </xsd:sequence>
              <xsd:attribute name="name" type="xsd:string" use="required" />
            </xsd:complexType>
          </xsd:element>
        </xsd:choice>
      </xsd:complexType>
    </xsd:element>
  </xsd:schema>
  <resheader name="resmimetype">
    <value>text/microsoft-resx</value>
  </resheader>
  <resheader name="version">
    <value>2.0</value>
  </resheader>
  <resheader name="reader">
    <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
  </resheader>
  <resheader name="writer">
    <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
  </resheader>
  <data name="ExceededAllowedMemory" xml:space="preserve">
    <value>The maximum allowed memory size was exceeded while evaluating the following expression: {0}</value>
  </data>
  <data name="ExceededMaxExpressionDepth" xml:space="preserve">
    <value>Exceeded max expression depth {0}</value>
    <comment>0 is replaced with a number.</comment>
  </data>
  <data name="ExceededMaxExpressionLength" xml:space="preserve">
    <value>Exceeded max expression length {0}</value>
    <comment>0 is replaced with a number.</comment>
  </data>
  <data name="ExpectedPropertyName" xml:space="preserve">
    <value>Expected a property name to follow the dereference operator '.'</value>
  </data>
  <data name="ExpectedStartParameter" xml:space="preserve">
    <value>Expected '(' to follow a function</value>
  </data>
  <data name="InvalidFormatArgIndex" xml:space="preserve">
    <value>The following format string references more arguments than were supplied: {0}</value>
  </data>
  <data name="InvalidFormatSpecifiers" xml:space="preserve">
    <value>The format specifiers '{0}' are not valid for objects of type '{1}'</value>
  </data>
  <data name="InvalidFormatString" xml:space="preserve">
    <value>The following format string is invalid: {0}</value>
  </data>
  <data name="KeyNotFound" xml:space="preserve">
    <value>Key not found '{0}'</value>
  </data>
  <data name="ParseErrorWithFwlink" xml:space="preserve">
    <value>{0}.</value>
    <comment>0 is replaced with the error message</comment>
  </data>
  <data name="ParseErrorWithTokenInfo" xml:space="preserve">
    <value>{0}: '{1}'. Located at position {2} within expression: {3}.</value>
    <comment>0 is replaced with the parse error message
1 is replaced with the token
2 is replaced with the character position within the string
3 is replaced with the full statement</comment>
  </data>
  <data name="TypeCastError" xml:space="preserve">
    <value>Unable to convert from {0} to {1}. Value: {2}</value>
    <comment>0 is replaced with the from-type.
1 is replaced with the to-type.
2 is replaced with the value.</comment>
  </data>
  <data name="TypeCastErrorNoValue" xml:space="preserve">
    <value>Unable to convert from {0} to {1}.</value>
    <comment>0 is replaced with the from-type.
1 is replaced with the to-type.</comment>
  </data>
  <data name="TypeCastErrorWithError" xml:space="preserve">
    <value>Unable to convert from {0} to {1}. Value: {2}. Error: {3}</value>
    <comment>0 is replaced with the from-type.
1 is replaced with the to-type.
2 is replaced with the value.
3 is replaced with the error message.</comment>
  </data>
  <data name="UnclosedFunction" xml:space="preserve">
    <value>Unclosed function</value>
  </data>
  <data name="UnclosedIndexer" xml:space="preserve">
    <value>Unclosed indexer</value>
  </data>
  <data name="UnexpectedSymbol" xml:space="preserve">
    <value>Unexpected symbol</value>
  </data>
  <data name="UnrecognizedValue" xml:space="preserve">
    <value>Unrecognized value</value>
  </data>
</root>
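
To make the relationship concrete: each data entry above becomes a same-named accessor on ExpressionResources, and its {n} placeholders are filled positionally. Below is a small hypothetical demo of that formatting, inlining the TypeCastError value rather than loading it through a ResourceManager.

using System;
using System.Globalization;

class ResxPatternDemo
{
    static void Main()
    {
        // The "TypeCastError" value from the .resx above, inlined for the demo.
        const string pattern = "Unable to convert from {0} to {1}. Value: {2}";

        // ExpressionResources.TypeCastError(arg0, arg1, arg2) ultimately performs
        // an equivalent String.Format call with the current culture.
        Console.WriteLine(String.Format(CultureInfo.CurrentCulture, pattern, "String", "Number", "'abc'"));
        // Output: Unable to convert from String to Number. Value: 'abc'
    }
}
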

17  src/Sdk/Expressions/Sdk/Container.cs  Normal file
@@ -0,0 +1,17 @@
using System.Collections.Generic;

namespace GitHub.Actions.Expressions.Sdk
{
    public abstract class Container : ExpressionNode
    {
        public IReadOnlyList<ExpressionNode> Parameters => m_parameters.AsReadOnly();

        public void AddParameter(ExpressionNode node)
        {
            m_parameters.Add(node);
            node.Container = this;
        }

        private readonly List<ExpressionNode> m_parameters = new List<ExpressionNode>();
    }
}

79  src/Sdk/Expressions/Sdk/EvaluationContext.cs  Normal file
@@ -0,0 +1,79 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references

using System;
using System.Collections.Generic;

namespace GitHub.Actions.Expressions.Sdk
{
    public sealed class EvaluationContext
    {
        internal EvaluationContext(
            ITraceWriter trace,
            ISecretMasker secretMasker,
            Object state,
            EvaluationOptions options,
            ExpressionNode node)
        {
            Trace = trace ?? throw new ArgumentNullException(nameof(trace));
            SecretMasker = secretMasker ?? throw new ArgumentNullException(nameof(secretMasker));
            State = state;

            // Copy the options
            options = new EvaluationOptions(copy: options);
            if (options.MaxMemory == 0)
            {
                // Set a reasonable default max memory
                options.MaxMemory = 1048576; // 1 mb
            }
            if (options.MaxCacheMemory <= 0)
            {
                // Set a reasonable default max cache bytes
                options.MaxCacheMemory = 1048576; // 1 mb
            }
            Options = options;
            Memory = new EvaluationMemory(options.MaxMemory, node);

            m_traceResults = new Dictionary<ExpressionNode, String>();
            m_traceMemory = new MemoryCounter(null, options.MaxCacheMemory);
        }

        public ITraceWriter Trace { get; }

        public ISecretMasker SecretMasker { get; }

        public Object State { get; }

        internal EvaluationMemory Memory { get; }

        internal EvaluationOptions Options { get; }

        internal void SetTraceResult(
            ExpressionNode node,
            EvaluationResult result)
        {
            // Remove if previously added. This typically should not happen. This could happen
            // due to a badly authored function. So we'll handle it and track memory correctly.
            if (m_traceResults.TryGetValue(node, out String oldValue))
            {
                m_traceMemory.Remove(oldValue);
                m_traceResults.Remove(node);
            }

            // Check max memory
            String value = ExpressionUtility.FormatValue(SecretMasker, result);
            if (m_traceMemory.TryAdd(value))
            {
                // Store the result
                m_traceResults[node] = value;
            }
        }

        internal Boolean TryGetTraceResult(ExpressionNode node, out String value)
        {
            return m_traceResults.TryGetValue(node, out value);
        }

        private readonly Dictionary<ExpressionNode, String> m_traceResults = new Dictionary<ExpressionNode, String>();
        private readonly MemoryCounter m_traceMemory;
    }
}
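
The SetTraceResult bookkeeping above amounts to a size-capped cache: an entry is stored only if its formatted value still fits under MaxCacheMemory, and a re-added node first releases its old entry. The following is a rough standalone model of that behavior (MemoryCounter itself is not part of this diff, so the byte accounting here is simplified to character counts).

using System.Collections.Generic;

// Simplified stand-in for the trace-result cache: keeps a value per key only
// while the running total of value lengths stays under a fixed budget.
class CappedTraceCache<TKey>
{
    private readonly Dictionary<TKey, string> m_results = new Dictionary<TKey, string>();
    private readonly int m_maxChars;
    private int m_usedChars;

    public CappedTraceCache(int maxChars) { m_maxChars = maxChars; }

    public void SetResult(TKey key, string value)
    {
        // Remove if previously added, releasing its share of the budget first.
        if (m_results.TryGetValue(key, out string oldValue))
        {
            m_usedChars -= oldValue.Length;
            m_results.Remove(key);
        }

        // Only store when the new value still fits under the cap.
        if (m_usedChars + value.Length <= m_maxChars)
        {
            m_usedChars += value.Length;
            m_results[key] = value;
        }
    }

    public bool TryGetResult(TKey key, out string value) => m_results.TryGetValue(key, out value);
}

Usage is the same shape as the SDK's: set a formatted result per node during evaluation, then look it up when producing the expanded trace.
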

111  src/Sdk/Expressions/Sdk/EvaluationMemory.cs  Normal file
@@ -0,0 +1,111 @@
using System;
using System.Collections.Generic;

namespace GitHub.Actions.Expressions.Sdk
{
    /// <summary>
    /// This is an internal class only.
    ///
    /// This class is used to track current memory consumption
    /// across the entire expression evaluation.
    /// </summary>
    internal sealed class EvaluationMemory
    {
        internal EvaluationMemory(
            Int32 maxBytes,
            ExpressionNode node)
        {
            m_maxAmount = maxBytes;
            m_node = node;
        }

        internal void AddAmount(
            Int32 depth,
            Int32 bytes,
            Boolean trimDepth = false)
        {
            // Trim deeper depths
            if (trimDepth)
            {
                while (m_maxActiveDepth > depth)
                {
                    var amount = m_depths[m_maxActiveDepth];

                    if (amount > 0)
                    {
                        // Sanity check
                        if (amount > m_totalAmount)
                        {
                            throw new InvalidOperationException("Bytes to subtract exceeds total bytes");
                        }

                        // Subtract from the total
                        checked
                        {
                            m_totalAmount -= amount;
                        }

                        // Reset the amount
                        m_depths[m_maxActiveDepth] = 0;
                    }

                    m_maxActiveDepth--;
                }
            }

            // Grow the depths
            if (depth > m_maxActiveDepth)
            {
                // Grow the list
                while (m_depths.Count <= depth)
                {
                    m_depths.Add(0);
                }

                // Adjust the max active depth
                m_maxActiveDepth = depth;
            }

            checked
            {
                // Add to the depth
                m_depths[depth] += bytes;

                // Add to the total
                m_totalAmount += bytes;
            }

            // Check max
            if (m_totalAmount > m_maxAmount)
            {
                throw new InvalidOperationException(ExpressionResources.ExceededAllowedMemory(m_node?.ConvertToExpression()));
            }
        }

        internal static Int32 CalculateBytes(Object obj)
        {
            if (obj is String str)
            {
                // This measurement doesn't have to be perfect
                // https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/

                checked
                {
                    return c_stringBaseOverhead + ((str?.Length ?? 0) * sizeof(Char));
                }
            }
            else
            {
                return c_minObjectSize;
            }
        }

        private const Int32 c_minObjectSize = 24;
        private const Int32 c_stringBaseOverhead = 26;
        private readonly List<Int32> m_depths = new List<Int32>();
        private readonly Int32 m_maxAmount;
        private readonly ExpressionNode m_node;
        private Int32 m_maxActiveDepth = -1;
        private Int32 m_totalAmount;
    }
}
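
The constants above define the estimate CalculateBytes uses: roughly 26 bytes of string overhead plus two bytes per character, and a flat 24 bytes for any other object. A quick hypothetical check of those numbers, reproducing the same arithmetic outside the SDK:

using System;

class CalculateBytesDemo
{
    // Same arithmetic as EvaluationMemory.CalculateBytes, reproduced for the demo.
    static int EstimateBytes(object obj)
    {
        const int minObjectSize = 24;
        const int stringBaseOverhead = 26;
        return obj is string str
            ? stringBaseOverhead + (str.Length * sizeof(char))
            : minObjectSize;
    }

    static void Main()
    {
        Console.WriteLine(EstimateBytes("hello"));   // 26 + 5 * 2 = 36
        Console.WriteLine(EstimateBytes(3.14));      // 24
    }
}
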

34  src/Sdk/Expressions/Sdk/EvaluationTraceWriter.cs  Normal file
@@ -0,0 +1,34 @@
using System;

namespace GitHub.Actions.Expressions.Sdk
{
    internal sealed class EvaluationTraceWriter : ITraceWriter
    {
        public EvaluationTraceWriter(ITraceWriter trace, ISecretMasker secretMasker)
        {
            m_trace = trace;
            m_secretMasker = secretMasker ?? throw new ArgumentNullException(nameof(secretMasker));
        }

        public void Info(String message)
        {
            if (m_trace != null)
            {
                message = m_secretMasker.MaskSecrets(message);
                m_trace.Info(message);
            }
        }

        public void Verbose(String message)
        {
            if (m_trace != null)
            {
                message = m_secretMasker.MaskSecrets(message);
                m_trace.Verbose(message);
            }
        }

        private readonly ISecretMasker m_secretMasker;
        private readonly ITraceWriter m_trace;
    }
}

187  src/Sdk/Expressions/Sdk/ExpressionNode.cs  Normal file
@@ -0,0 +1,187 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references

using System;

namespace GitHub.Actions.Expressions.Sdk
{
    public abstract class ExpressionNode : IExpressionNode
    {
        internal Container Container { get; set; }

        internal Int32 Level { get; private set; }

        /// <summary>
        /// The name is used for tracing. Normally the parser will set the name. However if a node
        /// is added manually, then the name may not be set and will fall back to the type name.
        /// </summary>
        public String Name
        {
            get
            {
                return !String.IsNullOrEmpty(m_name) ? m_name : this.GetType().Name;
            }

            set
            {
                m_name = value;
            }
        }

        /// <summary>
        /// Indicates whether the evaluation result should be stored on the context and used
        /// when the expanded result is traced.
        /// </summary>
        protected abstract Boolean TraceFullyExpanded { get; }

        /// <summary>
        /// IExpressionNode entry point.
        /// </summary>
        EvaluationResult IExpressionNode.Evaluate(
            ITraceWriter trace,
            ISecretMasker secretMasker,
            Object state,
            EvaluationOptions options)
        {
            if (Container != null)
            {
                // Do not localize. This is an SDK consumer error.
                throw new NotSupportedException($"Expected {nameof(IExpressionNode)}.{nameof(Evaluate)} to be called on root node only.");
            }

            var originalSecretMasker = secretMasker;
            try
            {
                // Evaluate
                secretMasker = secretMasker ?? new NoOpSecretMasker();
                trace = new EvaluationTraceWriter(trace, secretMasker);
                var context = new EvaluationContext(trace, secretMasker, state, options, this);
                var originalExpression = ConvertToExpression();
                trace.Info($"Evaluating: {originalExpression}");
                var result = Evaluate(context);

                // Trace the result
                TraceTreeResult(context, originalExpression, result.Value, result.Kind);

                return result;
            }
            finally
            {
                if (secretMasker != null && secretMasker != originalSecretMasker)
                {
                    (secretMasker as IDisposable)?.Dispose();
                    secretMasker = null;
                }
            }
        }

        /// <summary>
        /// This function is intended only for ExpressionNode authors to call. The EvaluationContext
        /// caches result-state specific to the evaluation instance.
        /// </summary>
        public EvaluationResult Evaluate(EvaluationContext context)
        {
            // Evaluate
            Level = Container == null ? 0 : Container.Level + 1;
            TraceVerbose(context, Level, $"Evaluating {Name}:");
            var coreResult = EvaluateCore(context, out ResultMemory coreMemory);

            if (coreMemory == null)
            {
                coreMemory = new ResultMemory();
            }

            // Convert to canonical value
            var val = ExpressionUtility.ConvertToCanonicalValue(coreResult, out ValueKind kind, out Object raw);

            // The depth can be safely trimmed when the total size of the core result is known,
            // or when the total size of the core result can easily be determined.
            var trimDepth = coreMemory.IsTotal || (Object.ReferenceEquals(raw, null) && ExpressionUtility.IsPrimitive(kind));

            // Account for the memory overhead of the core result
            var coreBytes = coreMemory.Bytes ?? EvaluationMemory.CalculateBytes(raw ?? val);
            context.Memory.AddAmount(Level, coreBytes, trimDepth);

            // Account for the memory overhead of the conversion result
            if (!Object.ReferenceEquals(raw, null))
            {
                var conversionBytes = EvaluationMemory.CalculateBytes(val);
                context.Memory.AddAmount(Level, conversionBytes);
            }

            var result = new EvaluationResult(context, Level, val, kind, raw);

            // Store the trace result
            if (this.TraceFullyExpanded)
            {
                context.SetTraceResult(this, result);
            }

            return result;
        }

        public abstract String ConvertToExpression();

        internal abstract String ConvertToExpandedExpression(EvaluationContext context);

        /// <summary>
        /// Evaluates the node
        /// </summary>
        /// <param name="context">The current expression context</param>
        /// <param name="resultMemory">
        /// Helps determine how much memory is being consumed across the evaluation of the expression.
        /// </param>
        protected abstract Object EvaluateCore(
            EvaluationContext context,
            out ResultMemory resultMemory);

        protected MemoryCounter CreateMemoryCounter(EvaluationContext context)
        {
            return new MemoryCounter(this, context.Options.MaxMemory);
        }

        private void TraceTreeResult(
            EvaluationContext context,
            String originalExpression,
            Object result,
            ValueKind kind)
        {
            // Get the expanded expression
            String expandedExpression = ConvertToExpandedExpression(context);

            // Format the result
            String traceValue = ExpressionUtility.FormatValue(context.SecretMasker, result, kind);

            // Only trace the expanded expression if it is meaningfully different (or if always showing)
            if (context.Options.AlwaysTraceExpanded ||
                (!String.Equals(expandedExpression, originalExpression, StringComparison.Ordinal) &&
                !String.Equals(expandedExpression, traceValue, StringComparison.Ordinal)))
            {
                if (!context.Options.AlwaysTraceExpanded &&
                    kind == ValueKind.Number &&
                    String.Equals(expandedExpression, $"'{traceValue}'", StringComparison.Ordinal))
                {
                    // Don't bother tracing the expanded expression when the result is a number and the
                    // expanded expression is a precisely matching string.
                }
                else
                {
                    context.Trace.Info($"Expanded: {expandedExpression}");
                }
            }

            // Always trace the result
            context.Trace.Info($"Result: {traceValue}");
        }

        private static void TraceVerbose(
            EvaluationContext context,
            Int32 level,
            String message)
        {
            context.Trace.Verbose(String.Empty.PadLeft(level * 2, '.') + (message ?? String.Empty));
        }

        private String m_name;
    }
}
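
The tracing comments above describe a small decision: the expanded form is printed only when it adds information beyond the original expression and the final result (unless AlwaysTraceExpanded is set), and a numeric result whose expanded form is just the quoted value is suppressed as well. A standalone sketch of that predicate, as a hypothetical helper rather than SDK code:

using System;

static class ExpandedTraceDemo
{
    // Returns true when the expanded expression is worth printing, following the
    // same checks as TraceTreeResult above.
    static bool ShouldTraceExpanded(
        bool alwaysTraceExpanded,
        string originalExpression,
        string expandedExpression,
        string traceValue,
        bool resultIsNumber)
    {
        if (alwaysTraceExpanded)
        {
            return true;
        }

        bool differsFromOriginal = !String.Equals(expandedExpression, originalExpression, StringComparison.Ordinal);
        bool differsFromResult = !String.Equals(expandedExpression, traceValue, StringComparison.Ordinal);
        bool isQuotedNumberEcho = resultIsNumber &&
            String.Equals(expandedExpression, $"'{traceValue}'", StringComparison.Ordinal);

        return differsFromOriginal && differsFromResult && !isQuotedNumberEcho;
    }

    static void Main()
    {
        // eq(variables.publish, 'true') expanded to eq('true', 'true'): worth showing.
        Console.WriteLine(ShouldTraceExpanded(false, "eq(variables.publish, 'true')", "eq('true', 'true')", "true", false)); // True
        // Original is already fully literal: nothing extra to show.
        Console.WriteLine(ShouldTraceExpanded(false, "eq('true', 'true')", "eq('true', 'true')", "true", false));            // False
    }
}
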

295  src/Sdk/Expressions/Sdk/ExpressionUtility.cs  Normal file
@@ -0,0 +1,295 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references

using System;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace GitHub.Actions.Expressions.Sdk
{
    public static class ExpressionUtility
    {
        internal static Object ConvertToCanonicalValue(
            Object val,
            out ValueKind kind,
            out Object raw)
        {
            raw = null;

            if (Object.ReferenceEquals(val, null))
            {
                kind = ValueKind.Null;
                return null;
            }
            else if (val is Boolean)
            {
                kind = ValueKind.Boolean;
                return val;
            }
            else if (val is Double)
            {
                kind = ValueKind.Number;
                return val;
            }
            else if (val is String)
            {
                kind = ValueKind.String;
                return val;
            }
            else if (val is INull n)
            {
                kind = ValueKind.Null;
                raw = val;
                return null;
            }
            else if (val is IBoolean boolean)
            {
                kind = ValueKind.Boolean;
                raw = val;
                return boolean.GetBoolean();
            }
            else if (val is INumber number)
            {
                kind = ValueKind.Number;
                raw = val;
                return number.GetNumber();
            }
            else if (val is IString str)
            {
                kind = ValueKind.String;
                raw = val;
                return str.GetString();
            }
            else if (val is IReadOnlyObject)
            {
                kind = ValueKind.Object;
                return val;
            }
            else if (val is IReadOnlyArray)
            {
                kind = ValueKind.Array;
                return val;
            }
            else if (!val.GetType().GetTypeInfo().IsClass)
            {
                if (val is Decimal || val is Byte || val is SByte || val is Int16 || val is UInt16 || val is Int32 || val is UInt32 || val is Int64 || val is UInt64 || val is Single)
                {
                    kind = ValueKind.Number;
                    return Convert.ToDouble(val);
                }
                else if (val is Enum)
                {
                    var strVal = String.Format(CultureInfo.InvariantCulture, "{0:G}", val);
                    if (Double.TryParse(strVal, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out Double doubleValue))
                    {
                        kind = ValueKind.Number;
                        return doubleValue;
                    }

                    kind = ValueKind.String;
                    return strVal;
                }
            }

            kind = ValueKind.Object;
            return val;
        }

        /// <summary>
        /// Converts a string into its parse token representation. Useful when programmatically constructing an expression.
        /// For example the string "hello world" returns 'hello world'. Note, null will return the null token; pass empty string
        /// if you want the empty string token instead.
        /// </summary>
        public static String ConvertToParseToken(String str)
        {
            if (str == null)
            {
                return FormatValue(null, null, ValueKind.Null);
            }

            return FormatValue(null, str, ValueKind.String);
        }

        /// <summary>
        /// Converts a number into its parse token representation. Useful when programmatically constructing an expression.
        /// </summary>
        public static String ConvertToParseToken(Double d)
        {
            return FormatValue(null, d, ValueKind.Number);
        }

        /// <summary>
        /// Converts a boolean into its parse token representation. Useful when programmatically constructing an expression.
        /// </summary>
        public static String ConvertToParseToken(Boolean b)
        {
            return FormatValue(null, b, ValueKind.Boolean);
        }

        internal static String FormatValue(
            ISecretMasker secretMasker,
            EvaluationResult evaluationResult)
        {
            return FormatValue(secretMasker, evaluationResult.Value, evaluationResult.Kind);
        }

        internal static String FormatValue(
            ISecretMasker secretMasker,
            Object value,
            ValueKind kind)
        {
            switch (kind)
            {
                case ValueKind.Null:
                    return ExpressionConstants.Null;

                case ValueKind.Boolean:
                    return ((Boolean)value) ? ExpressionConstants.True : ExpressionConstants.False;

                case ValueKind.Number:
                    var strNumber = ((Double)value).ToString(ExpressionConstants.NumberFormat, CultureInfo.InvariantCulture);
                    return secretMasker != null ? secretMasker.MaskSecrets(strNumber) : strNumber;

                case ValueKind.String:
                    // Mask secrets before string-escaping.
                    var strValue = secretMasker != null ? secretMasker.MaskSecrets(value as String) : value as String;
                    return $"'{StringEscape(strValue)}'";

                case ValueKind.Array:
                case ValueKind.Object:
                    return kind.ToString();

                default: // Should never reach here.
                    throw new NotSupportedException($"Unable to convert to expanded expression. Unexpected value kind: {kind}");
            }
        }

        internal static bool IsLegalKeyword(String str)
        {
            if (String.IsNullOrEmpty(str))
            {
                return false;
            }

            var first = str[0];
            if ((first >= 'a' && first <= 'z') ||
                (first >= 'A' && first <= 'Z') ||
                first == '_')
            {
                for (var i = 1; i < str.Length; i++)
                {
                    var c = str[i];
                    if ((c >= 'a' && c <= 'z') ||
                        (c >= 'A' && c <= 'Z') ||
                        (c >= '0' && c <= '9') ||
                        c == '_' ||
                        c == '-')
                    {
                        // OK
                    }
                    else
                    {
                        return false;
                    }
                }

                return true;
            }
            else
            {
                return false;
            }
        }

        internal static Boolean IsPrimitive(ValueKind kind)
        {
            switch (kind)
            {
                case ValueKind.Null:
                case ValueKind.Boolean:
                case ValueKind.Number:
                case ValueKind.String:
                    return true;
                default:
                    return false;
            }
        }

        /// <summary>
        /// The rules here attempt to follow Javascript rules for coercing a string into a number
        /// for comparison. That is, the Number() function in Javascript.
        /// </summary>
        internal static Double ParseNumber(String str)
        {
            // Trim
            str = str?.Trim() ?? String.Empty;

            // Empty
            if (String.IsNullOrEmpty(str))
            {
                return 0d;
            }
            // Try parse
            else if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var value))
            {
                return value;
            }
            // Check for 0x[0-9a-fA-F]+
            else if (str[0] == '0' &&
                str.Length > 2 &&
                str[1] == 'x' &&
                str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F')))
            {
                // Try parse
                if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integer))
                {
                    return (Double)integer;
                }

                // Otherwise exceeds range
            }
            // Check for 0o[0-9]+
            else if (str[0] == '0' &&
                str.Length > 2 &&
                str[1] == 'o' &&
                str.Skip(2).All(x => x >= '0' && x <= '7'))
            {
                // Try parse
                var integer = default(Int32?);
                try
                {
                    integer = Convert.ToInt32(str.Substring(2), 8);
                }
                // Otherwise exceeds range
                catch (Exception)
                {
                }

                // Success
                if (integer != null)
                {
                    return (Double)integer.Value;
                }
            }
            // Infinity
            else if (String.Equals(str, ExpressionConstants.Infinity, StringComparison.Ordinal))
            {
                return Double.PositiveInfinity;
            }
            // -Infinity
            else if (String.Equals(str, ExpressionConstants.NegativeInfinity, StringComparison.Ordinal))
            {
                return Double.NegativeInfinity;
            }

            // Otherwise NaN
            return Double.NaN;
        }

        public static String StringEscape(String value)
        {
            return String.IsNullOrEmpty(value) ? String.Empty : value.Replace("'", "''");
        }
    }
}
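
The ParseNumber summary above says the coercion mirrors JavaScript's Number(): empty or whitespace-only input becomes 0, hex and octal prefixes are honored, Infinity is recognized, and anything else is NaN. A few hypothetical inputs and their expected outcomes, with the ordinary decimal path exercised directly (ParseNumber itself is internal, so the demo calls Double.TryParse with the same flags):

using System;
using System.Globalization;

class ParseNumberDemo
{
    static void Main()
    {
        // Expected ParseNumber results, per the rules above:
        //   ""         -> 0
        //   " 42 "     -> 42
        //   "0x1A"     -> 26        (hex, via Int32.TryParse with AllowHexSpecifier)
        //   "0o17"     -> 15        (octal, via Convert.ToInt32(..., 8))
        //   "Infinity" -> +Infinity
        //   "abc"      -> NaN

        // The ordinary decimal path is plain Double.TryParse with the invariant culture:
        Double.TryParse("42.5e1", NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent,
            CultureInfo.InvariantCulture, out var value);
        Console.WriteLine(value); // 425
    }
}
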

43  src/Sdk/Expressions/Sdk/Function.cs  Normal file
@@ -0,0 +1,43 @@
using System;
using System.Globalization;
using System.Linq;

namespace GitHub.Actions.Expressions.Sdk
{
    public abstract class Function : Container
    {
        /// <summary>
        /// Generally this should not be overridden. True indicates the result of the node is traced as part of the
        /// "expanded" trace information. Otherwise the node expression is printed, and parameters to the node may or
        /// may not be fully expanded - depending on each respective parameter's trace-fully-expanded setting.
        ///
        /// The purpose is so the end user can understand how their expression expanded at run time. For example, consider
        /// the expression: eq(variables.publish, 'true'). The runtime-expanded expression may be: eq('true', 'true')
        /// </summary>
        protected override Boolean TraceFullyExpanded => true;

        public sealed override String ConvertToExpression()
        {
            return String.Format(
                CultureInfo.InvariantCulture,
                "{0}({1})",
                Name,
                String.Join(", ", Parameters.Select(x => x.ConvertToExpression())));
        }

        internal sealed override String ConvertToExpandedExpression(EvaluationContext context)
        {
            // Check if the result was stored
            if (context.TryGetTraceResult(this, out String result))
            {
                return result;
            }

            return String.Format(
                CultureInfo.InvariantCulture,
                "{0}({1})",
                Name,
                String.Join(", ", Parameters.Select(x => x.ConvertToExpandedExpression(context))));
        }
    }
}
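
The doc comment above is written from the perspective of function authors, so for illustration here is a hypothetical consumer-defined Function node. It is only a sketch: how such a node gets registered with the expression parser is outside this diff, and it assumes EvaluationResult exposes a string conversion (ConvertToString, as used by Contains below) to subclass authors.

using System;
using GitHub.Actions.Expressions.Sdk;

// Hypothetical function node: startsWith(haystack, prefix).
// Assumption: EvaluationResult.ConvertToString is accessible to external callers.
internal sealed class StartsWith : Function
{
    protected override Object EvaluateCore(
        EvaluationContext context,
        out ResultMemory resultMemory)
    {
        resultMemory = null;
        var left = Parameters[0].Evaluate(context);
        var right = Parameters[1].Evaluate(context);
        return left.ConvertToString().StartsWith(right.ConvertToString(), StringComparison.OrdinalIgnoreCase);
    }
}

Because Function already overrides TraceFullyExpanded to true and seals the ConvertToExpression/ConvertToExpandedExpression formatting, a subclass only supplies EvaluateCore.
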

46  src/Sdk/Expressions/Sdk/Functions/Contains.cs  Normal file
@@ -0,0 +1,46 @@
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references

using System;

namespace GitHub.Actions.Expressions.Sdk.Functions
{
    internal sealed class Contains : Function
    {
        protected sealed override Boolean TraceFullyExpanded => false;

        protected sealed override Object EvaluateCore(
            EvaluationContext context,
            out ResultMemory resultMemory)
        {
            resultMemory = null;
            var left = Parameters[0].Evaluate(context);
            if (left.IsPrimitive)
            {
                var leftString = left.ConvertToString();

                var right = Parameters[1].Evaluate(context);
                if (right.IsPrimitive)
                {
                    var rightString = right.ConvertToString();
                    return leftString.IndexOf(rightString, StringComparison.OrdinalIgnoreCase) >= 0;
                }
            }
            else if (left.TryGetCollectionInterface(out var collection) &&
                collection is IReadOnlyArray array &&
                array.Count > 0)
            {
                var right = Parameters[1].Evaluate(context);
                foreach (var item in array)
                {
                    var itemResult = EvaluationResult.CreateIntermediateResult(context, item);
                    if (right.AbstractEqual(itemResult))
                    {
                        return true;
                    }
                }
            }

            return false;
        }
    }
}

Some files were not shown because too many files have changed in this diff.