Mirror of https://github.com/actions/runner.git (synced 2025-12-10 12:36:23 +00:00)

Compare commits: main...remove-use
2 Commits
| Author | SHA1 | Date |
|---|---|---|
| | ac7f83f61d | |
| | d061d61093 | |
@@ -4,13 +4,10 @@
"features": {
"ghcr.io/devcontainers/features/docker-in-docker:1": {},
"ghcr.io/devcontainers/features/dotnet": {
"version": "8.0.416"
"version": "6.0.415"
},
"ghcr.io/devcontainers/features/node:1": {
"version": "20"
},
"ghcr.io/devcontainers/features/sshd:1": {
"version": "latest"
"version": "16"
}
},
"customizations": {
6 .github/ISSUE_TEMPLATE/config.yml vendored
@@ -1,13 +1,13 @@
blank_issues_enabled: false
contact_links:
- name: 🛑 Request a feature in the runner application
url: https://github.com/orgs/community/discussions/categories/actions
about: If you have feature requests for GitHub Actions, please use the Actions section on the Github Product Feedback page.
url: https://github.com/orgs/community/discussions/categories/actions-and-packages
about: If you have feature requests for GitHub Actions, please use the Actions and Packages section on the Github Product Feedback page.
- name: ✅ Support for GitHub Actions
url: https://github.community/c/code-to-cloud/52
about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum.
- name: ✅ Feedback and suggestions for GitHub Actions
url: https://github.com/github/feedback/discussions/categories/actions
url: https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback
about: If you have feedback or suggestions about GitHub Actions, please open a discussion (or add to an existing one) in the GitHub Actions Feedback. GitHub Actions Product Managers and Engineers monitor the feedback forum.
- name: ‼️ GitHub Security Bug Bounty
url: https://bounty.github.com/
25 .github/copilot-instructions.md vendored
@@ -1,25 +0,0 @@
## Making changes

### Tests

Whenever possible, changes should be accompanied by non-trivial tests that meaningfully exercise the core functionality of the new code being introduced.

All tests are in the `Test/` directory at the repo root. Fast unit tests are in the `Test/L0` directory and by convention have the suffix `L0.cs`. For example: unit tests for a hypothetical `src/Runner.Worker/Foo.cs` would go in `src/Test/L0/Worker/FooL0.cs`.
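A minimal sketch of such a test file, assuming the project's xUnit-style conventions; the namespace and the placeholder assertion are illustrative assumptions, not taken from the repository:

```csharp
// Hypothetical example: tests for src/Runner.Worker/Foo.cs would live in
// src/Test/L0/Worker/FooL0.cs, in a class named FooL0.
using Xunit;

namespace GitHub.Runner.Common.Tests.Worker
{
    public sealed class FooL0
    {
        [Fact]
        [Trait("Level", "L0")]
        public void ReturnsExpectedValue()
        {
            // Placeholder assertion standing in for a meaningful exercise
            // of the hypothetical Foo class's core behavior.
            Assert.Equal(4, 2 + 2);
        }
    }
}
```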
Run tests using this command:

```sh
cd src && ./dev.sh test
```

### Formatting

After editing .cs files, always format the code using this command:

```sh
cd src && ./dev.sh format
```

### Feature Flags

Wherever possible, all changes should be safeguarded by a feature flag; `Features` are declared in [Constants.cs](src/Runner.Common/Constants.cs).
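A hedged sketch of a flag declaration and a guarded code path; the flag name, class, and `enabledFeatureFlags` collection below are illustrative assumptions, not the actual contents of Constants.cs:

```csharp
using System.Collections.Generic;

// Illustrative only: the real feature constants live in src/Runner.Common/Constants.cs.
public static class Features
{
    public const string UseNewBehavior = "actions_runner_use_new_behavior";
}

public class Worker
{
    // Only take the new code path when the flag is present in the enabled set.
    public string Run(ISet<string> enabledFeatureFlags) =>
        enabledFeatureFlags.Contains(Features.UseNewBehavior)
            ? "new behavior"
            : "existing behavior";
}
```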
5 .github/dependabot.yml vendored
@@ -5,11 +5,6 @@ updates:
schedule:
interval: "daily"
target-branch: "main"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
target-branch: "main"
- package-ecosystem: "nuget"
directory: "/src"
schedule:
82 .github/workflows/build.yml vendored
@@ -14,9 +14,6 @@ on:
paths-ignore:
- '**.md'

permissions:
contents: read

jobs:
build:
strategy:
@@ -44,7 +41,7 @@ jobs:
devScript: ./dev.sh

- runtime: win-x64
os: windows-latest
os: windows-2019
devScript: ./dev

- runtime: win-arm64
@@ -53,7 +50,7 @@ jobs:

runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v3

# Build runner layout
- name: Build & Layout Release
@@ -61,6 +58,29 @@ jobs:
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
working-directory: src

# Check runtime/externals hash
- name: Compute/Compare runtime and externals Hash
shell: bash
run: |
echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
echo "Current Externals hash result: $EXTERNALS_HASH"

NeedUpdate=0
if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
NeedUpdate=1
fi

if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
NeedUpdate=1
fi

exit $NeedUpdate
env:
DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}

# Run tests
- name: L0
run: |
@@ -72,59 +92,17 @@ jobs:
- name: Package Release
if: github.event_name != 'pull_request'
run: |
${{ matrix.devScript }} package Release ${{ matrix.runtime }}
${{ matrix.devScript }} package Release
working-directory: src

# Upload runner package tar.gz/zip as artifact
- name: Publish Artifact
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v2
with:
name: runner-package-${{ matrix.runtime }}
path: |
_package

docker:
strategy:
matrix:
os: [ ubuntu-latest, ubuntu-24.04-arm ]
include:
- os: ubuntu-latest
docker_platform: linux/amd64
- os: ubuntu-24.04-arm
docker_platform: linux/arm64
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v5

- name: Get latest runner version
id: latest_runner
uses: actions/github-script@v7
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const release = await github.rest.repos.getLatestRelease({
owner: 'actions',
repo: 'runner',
});
const version = release.data.tag_name.replace(/^v/, '');
core.setOutput('version', version);

- name: Setup Docker buildx
uses: docker/setup-buildx-action@v3

- name: Build Docker image
uses: docker/build-push-action@v6
with:
context: ./images
load: true
platforms: ${{ matrix.docker_platform }}
tags: |
${{ github.sha }}:latest
build-args: |
RUNNER_VERSION=${{ steps.latest_runner.outputs.version }}

- name: Test Docker image
run: |
docker run --rm ${{ github.sha }}:latest ./run.sh --version

_package_trims/trim_externals
_package_trims/trim_runtime
_package_trims/trim_runtime_externals
2 .github/workflows/close-bugs-bot.yml vendored
@@ -7,7 +7,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v10
- uses: actions/stale@v8
with:
close-issue-message: "This issue does not seem to be a problem with the runner application, it concerns the GitHub actions platform more generally. Could you please post your feedback on the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃"
exempt-issue-labels: "keep"
4 .github/workflows/close-features-bot.yml vendored
@@ -7,9 +7,9 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v10
- uses: actions/stale@v8
with:
close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions-and-packages) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃"
exempt-issue-labels: "keep"
stale-issue-label: "actions-feature"
only-labels: "actions-feature"
6 .github/workflows/codeql.yml vendored
@@ -23,11 +23,11 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v5
uses: actions/checkout@v3

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v4
uses: github/codeql-action/init@v2
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
@@ -38,4 +38,4 @@ jobs:
working-directory: src

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v4
uses: github/codeql-action/analyze@v2
211 .github/workflows/dependency-check.yml vendored
@@ -1,211 +0,0 @@
|
||||
name: Dependency Status Check
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
check_type:
|
||||
description: "Type of dependency check"
|
||||
required: false
|
||||
default: "all"
|
||||
type: choice
|
||||
options:
|
||||
- all
|
||||
- node
|
||||
- dotnet
|
||||
- docker
|
||||
- npm
|
||||
schedule:
|
||||
- cron: "0 11 * * 1" # Weekly on Monday at 11 AM
|
||||
|
||||
jobs:
|
||||
dependency-status:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
node20-status: ${{ steps.check-versions.outputs.node20-status }}
|
||||
node24-status: ${{ steps.check-versions.outputs.node24-status }}
|
||||
dotnet-status: ${{ steps.check-versions.outputs.dotnet-status }}
|
||||
docker-status: ${{ steps.check-versions.outputs.docker-status }}
|
||||
buildx-status: ${{ steps.check-versions.outputs.buildx-status }}
|
||||
npm-vulnerabilities: ${{ steps.check-versions.outputs.npm-vulnerabilities }}
|
||||
open-dependency-prs: ${{ steps.check-prs.outputs.open-dependency-prs }}
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: "20"
|
||||
|
||||
- name: Check dependency versions
|
||||
id: check-versions
|
||||
run: |
|
||||
echo "## Dependency Status Report" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Generated on: $(date)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
# Check Node versions
|
||||
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "node" ]]; then
|
||||
echo "### Node.js Versions" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
VERSIONS_JSON=$(curl -s https://raw.githubusercontent.com/actions/node-versions/main/versions-manifest.json)
|
||||
LATEST_NODE20=$(echo "$VERSIONS_JSON" | jq -r '.[] | select(.version | startswith("20.")) | .version' | head -1)
|
||||
LATEST_NODE24=$(echo "$VERSIONS_JSON" | jq -r '.[] | select(.version | startswith("24.")) | .version' | head -1)
|
||||
|
||||
CURRENT_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
CURRENT_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
|
||||
NODE20_STATUS="✅ up-to-date"
|
||||
NODE24_STATUS="✅ up-to-date"
|
||||
|
||||
if [ "$CURRENT_NODE20" != "$LATEST_NODE20" ]; then
|
||||
NODE20_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
if [ "$CURRENT_NODE24" != "$LATEST_NODE24" ]; then
|
||||
NODE24_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
echo "| Version | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|---------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Node 20 | $CURRENT_NODE20 | $LATEST_NODE20 | $NODE20_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Node 24 | $CURRENT_NODE24 | $LATEST_NODE24 | $NODE24_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
echo "node20-status=$NODE20_STATUS" >> $GITHUB_OUTPUT
|
||||
echo "node24-status=$NODE24_STATUS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Check .NET version
|
||||
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "dotnet" ]]; then
|
||||
echo "### .NET SDK Version" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
current_dotnet_version=$(jq -r .sdk.version ./src/global.json)
|
||||
current_major_minor=$(echo "$current_dotnet_version" | cut -d '.' -f 1,2)
|
||||
latest_dotnet_version=$(curl -sb -H "Accept: application/json" "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/$current_major_minor/latest.version")
|
||||
|
||||
DOTNET_STATUS="✅ up-to-date"
|
||||
if [ "$current_dotnet_version" != "$latest_dotnet_version" ]; then
|
||||
DOTNET_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
echo "| Component | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|-----------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| .NET SDK | $current_dotnet_version | $latest_dotnet_version | $DOTNET_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
echo "dotnet-status=$DOTNET_STATUS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Check Docker versions
|
||||
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "docker" ]]; then
|
||||
echo "### Docker Versions" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
current_docker=$(grep "ARG DOCKER_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||
current_buildx=$(grep "ARG BUILDX_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||
|
||||
latest_docker=$(curl -s https://download.docker.com/linux/static/stable/x86_64/ | grep -o 'docker-[0-9]*\.[0-9]*\.[0-9]*\.tgz' | sort -V | tail -n 1 | sed 's/docker-\(.*\)\.tgz/\1/')
|
||||
latest_buildx=$(curl -s https://api.github.com/repos/docker/buildx/releases/latest | jq -r '.tag_name' | sed 's/^v//')
|
||||
|
||||
DOCKER_STATUS="✅ up-to-date"
|
||||
BUILDX_STATUS="✅ up-to-date"
|
||||
|
||||
if [ "$current_docker" != "$latest_docker" ]; then
|
||||
DOCKER_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
if [ "$current_buildx" != "$latest_buildx" ]; then
|
||||
BUILDX_STATUS="⚠️ outdated"
|
||||
fi
|
||||
|
||||
echo "| Component | Current | Latest | Status |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|-----------|---------|--------|--------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Docker | $current_docker | $latest_docker | $DOCKER_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Docker Buildx | $current_buildx | $latest_buildx | $BUILDX_STATUS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
echo "docker-status=$DOCKER_STATUS" >> $GITHUB_OUTPUT
|
||||
echo "buildx-status=$BUILDX_STATUS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# Check npm vulnerabilities
|
||||
if [[ "${{ github.event.inputs.check_type }}" == "all" || "${{ github.event.inputs.check_type }}" == "npm" ]]; then
|
||||
echo "### NPM Security Audit" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
cd src/Misc/expressionFunc/hashFiles
|
||||
npm install --silent
|
||||
|
||||
AUDIT_OUTPUT=""
|
||||
AUDIT_EXIT_CODE=0
|
||||
# Run npm audit and capture output and exit code
|
||||
if ! AUDIT_OUTPUT=$(npm audit --json 2>&1); then
|
||||
AUDIT_EXIT_CODE=$?
|
||||
fi
|
||||
|
||||
# Check if output is valid JSON
|
||||
if echo "$AUDIT_OUTPUT" | jq . >/dev/null 2>&1; then
|
||||
VULN_COUNT=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.total // 0')
|
||||
# Ensure VULN_COUNT is a number
|
||||
VULN_COUNT=$(echo "$VULN_COUNT" | grep -o '[0-9]*' | head -1)
|
||||
VULN_COUNT=${VULN_COUNT:-0}
|
||||
|
||||
NPM_STATUS="✅ no vulnerabilities"
|
||||
if [ "$VULN_COUNT" -gt 0 ] 2>/dev/null; then
|
||||
NPM_STATUS="⚠️ $VULN_COUNT vulnerabilities found"
|
||||
|
||||
# Get vulnerability details
|
||||
HIGH_VULNS=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.high // 0')
|
||||
CRITICAL_VULNS=$(echo "$AUDIT_OUTPUT" | jq '.metadata.vulnerabilities.critical // 0')
|
||||
|
||||
echo "| Severity | Count |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|----------|-------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Critical | $CRITICAL_VULNS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| High | $HIGH_VULNS |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
else
|
||||
echo "No npm vulnerabilities found ✅" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
else
|
||||
NPM_STATUS="❌ npm audit failed"
|
||||
echo "npm audit failed to run or returned invalid JSON ❌" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Exit code: $AUDIT_EXIT_CODE" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Output: $AUDIT_OUTPUT" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
echo "npm-vulnerabilities=$NPM_STATUS" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Check for open dependency PRs
|
||||
id: check-prs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
echo "### Open Dependency PRs" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
# Get open PRs with dependency label
|
||||
OPEN_PRS=$(gh pr list --label "dependencies" --state open --json number,title,url)
|
||||
PR_COUNT=$(echo "$OPEN_PRS" | jq '. | length')
|
||||
|
||||
if [ "$PR_COUNT" -gt 0 ]; then
|
||||
echo "Found $PR_COUNT open dependency PR(s):" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "$OPEN_PRS" | jq -r '.[] | "- [#\(.number)](\(.url)) \(.title)"' >> $GITHUB_STEP_SUMMARY
|
||||
else
|
||||
echo "No open dependency PRs found ✅" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "open-dependency-prs=$PR_COUNT" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Summary
|
||||
run: |
|
||||
echo "### Summary" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Check for open PRs with the \`dependency\` label before releases" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Review and merge dependency updates regularly" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Critical vulnerabilities should be addressed immediately" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Automated workflows run weekly to check for updates:**" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Node.js versions (Mondays at 6 AM)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- NPM audit fix (Mondays at 7 AM)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- .NET SDK updates (Mondays at midnight)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- Docker/Buildx updates (Mondays at midnight)" >> $GITHUB_STEP_SUMMARY
|
||||
166 .github/workflows/docker-buildx-upgrade.yml vendored
@@ -1,166 +0,0 @@
|
||||
name: "Docker/Buildx Version Upgrade"
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 * * 1" # Run every Monday at midnight
|
||||
workflow_dispatch: # Allow manual triggering
|
||||
|
||||
jobs:
|
||||
check-versions:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
DOCKER_SHOULD_UPDATE: ${{ steps.check_docker_version.outputs.SHOULD_UPDATE }}
|
||||
DOCKER_LATEST_VERSION: ${{ steps.check_docker_version.outputs.LATEST_VERSION }}
|
||||
DOCKER_CURRENT_VERSION: ${{ steps.check_docker_version.outputs.CURRENT_VERSION }}
|
||||
BUILDX_SHOULD_UPDATE: ${{ steps.check_buildx_version.outputs.SHOULD_UPDATE }}
|
||||
BUILDX_LATEST_VERSION: ${{ steps.check_buildx_version.outputs.LATEST_VERSION }}
|
||||
BUILDX_CURRENT_VERSION: ${{ steps.check_buildx_version.outputs.CURRENT_VERSION }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Check Docker version
|
||||
id: check_docker_version
|
||||
shell: bash
|
||||
run: |
|
||||
# Extract current Docker version from Dockerfile
|
||||
current_version=$(grep "ARG DOCKER_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||
|
||||
# Fetch latest Docker Engine version from Docker's download site
|
||||
# This gets the latest Linux static binary version which matches what's used in the Dockerfile
|
||||
latest_version=$(curl -s https://download.docker.com/linux/static/stable/x86_64/ | grep -o 'docker-[0-9]*\.[0-9]*\.[0-9]*\.tgz' | sort -V | tail -n 1 | sed 's/docker-\(.*\)\.tgz/\1/')
|
||||
|
||||
# Extra check to ensure we got a valid version
|
||||
if [[ ! $latest_version =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
echo "Failed to retrieve a valid Docker version"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
should_update=0
|
||||
[ "$current_version" != "$latest_version" ] && should_update=1
|
||||
|
||||
echo "CURRENT_VERSION=${current_version}" >> $GITHUB_OUTPUT
|
||||
echo "LATEST_VERSION=${latest_version}" >> $GITHUB_OUTPUT
|
||||
echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Check Buildx version
|
||||
id: check_buildx_version
|
||||
shell: bash
|
||||
run: |
|
||||
# Extract current Buildx version from Dockerfile
|
||||
current_version=$(grep "ARG BUILDX_VERSION=" ./images/Dockerfile | cut -d'=' -f2)
|
||||
|
||||
# Fetch latest Buildx version
|
||||
latest_version=$(curl -s https://api.github.com/repos/docker/buildx/releases/latest | jq -r '.tag_name' | sed 's/^v//')
|
||||
|
||||
should_update=0
|
||||
[ "$current_version" != "$latest_version" ] && should_update=1
|
||||
|
||||
echo "CURRENT_VERSION=${current_version}" >> $GITHUB_OUTPUT
|
||||
echo "LATEST_VERSION=${latest_version}" >> $GITHUB_OUTPUT
|
||||
echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Create annotations for versions
|
||||
run: |
|
||||
docker_should_update="${{ steps.check_docker_version.outputs.SHOULD_UPDATE }}"
|
||||
buildx_should_update="${{ steps.check_buildx_version.outputs.SHOULD_UPDATE }}"
|
||||
|
||||
# Show annotation if only Docker needs update
|
||||
if [[ "$docker_should_update" == "1" && "$buildx_should_update" == "0" ]]; then
|
||||
echo "::warning ::Docker version (${{ steps.check_docker_version.outputs.LATEST_VERSION }}) needs update but Buildx is current. Only updating when both need updates."
|
||||
fi
|
||||
|
||||
# Show annotation if only Buildx needs update
|
||||
if [[ "$docker_should_update" == "0" && "$buildx_should_update" == "1" ]]; then
|
||||
echo "::warning ::Buildx version (${{ steps.check_buildx_version.outputs.LATEST_VERSION }}) needs update but Docker is current. Only updating when both need updates."
|
||||
fi
|
||||
|
||||
# Show annotation when both are current
|
||||
if [[ "$docker_should_update" == "0" && "$buildx_should_update" == "0" ]]; then
|
||||
echo "::warning ::Latest Docker version is ${{ steps.check_docker_version.outputs.LATEST_VERSION }} and Buildx version is ${{ steps.check_buildx_version.outputs.LATEST_VERSION }}. No updates needed."
|
||||
fi
|
||||
|
||||
update-versions:
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: write
|
||||
needs: [check-versions]
|
||||
if: ${{ needs.check-versions.outputs.DOCKER_SHOULD_UPDATE == 1 && needs.check-versions.outputs.BUILDX_SHOULD_UPDATE == 1 }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Update Docker version
|
||||
shell: bash
|
||||
run: |
|
||||
latest_version="${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }}"
|
||||
current_version="${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }}"
|
||||
|
||||
# Update version in Dockerfile
|
||||
sed -i "s/ARG DOCKER_VERSION=$current_version/ARG DOCKER_VERSION=$latest_version/g" ./images/Dockerfile
|
||||
|
||||
- name: Update Buildx version
|
||||
shell: bash
|
||||
run: |
|
||||
latest_version="${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
|
||||
current_version="${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }}"
|
||||
|
||||
# Update version in Dockerfile
|
||||
sed -i "s/ARG BUILDX_VERSION=$current_version/ARG BUILDX_VERSION=$latest_version/g" ./images/Dockerfile
|
||||
|
||||
- name: Commit changes and create Pull Request
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Setup branch and commit information
|
||||
branch_name="feature/docker-buildx-upgrade"
|
||||
commit_message="Upgrade Docker to v${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Buildx to v${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
|
||||
pr_title="Update Docker to v${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Buildx to v${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}"
|
||||
|
||||
# Configure git
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||
|
||||
# Create branch or switch to it if it exists
|
||||
if git show-ref --quiet refs/remotes/origin/$branch_name; then
|
||||
git fetch origin
|
||||
git checkout -B "$branch_name" origin/$branch_name
|
||||
else
|
||||
git checkout -b "$branch_name"
|
||||
fi
|
||||
|
||||
# Commit and push changes
|
||||
git commit -a -m "$commit_message"
|
||||
git push --force origin "$branch_name"
|
||||
|
||||
# Create PR body using here-doc for proper formatting
|
||||
cat > pr_body.txt << 'EOF'
|
||||
Automated Docker and Buildx version update:
|
||||
|
||||
- Docker: ${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }} → ${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }}
|
||||
- Buildx: ${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }} → ${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}
|
||||
|
||||
This update ensures we're using the latest stable Docker and Buildx versions for security and performance improvements.
|
||||
|
||||
**Release notes:** https://docs.docker.com/engine/release-notes/
|
||||
|
||||
**Next steps:**
|
||||
- Review the version changes
|
||||
- Verify container builds work as expected
|
||||
- Test multi-platform builds if applicable
|
||||
- Merge when ready
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [Docker/Buildx Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/docker-buildx-upgrade.yml)
|
||||
EOF
|
||||
|
||||
# Create PR
|
||||
gh pr create -B main -H "$branch_name" \
|
||||
--title "$pr_title" \
|
||||
--label "dependencies" \
|
||||
--label "dependencies-weekly-check" \
|
||||
--label "dependencies-not-dependabot" \
|
||||
--label "docker" \
|
||||
--body-file pr_body.txt
|
||||
241 .github/workflows/dotnet-upgrade.yml vendored
@@ -2,20 +2,20 @@ name: "DotNet SDK Upgrade"
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 8 * * 1" # Weekly on Monday at 8 AM UTC (independent of Node.js/NPM)
|
||||
- cron: '0 0 * * 1'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
dotnet-update:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
outputs:
|
||||
SHOULD_UPDATE: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE }}
|
||||
BRANCH_EXISTS: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS }}
|
||||
DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v3
|
||||
- name: Get current major minor version
|
||||
id: fetch_current_version
|
||||
shell: bash
|
||||
@@ -37,7 +37,7 @@ jobs:
|
||||
|
||||
# check if git branch already exists for the upgrade
|
||||
branch_already_exists=0
|
||||
|
||||
|
||||
if git ls-remote --heads --exit-code origin refs/heads/feature/dotnetsdk-upgrade/${latest_patch_version};
|
||||
then
|
||||
branch_already_exists=1
|
||||
@@ -51,7 +51,7 @@ jobs:
|
||||
run: echo "::error links::feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} https://github.com/actions/runner/tree/feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}::Branch feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} already exists. Please take a look and delete that branch if you wish to recreate"
|
||||
- name: Create a warning annotation if no need to update
|
||||
if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 0 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
|
||||
run: echo "::warning ::Latest DotNet SDK patch is ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}, and we are on ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}. No need to update"
|
||||
run: echo "::warning ::Latest DotNet SDK patch is ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}, and we are on ${{ steps.fetch_latest_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}. No need to update"
|
||||
- name: Update patch version
|
||||
if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
|
||||
shell: bash
|
||||
@@ -84,22 +84,223 @@ jobs:
|
||||
git commit -a -m "Upgrade dotnet sdk to v${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
|
||||
git push --set-upstream origin $branch_name
|
||||
|
||||
create-pr:
|
||||
needs: [dotnet-update]
|
||||
build-hashes:
|
||||
if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
|
||||
needs: [dotnet-update]
|
||||
outputs:
|
||||
# pass outputs from this job to create-pr for use
|
||||
DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
|
||||
NEEDS_HASH_UPDATE: ${{ steps.compute-hash.outputs.NEED_UPDATE }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, win-arm64, osx-x64, osx-arm64 ]
|
||||
include:
|
||||
- runtime: linux-x64
|
||||
os: ubuntu-latest
|
||||
devScript: ./dev.sh
|
||||
|
||||
- runtime: linux-arm64
|
||||
os: ubuntu-latest
|
||||
devScript: ./dev.sh
|
||||
|
||||
- runtime: linux-arm
|
||||
os: ubuntu-latest
|
||||
devScript: ./dev.sh
|
||||
|
||||
- runtime: osx-x64
|
||||
os: macOS-latest
|
||||
devScript: ./dev.sh
|
||||
|
||||
- runtime: osx-arm64
|
||||
os: macOS-latest
|
||||
devScript: ./dev.sh
|
||||
|
||||
- runtime: win-x64
|
||||
os: windows-2019
|
||||
devScript: ./dev
|
||||
|
||||
- runtime: win-arm64
|
||||
os: windows-latest
|
||||
devScript: ./dev
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
|
||||
# Build runner layout
|
||||
- name: Build & Layout Release
|
||||
run: |
|
||||
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
|
||||
working-directory: src
|
||||
|
||||
# Check runtime/externals hash
|
||||
- name: Compute/Compare runtime and externals Hash
|
||||
id: compute-hash
|
||||
continue-on-error: true
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
|
||||
echo "Current Externals hash result: $EXTERNALS_HASH"
|
||||
|
||||
NeedUpdate=0
|
||||
if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
|
||||
echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
|
||||
|
||||
echo "EXTERNAL_HASH=$EXTERNALS_HASH" >> $GITHUB_OUTPUT
|
||||
NeedUpdate=1
|
||||
fi
|
||||
|
||||
if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
|
||||
echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
|
||||
|
||||
echo "DOTNET_RUNTIME_HASH=$DOTNET_RUNTIME_HASH" >> $GITHUB_OUTPUT
|
||||
NeedUpdate=1
|
||||
fi
|
||||
|
||||
echo "NEED_UPDATE=$NeedUpdate" >> $GITHUB_OUTPUT
|
||||
env:
|
||||
DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
|
||||
EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
|
||||
- name: update hash
|
||||
if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }}
|
||||
shell: bash
|
||||
run: |
|
||||
ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }}
|
||||
DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }}
|
||||
|
||||
if [ -n "$ExternalHash" ]; then
|
||||
echo "$ExternalHash" > ./src/Misc/contentHash/externals/${{ matrix.runtime }}
|
||||
fi
|
||||
|
||||
if [ -n "$DotNetRuntimeHash" ]; then
|
||||
echo "$DotNetRuntimeHash" > ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
|
||||
fi
|
||||
- name: cache updated hashes
|
||||
if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }}
|
||||
uses: actions/cache/save@v3
|
||||
with:
|
||||
enableCrossOsArchive: true
|
||||
path: |
|
||||
./src/Misc/contentHash/externals/${{ matrix.runtime }}
|
||||
./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
|
||||
key: compute-hashes-${{ matrix.runtime }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
|
||||
|
||||
|
||||
hash-update:
|
||||
needs: [build-hashes]
|
||||
if: ${{ needs.build-hashes.outputs.NEEDS_HASH_UPDATE == 1 }}
|
||||
outputs:
|
||||
# pass outputs from this job to create-pr for use
|
||||
DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.build-hashes.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
- name: Create Pull Request
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --label "dependencies" --label "dependencies-weekly-check" --label "dependencies-not-dependabot" --label "dotnet" --body "
|
||||
https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
- name: Restore cached hashes - linux-x64
|
||||
id: cache-restore-linux-x64
|
||||
uses: actions/cache/restore@v3
|
||||
with:
|
||||
enableCrossOsArchive: true
|
||||
path: |
|
||||
./src/Misc/contentHash/externals/linux-x64
|
||||
./src/Misc/contentHash/dotnetRuntime/linux-x64
|
||||
key: compute-hashes-linux-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
|
||||
- name: Restore cached hashes - linux-arm64
|
||||
id: cache-restore-linux-arm64
|
||||
uses: actions/cache/restore@v3
|
||||
with:
|
||||
enableCrossOsArchive: true
|
||||
path: |
|
||||
./src/Misc/contentHash/externals/linux-arm64
|
||||
./src/Misc/contentHash/dotnetRuntime/linux-arm64
|
||||
key: compute-hashes-linux-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
|
||||
- name: Restore cached hashes - linux-arm
|
||||
id: cache-restore-linux-arm
|
||||
uses: actions/cache/restore@v3
|
||||
with:
|
||||
enableCrossOsArchive: true
|
||||
path: |
|
||||
./src/Misc/contentHash/externals/linux-arm
|
||||
./src/Misc/contentHash/dotnetRuntime/linux-arm
|
||||
key: compute-hashes-linux-arm-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
|
||||
- name: Restore cached hashes - osx-x64
|
||||
id: cache-restore-osx-x64
|
||||
uses: actions/cache/restore@v3
|
||||
with:
|
||||
enableCrossOsArchive: true
|
||||
path: |
|
||||
./src/Misc/contentHash/externals/osx-x64
|
||||
./src/Misc/contentHash/dotnetRuntime/osx-x64
|
||||
key: compute-hashes-osx-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
|
||||
- name: Restore cached hashes - osx-arm64
|
||||
id: cache-restore-osx-arm64
|
||||
uses: actions/cache/restore@v3
|
||||
with:
|
||||
enableCrossOsArchive: true
|
||||
path: |
|
||||
./src/Misc/contentHash/externals/osx-arm64
|
||||
./src/Misc/contentHash/dotnetRuntime/osx-arm64
|
||||
key: compute-hashes-osx-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
|
||||
- name: Restore cached hashes - win-x64
|
||||
id: cache-restore-win-x64
|
||||
uses: actions/cache/restore@v3
|
||||
with:
|
||||
enableCrossOsArchive: true
|
||||
path: |
|
||||
./src/Misc/contentHash/externals/win-x64
|
||||
./src/Misc/contentHash/dotnetRuntime/win-x64
|
||||
key: compute-hashes-win-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
|
||||
- name: Restore cached hashes - win-arm64
|
||||
id: cache-restore-win-arm64
|
||||
uses: actions/cache/restore@v3
|
||||
with:
|
||||
enableCrossOsArchive: true
|
||||
path: |
|
||||
./src/Misc/contentHash/externals/win-arm64
|
||||
./src/Misc/contentHash/dotnetRuntime/win-arm64
|
||||
key: compute-hashes-win-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
|
||||
- name: Fetch cached computed hashes
|
||||
if: steps.cache-restore-linux-x64.outputs.cache-hit == 'true' ||
|
||||
steps.cache-restore-linux-arm64.outputs.cache-hit == 'true' ||
|
||||
steps.cache-restore-linux-arm.outputs.cache-hit == 'true' ||
|
||||
steps.cache-restore-win-x64.outputs.cache-hit == 'true' ||
|
||||
steps.cache-restore-win-arm64.outputs.cache-hit == 'true' ||
|
||||
steps.cache-restore-osx-x64.outputs.cache-hit == 'true' ||
|
||||
steps.cache-restore-osx-arm64.outputs.cache-hit == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
Environments=( "linux-x64" "linux-arm64" "linux-arm" "win-x64" "win-arm64" "osx-x64" "osx-arm64" )
|
||||
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git commit -a -m "Update computed hashes"
|
||||
git push --set-upstream origin feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
|
||||
create-pr:
|
||||
needs: [hash-update]
|
||||
outputs:
|
||||
# pass outputs from this job to run-tests for use
|
||||
DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
|
||||
- name: Create Pull Request
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
|
||||
https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
|
||||
|
||||
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [DotNet SDK Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/dotnet-upgrade.yml)"
|
||||
---
|
||||
|
||||
Autogenerated by [DotNet SDK Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/dotnet-upgrade.yml)"
|
||||
|
||||
24 .github/workflows/lint.yml vendored Normal file
@@ -0,0 +1,24 @@
name: Lint

on:
pull_request:
branches: [ main ]

jobs:
build:
name: Lint
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
with:
# Ensure full list of changed files within `super-linter`
fetch-depth: 0
- name: Run linters
uses: github/super-linter@v4
env:
DEFAULT_BRANCH: ${{ github.base_ref }}
EDITORCONFIG_FILE_NAME: .editorconfig
LINTER_RULES_PATH: /src/
VALIDATE_ALL_CODEBASE: false
VALIDATE_CSHARP: true
194 .github/workflows/node-upgrade.yml vendored
@@ -1,194 +0,0 @@
|
||||
name: Auto Update Node Version
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 6 * * 1" # Weekly, every Monday
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
update-node:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- name: Get latest Node versions
|
||||
id: node-versions
|
||||
run: |
|
||||
# Get latest Node.js releases from official GitHub releases
|
||||
echo "Fetching latest Node.js releases..."
|
||||
|
||||
# Get latest v20.x release
|
||||
LATEST_NODE20=$(curl -s https://api.github.com/repos/nodejs/node/releases | \
|
||||
jq -r '.[] | select(.tag_name | startswith("v20.")) | .tag_name' | \
|
||||
head -1 | sed 's/^v//')
|
||||
|
||||
# Get latest v24.x release
|
||||
LATEST_NODE24=$(curl -s https://api.github.com/repos/nodejs/node/releases | \
|
||||
jq -r '.[] | select(.tag_name | startswith("v24.")) | .tag_name' | \
|
||||
head -1 | sed 's/^v//')
|
||||
|
||||
echo "Found Node.js releases: 20=$LATEST_NODE20, 24=$LATEST_NODE24"
|
||||
|
||||
# Verify these versions are available in alpine_nodejs releases
|
||||
echo "Verifying availability in alpine_nodejs..."
|
||||
ALPINE_RELEASES=$(curl -s https://api.github.com/repos/actions/alpine_nodejs/releases | jq -r '.[].tag_name')
|
||||
|
||||
if ! echo "$ALPINE_RELEASES" | grep -q "^v$LATEST_NODE20$"; then
|
||||
echo "::warning title=Node 20 Fallback::Node 20 version $LATEST_NODE20 not found in alpine_nodejs releases, using fallback"
|
||||
# Fall back to latest available alpine_nodejs v20 release
|
||||
LATEST_NODE20=$(echo "$ALPINE_RELEASES" | grep "^v20\." | head -1 | sed 's/^v//')
|
||||
echo "Using latest available alpine_nodejs Node 20: $LATEST_NODE20"
|
||||
fi
|
||||
|
||||
if ! echo "$ALPINE_RELEASES" | grep -q "^v$LATEST_NODE24$"; then
|
||||
echo "::warning title=Node 24 Fallback::Node 24 version $LATEST_NODE24 not found in alpine_nodejs releases, using fallback"
|
||||
# Fall back to latest available alpine_nodejs v24 release
|
||||
LATEST_NODE24=$(echo "$ALPINE_RELEASES" | grep "^v24\." | head -1 | sed 's/^v//')
|
||||
echo "Using latest available alpine_nodejs Node 24: $LATEST_NODE24"
|
||||
fi
|
||||
|
||||
# Validate that we have non-empty version numbers
|
||||
if [ -z "$LATEST_NODE20" ] || [ "$LATEST_NODE20" = "" ]; then
|
||||
echo "::error title=Invalid Node 20 Version::Failed to determine valid Node 20 version. Got: '$LATEST_NODE20'"
|
||||
echo "Available alpine_nodejs releases:"
|
||||
echo "$ALPINE_RELEASES" | head -10
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$LATEST_NODE24" ] || [ "$LATEST_NODE24" = "" ]; then
|
||||
echo "::error title=Invalid Node 24 Version::Failed to determine valid Node 24 version. Got: '$LATEST_NODE24'"
|
||||
echo "Available alpine_nodejs releases:"
|
||||
echo "$ALPINE_RELEASES" | head -10
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Additional validation: ensure versions match expected format (x.y.z)
|
||||
if ! echo "$LATEST_NODE20" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||
echo "::error title=Invalid Node 20 Format::Node 20 version '$LATEST_NODE20' does not match expected format (x.y.z)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! echo "$LATEST_NODE24" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||
echo "::error title=Invalid Node 24 Format::Node 24 version '$LATEST_NODE24' does not match expected format (x.y.z)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Validated Node versions: 20=$LATEST_NODE20, 24=$LATEST_NODE24"
|
||||
echo "latest_node20=$LATEST_NODE20" >> $GITHUB_OUTPUT
|
||||
echo "latest_node24=$LATEST_NODE24" >> $GITHUB_OUTPUT
|
||||
|
||||
# Check current versions in externals.sh
|
||||
CURRENT_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
CURRENT_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
|
||||
echo "current_node20=$CURRENT_NODE20" >> $GITHUB_OUTPUT
|
||||
echo "current_node24=$CURRENT_NODE24" >> $GITHUB_OUTPUT
|
||||
|
||||
# Determine if updates are needed
|
||||
NEEDS_UPDATE20="false"
|
||||
NEEDS_UPDATE24="false"
|
||||
|
||||
if [ "$CURRENT_NODE20" != "$LATEST_NODE20" ]; then
|
||||
NEEDS_UPDATE20="true"
|
||||
echo "::notice title=Node 20 Update Available::Current: $CURRENT_NODE20 → Latest: $LATEST_NODE20"
|
||||
fi
|
||||
|
||||
if [ "$CURRENT_NODE24" != "$LATEST_NODE24" ]; then
|
||||
NEEDS_UPDATE24="true"
|
||||
echo "::notice title=Node 24 Update Available::Current: $CURRENT_NODE24 → Latest: $LATEST_NODE24"
|
||||
fi
|
||||
|
||||
if [ "$NEEDS_UPDATE20" == "false" ] && [ "$NEEDS_UPDATE24" == "false" ]; then
|
||||
echo "::notice title=No Updates Needed::All Node.js versions are up to date"
|
||||
fi
|
||||
|
||||
echo "needs_update20=$NEEDS_UPDATE20" >> $GITHUB_OUTPUT
|
||||
echo "needs_update24=$NEEDS_UPDATE24" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update externals.sh and create PR
|
||||
if: steps.node-versions.outputs.needs_update20 == 'true' || steps.node-versions.outputs.needs_update24 == 'true'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Final validation before making changes
|
||||
NODE20_VERSION="${{ steps.node-versions.outputs.latest_node20 }}"
|
||||
NODE24_VERSION="${{ steps.node-versions.outputs.latest_node24 }}"
|
||||
|
||||
echo "Final validation of versions before PR creation:"
|
||||
echo "Node 20: '$NODE20_VERSION'"
|
||||
echo "Node 24: '$NODE24_VERSION'"
|
||||
|
||||
# Validate versions are not empty and match expected format
|
||||
if [ -z "$NODE20_VERSION" ] || ! echo "$NODE20_VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||
echo "::error title=Invalid Node 20 Version::Refusing to create PR with invalid Node 20 version: '$NODE20_VERSION'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$NODE24_VERSION" ] || ! echo "$NODE24_VERSION" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$'; then
|
||||
echo "::error title=Invalid Node 24 Version::Refusing to create PR with invalid Node 24 version: '$NODE24_VERSION'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ All versions validated successfully"
|
||||
|
||||
# Update the files
|
||||
if [ "${{ steps.node-versions.outputs.needs_update20 }}" == "true" ]; then
|
||||
sed -i 's/NODE20_VERSION="[^"]*"/NODE20_VERSION="'"$NODE20_VERSION"'"/' src/Misc/externals.sh
|
||||
fi
|
||||
|
||||
if [ "${{ steps.node-versions.outputs.needs_update24 }}" == "true" ]; then
|
||||
sed -i 's/NODE24_VERSION="[^"]*"/NODE24_VERSION="'"$NODE24_VERSION"'"/' src/Misc/externals.sh
|
||||
fi
|
||||
|
||||
# Verify the changes were applied correctly
|
||||
echo "Verifying changes in externals.sh:"
|
||||
grep "NODE20_VERSION=" src/Misc/externals.sh
|
||||
grep "NODE24_VERSION=" src/Misc/externals.sh
|
||||
|
||||
# Ensure we actually have valid versions in the file
|
||||
UPDATED_NODE20=$(grep "NODE20_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
UPDATED_NODE24=$(grep "NODE24_VERSION=" src/Misc/externals.sh | cut -d'"' -f2)
|
||||
|
||||
if [ -z "$UPDATED_NODE20" ] || [ -z "$UPDATED_NODE24" ]; then
|
||||
echo "::error title=Update Failed::Failed to properly update externals.sh"
|
||||
echo "Updated Node 20: '$UPDATED_NODE20'"
|
||||
echo "Updated Node 24: '$UPDATED_NODE24'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Configure git
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
|
||||
|
||||
# Create branch and commit changes
|
||||
branch_name="chore/update-node"
|
||||
git checkout -b "$branch_name"
|
||||
git commit -a -m "chore: update Node versions (20: $NODE20_VERSION, 24: $NODE24_VERSION)"
|
||||
git push --force origin "$branch_name"
|
||||
|
||||
# Create PR body using here-doc for proper formatting
|
||||
cat > pr_body.txt << EOF
|
||||
Automated Node.js version update:
|
||||
|
||||
- Node 20: ${{ steps.node-versions.outputs.current_node20 }} → $NODE20_VERSION
|
||||
- Node 24: ${{ steps.node-versions.outputs.current_node24 }} → $NODE24_VERSION
|
||||
|
||||
This update ensures we're using the latest stable Node.js versions for security and performance improvements.
|
||||
|
||||
**Note**: When updating Node versions, remember to also create a new release of alpine_nodejs at the updated version following the instructions at: https://github.com/actions/alpine_nodejs
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [Node Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/node-upgrade.yml)
|
||||
EOF
|
||||
|
||||
# Create PR
|
||||
gh pr create -B main -H "$branch_name" \
|
||||
--title "chore: update Node versions" \
|
||||
--label "dependencies" \
|
||||
--label "dependencies-weekly-check" \
|
||||
--label "dependencies-not-dependabot" \
|
||||
--label "node" \
|
||||
--label "javascript" \
|
||||
--body-file pr_body.txt
|
||||
|
||||
echo "::notice title=PR Created::Successfully created Node.js version update PR on branch $branch_name"
|
||||
235 .github/workflows/npm-audit-typescript.yml vendored
@@ -1,235 +0,0 @@
|
||||
name: NPM Audit Fix with TypeScript Auto-Fix
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
npm-audit-with-ts-fix:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: NPM install and audit fix with TypeScript auto-repair
|
||||
working-directory: src/Misc/expressionFunc/hashFiles
|
||||
run: |
|
||||
npm install
|
||||
|
||||
# Check for vulnerabilities first
|
||||
echo "Checking for npm vulnerabilities..."
|
||||
if npm audit --audit-level=moderate; then
|
||||
echo "✅ No moderate or higher vulnerabilities found"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "⚠️ Vulnerabilities found, attempting npm audit fix..."
|
||||
|
||||
# Attempt audit fix and capture the result
|
||||
if npm audit fix; then
|
||||
echo "✅ npm audit fix completed successfully"
|
||||
AUDIT_FIX_STATUS="success"
|
||||
else
|
||||
echo "⚠️ npm audit fix failed or had issues"
|
||||
AUDIT_FIX_STATUS="failed"
|
||||
|
||||
# Try audit fix with --force as a last resort for critical/high vulns only
|
||||
echo "Checking if critical/high vulnerabilities remain..."
|
||||
if ! npm audit --audit-level=high; then
|
||||
echo "🚨 Critical/high vulnerabilities remain, attempting --force fix..."
|
||||
if npm audit fix --force; then
|
||||
echo "⚠️ npm audit fix --force completed (may have breaking changes)"
|
||||
AUDIT_FIX_STATUS="force-fixed"
|
||||
else
|
||||
echo "❌ npm audit fix --force also failed"
|
||||
AUDIT_FIX_STATUS="force-failed"
|
||||
fi
|
||||
else
|
||||
echo "✅ Only moderate/low vulnerabilities remain after failed fix"
|
||||
AUDIT_FIX_STATUS="partial-success"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "AUDIT_FIX_STATUS=$AUDIT_FIX_STATUS" >> $GITHUB_ENV
|
||||
|
||||
# Try to fix TypeScript issues automatically
|
||||
echo "Attempting to fix TypeScript compatibility issues..."
|
||||
|
||||
# Check if build fails
|
||||
if ! npm run build 2>/dev/null; then
|
||||
echo "Build failed, attempting automated fixes..."
|
||||
|
||||
# Common fix 1: Update @types/node to latest compatible version
|
||||
echo "Trying to update @types/node to latest version..."
|
||||
npm update @types/node
|
||||
|
||||
# Common fix 2: If that doesn't work, try installing a specific known-good version
|
||||
if ! npm run build 2>/dev/null; then
|
||||
echo "Trying specific @types/node version..."
|
||||
# Try Node 20 compatible version
|
||||
npm install --save-dev @types/node@^20.0.0
|
||||
fi
|
||||
|
||||
# Common fix 3: Clear node_modules and reinstall if still failing
|
||||
if ! npm run build 2>/dev/null; then
|
||||
echo "Clearing node_modules and reinstalling..."
|
||||
rm -rf node_modules package-lock.json
|
||||
npm install
|
||||
|
||||
# Re-run audit fix after clean install if it was successful before
|
||||
if [[ "$AUDIT_FIX_STATUS" == "success" || "$AUDIT_FIX_STATUS" == "force-fixed" ]]; then
|
||||
echo "Re-running npm audit fix after clean install..."
|
||||
npm audit fix || echo "Audit fix failed on second attempt"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Common fix 4: Try updating TypeScript itself
|
||||
if ! npm run build 2>/dev/null; then
|
||||
echo "Trying to update TypeScript..."
|
||||
npm update typescript
|
||||
fi
|
||||
|
||||
# Final check
|
||||
if npm run build 2>/dev/null; then
|
||||
echo "✅ Successfully fixed TypeScript issues automatically"
|
||||
else
|
||||
echo "⚠️ Could not automatically fix TypeScript issues"
|
||||
fi
|
||||
else
|
||||
echo "✅ Build passes after audit fix"
|
||||
fi
|
||||
|
||||
- name: Create PR if changes exist
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
HUSKY: 0 # Disable husky hooks for automated commits
run: |
# Check if there are any changes
if [ -n "$(git status --porcelain)" ]; then
# Configure git
git config --global user.name "github-actions[bot]"
git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"

# Create branch and commit changes
branch_name="chore/npm-audit-fix-with-ts-repair"
git checkout -b "$branch_name"

# Commit with --no-verify to skip husky hooks
git commit -a -m "chore: npm audit fix with automated TypeScript compatibility fixes" --no-verify
git push --force origin "$branch_name"

# Check final build status and gather info about what was changed
build_status="✅ Build passes"
fixes_applied=""
cd src/Misc/expressionFunc/hashFiles

# Check what packages were updated
if git diff HEAD~1 package.json | grep -q "@types/node"; then
fixes_applied+="\n- Updated @types/node version for TypeScript compatibility"
fi
if git diff HEAD~1 package.json | grep -q "typescript"; then
fixes_applied+="\n- Updated TypeScript version"
fi
if git diff HEAD~1 package-lock.json | grep -q "resolved"; then
fixes_applied+="\n- Updated package dependencies via npm audit fix"
fi

if ! npm run build 2>/dev/null; then
build_status="⚠️ Build fails - manual review required"
fi
cd - > /dev/null

# Create enhanced PR body using here-doc for proper formatting
audit_status_msg=""
case "$AUDIT_FIX_STATUS" in
"success")
audit_status_msg="✅ **Audit Fix**: Completed successfully"
;;
"partial-success")
audit_status_msg="⚠️ **Audit Fix**: Partial success (only moderate/low vulnerabilities remain)"
;;
"force-fixed")
audit_status_msg="⚠️ **Audit Fix**: Completed with --force (may have breaking changes)"
;;
"failed"|"force-failed")
audit_status_msg="❌ **Audit Fix**: Failed to resolve vulnerabilities"
;;
*)
audit_status_msg="❓ **Audit Fix**: Status unknown"
;;
esac

if [[ "$build_status" == *"fails"* ]]; then
cat > pr_body.txt << EOF
Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.

**Build Status**: ⚠️ Build fails - manual review required
$audit_status_msg

This workflow attempts to automatically fix TypeScript compatibility issues that may arise from npm audit fixes.

⚠️ **Manual Review Required**: The build is currently failing after automated fixes were attempted.

Common issues and solutions:
- Check for TypeScript version compatibility with Node.js types
- Review breaking changes in updated dependencies
- Consider pinning problematic dependency versions temporarily
- Review tsconfig.json for compatibility settings

**Automated Fix Strategy**:
1. Run npm audit fix with proper error handling
2. Update @types/node to latest compatible version
3. Try Node 20 specific @types/node version if needed
4. Clean reinstall dependencies if conflicts persist
5. Update TypeScript compiler if necessary

---

Autogenerated by [NPM Audit Fix with TypeScript Auto-Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit-ts-fix.yml)
EOF
else
cat > pr_body.txt << EOF
Automated npm audit fix with TypeScript auto-repair for hashFiles dependencies.

**Build Status**: ✅ Build passes
$audit_status_msg

This workflow attempts to automatically fix TypeScript compatibility issues that may arise from npm audit fixes.

✅ **Ready to Merge**: All automated fixes were successful and the build passes.

**Automated Fix Strategy**:
1. Run npm audit fix with proper error handling
2. Update @types/node to latest compatible version
3. Try Node 20 specific @types/node version if needed
4. Clean reinstall dependencies if conflicts persist
5. Update TypeScript compiler if necessary

---

Autogenerated by [NPM Audit Fix with TypeScript Auto-Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit-ts-fix.yml)
EOF
fi

if [ -n "$fixes_applied" ]; then
# Add the fixes applied section to the file
sed -i "/This workflow attempts/a\\
\\
**Automated Fixes Applied**:$fixes_applied" pr_body.txt
fi

# Create PR with appropriate labels
labels="dependencies,dependencies-not-dependabot,typescript,npm,security"
if [[ "$build_status" == *"fails"* ]]; then
labels="dependencies,dependencies-not-dependabot,typescript,npm,security,needs-manual-review"
fi

# Create PR
gh pr create -B main -H "$branch_name" \
--title "chore: npm audit fix with TypeScript auto-repair" \
--label "$labels" \
--body-file pr_body.txt
else
echo "No changes to commit"
fi

137
.github/workflows/npm-audit.yml
vendored
@@ -1,137 +0,0 @@
|
||||
name: NPM Audit Fix
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 7 * * 1" # Weekly on Monday at 7 AM UTC
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
npm-audit:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: "20"
|
||||
|
||||
- name: NPM install and audit fix
|
||||
working-directory: src/Misc/expressionFunc/hashFiles
|
||||
run: |
|
||||
npm install
|
||||
|
||||
# Check what vulnerabilities exist
|
||||
echo "=== Checking current vulnerabilities ==="
|
||||
npm audit || true
|
||||
|
||||
# Apply audit fix --force to get security updates
|
||||
echo "=== Applying npm audit fix --force ==="
|
||||
npm audit fix --force
|
||||
|
||||
# Test if build still works and set status
|
||||
echo "=== Testing build compatibility ==="
|
||||
if npm run all; then
|
||||
echo "✅ Build successful after audit fix"
|
||||
echo "AUDIT_FIX_STATUS=success" >> $GITHUB_ENV
|
||||
else
|
||||
echo "❌ Build failed after audit fix - will create PR with fix instructions"
|
||||
echo "AUDIT_FIX_STATUS=build_failed" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Create PR if changes exist
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Check if there are any changes
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
# Configure git
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
# Create branch and commit changes
|
||||
branch_name="chore/npm-audit-fix-$(date +%Y%m%d)"
|
||||
git checkout -b "$branch_name"
|
||||
git add .
|
||||
git commit -m "chore: npm audit fix for hashFiles dependencies" --no-verify
|
||||
git push origin "$branch_name"
|
||||
|
||||
# Create PR body based on what actually happened
|
||||
if [ "$AUDIT_FIX_STATUS" = "success" ]; then
|
||||
cat > pr_body.txt << 'EOF'
|
||||
Automated npm audit fix for security vulnerabilities in hashFiles dependencies.
|
||||
|
||||
**✅ Full Fix Applied Successfully**
|
||||
This update addresses npm security advisories and ensures dependencies are secure and up-to-date.
|
||||
|
||||
**Changes made:**
|
||||
- Applied `npm audit fix --force` to resolve security vulnerabilities
|
||||
- Updated package-lock.json with security patches
|
||||
- Verified build compatibility with `npm run all`
|
||||
|
||||
**Next steps:**
|
||||
- Review the dependency changes
|
||||
- Verify the hashFiles functionality still works as expected
|
||||
- Merge when ready
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
|
||||
EOF
|
||||
elif [ "$AUDIT_FIX_STATUS" = "build_failed" ]; then
|
||||
cat > pr_body.txt << 'EOF'
|
||||
Automated npm audit fix for security vulnerabilities in hashFiles dependencies.
|
||||
|
||||
**⚠️ Security Fixes Applied - Build Issues Need Manual Resolution**
|
||||
This update applies important security patches but causes build failures that require manual fixes.
|
||||
|
||||
**Changes made:**
|
||||
- Applied `npm audit fix --force` to resolve security vulnerabilities
|
||||
- Updated package-lock.json with security patches
|
||||
|
||||
**⚠️ Build Issues Detected:**
|
||||
The build fails after applying security fixes, likely due to TypeScript compatibility issues with updated `@types/node`.
|
||||
|
||||
**Required Manual Fixes:**
|
||||
1. Review TypeScript compilation errors in the build output
|
||||
2. Update TypeScript configuration if needed
|
||||
3. Consider pinning `@types/node` to a compatible version
|
||||
4. Run `npm run all` locally to verify fixes
|
||||
|
||||
**Next steps:**
|
||||
- **DO NOT merge until build issues are resolved**
|
||||
- Apply manual fixes for TypeScript compatibility
|
||||
- Test the hashFiles functionality still works as expected
|
||||
- Merge when build passes
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
|
||||
EOF
|
||||
else
|
||||
# Fallback case
|
||||
cat > pr_body.txt << 'EOF'
|
||||
Automated npm audit attempted for security vulnerabilities in hashFiles dependencies.
|
||||
|
||||
**ℹ️ No Changes Applied**
|
||||
No security vulnerabilities were found or no changes were needed.
|
||||
|
||||
---
|
||||
|
||||
Autogenerated by [NPM Audit Fix Workflow](https://github.com/actions/runner/blob/main/.github/workflows/npm-audit.yml)
|
||||
EOF
|
||||
fi
|
||||
|
||||
# Create PR
|
||||
gh pr create -B main -H "$branch_name" \
|
||||
--title "chore: npm audit fix for hashFiles dependencies" \
|
||||
--label "dependencies" \
|
||||
--label "dependencies-weekly-check" \
|
||||
--label "dependencies-not-dependabot" \
|
||||
--label "npm" \
|
||||
--label "typescript" \
|
||||
--label "security" \
|
||||
--body-file pr_body.txt
|
||||
else
|
||||
echo "✅ No changes to commit - npm audit fix did not modify any files"
|
||||
fi
|
||||
@@ -1,47 +1,48 @@
|
||||
name: Publish DockerImage from Release Branch
|
||||
name: Publish Runner Image
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
releaseBranch:
|
||||
description: 'Release Branch (releases/mXXX)'
|
||||
required: true
|
||||
runnerVersion:
|
||||
type: string
|
||||
description: Version of the runner being installed
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
|
||||
|
||||
jobs:
|
||||
publish-image:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
attestations: write
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ github.event.inputs.releaseBranch }}
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Compute image version
|
||||
id: image
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '');
|
||||
console.log(`Using runner version ${runnerVersion}`);
|
||||
if (!/^\d+\.\d+\.\d+$/.test(runnerVersion)) {
|
||||
throw new Error(`Invalid runner version: ${runnerVersion}`);
|
||||
const inputRunnerVersion = "${{ github.event.inputs.runnerVersion }}"
|
||||
if (inputRunnerVersion) {
|
||||
console.log(`Using input runner version ${inputRunnerVersion}`)
|
||||
core.setOutput('version', inputRunnerVersion);
|
||||
return
|
||||
}
|
||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
console.log(`Using runner version ${runnerVersion}`)
|
||||
core.setOutput('version', runnerVersion);
|
||||
|
||||
- name: Setup Docker buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
@@ -49,7 +50,7 @@ jobs:
|
||||
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: docker/build-push-action@v6
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: ./images
|
||||
platforms: |
|
||||
@@ -63,13 +64,5 @@ jobs:
|
||||
push: true
|
||||
labels: |
|
||||
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
|
||||
org.opencontainers.image.licenses=MIT
|
||||
annotations: |
|
||||
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
|
||||
|
||||
- name: Generate attestation
|
||||
uses: actions/attest-build-provenance@v3
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||
push-to-registry: true
|
||||
org.opencontainers.image.licenses=MIT
|
||||
512
.github/workflows/release.yml
vendored
@@ -11,15 +11,16 @@ jobs:
|
||||
if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# Make sure ./releaseVersion match ./src/runnerversion
|
||||
# Query GitHub release ensure version is not used
|
||||
- name: Check version
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@0.3.0
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const core = require('@actions/core')
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
const releaseVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '')
|
||||
@@ -29,7 +30,7 @@ jobs:
|
||||
return
|
||||
}
|
||||
try {
|
||||
const release = await github.rest.repos.getReleaseByTag({
|
||||
const release = await github.repos.getReleaseByTag({
|
||||
owner: '${{ github.event.repository.owner.name }}',
|
||||
repo: '${{ github.event.repository.name }}',
|
||||
tag: 'v' + runnerVersion
|
||||
@@ -52,6 +53,27 @@ jobs:
|
||||
win-arm64-sha: ${{ steps.sha.outputs.win-arm64-sha256 }}
|
||||
osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
|
||||
osx-arm64-sha: ${{ steps.sha.outputs.osx-arm64-sha256 }}
|
||||
linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }}
|
||||
linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }}
|
||||
linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }}
|
||||
win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }}
|
||||
win-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-arm64-sha256 }}
|
||||
osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }}
|
||||
osx-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-arm64-sha256 }}
|
||||
linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }}
|
||||
linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }}
|
||||
linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }}
|
||||
win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }}
|
||||
win-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-arm64-sha256 }}
|
||||
osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }}
|
||||
osx-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-arm64-sha256 }}
|
||||
linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }}
|
||||
linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }}
|
||||
linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }}
|
||||
win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }}
|
||||
win-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-arm64-sha256 }}
|
||||
osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }}
|
||||
osx-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-arm64-sha256 }}
|
||||
strategy:
|
||||
matrix:
|
||||
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64, osx-arm64, win-arm64 ]
|
||||
@@ -77,7 +99,7 @@ jobs:
|
||||
devScript: ./dev.sh
|
||||
|
||||
- runtime: win-x64
|
||||
os: windows-latest
|
||||
os: windows-2019
|
||||
devScript: ./dev
|
||||
|
||||
- runtime: win-arm64
|
||||
@@ -86,7 +108,7 @@ jobs:
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# Build runner layout
|
||||
- name: Build & Layout Release
|
||||
@@ -114,67 +136,113 @@ jobs:
|
||||
id: sha
|
||||
name: Compute SHA256
|
||||
working-directory: _package
|
||||
- run: |
|
||||
file=$(ls)
|
||||
sha=$(sha256sum $file | awk '{ print $1 }')
|
||||
echo "Computed sha256: $sha for $file"
|
||||
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
|
||||
echo "sha256=$sha" >> $GITHUB_OUTPUT
|
||||
shell: bash
|
||||
id: sha_noexternals
|
||||
name: Compute SHA256
|
||||
working-directory: _package_trims/trim_externals
|
||||
- run: |
|
||||
file=$(ls)
|
||||
sha=$(sha256sum $file | awk '{ print $1 }')
|
||||
echo "Computed sha256: $sha for $file"
|
||||
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
|
||||
echo "sha256=$sha" >> $GITHUB_OUTPUT
|
||||
shell: bash
|
||||
id: sha_noruntime
|
||||
name: Compute SHA256
|
||||
working-directory: _package_trims/trim_runtime
|
||||
- run: |
|
||||
file=$(ls)
|
||||
sha=$(sha256sum $file | awk '{ print $1 }')
|
||||
echo "Computed sha256: $sha for $file"
|
||||
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
|
||||
echo "sha256=$sha" >> $GITHUB_OUTPUT
|
||||
shell: bash
|
||||
id: sha_noruntime_noexternals
|
||||
name: Compute SHA256
|
||||
working-directory: _package_trims/trim_runtime_externals
|
||||
|
||||
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
|
||||
if: matrix.runtime == 'win-x64' || matrix.runtime == 'win-arm64'
|
||||
uses: actions/github-script@0.3.0
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const core = require('@actions/core')
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_zip.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
|
||||
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
|
||||
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
|
||||
|
||||
console.log(trimmedPackages)
|
||||
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
|
||||
|
||||
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
|
||||
if: matrix.runtime != 'win-x64' && matrix.runtime != 'win-arm64'
|
||||
uses: actions/github-script@0.3.0
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const core = require('@actions/core')
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_targz.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
|
||||
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
|
||||
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
|
||||
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
|
||||
|
||||
console.log(trimmedPackages)
|
||||
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
|
||||
|
||||
# Upload runner package tar.gz/zip as artifact.
|
||||
# Since each package name is unique, so we don't need to put ${{matrix}} info into artifact name
|
||||
- name: Publish Artifact
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: actions/upload-artifact@v5
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: runner-packages-${{ matrix.runtime }}
|
||||
name: runner-packages
|
||||
path: |
|
||||
_package
|
||||
_package_trims/trim_externals
|
||||
_package_trims/trim_runtime
|
||||
_package_trims/trim_runtime_externals
|
||||
${{ matrix.runtime }}-trimmedpackages.json
|
||||
|
||||
release:
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# Download runner package tar.gz/zip produced by 'build' job
|
||||
- name: Download Artifact (win-x64)
|
||||
uses: actions/download-artifact@v6
|
||||
- name: Download Artifact
|
||||
uses: actions/download-artifact@v1
|
||||
with:
|
||||
name: runner-packages-win-x64
|
||||
path: ./
|
||||
- name: Download Artifact (win-arm64)
|
||||
uses: actions/download-artifact@v6
|
||||
with:
|
||||
name: runner-packages-win-arm64
|
||||
path: ./
|
||||
- name: Download Artifact (osx-x64)
|
||||
uses: actions/download-artifact@v6
|
||||
with:
|
||||
name: runner-packages-osx-x64
|
||||
path: ./
|
||||
- name: Download Artifact (osx-arm64)
|
||||
uses: actions/download-artifact@v6
|
||||
with:
|
||||
name: runner-packages-osx-arm64
|
||||
path: ./
|
||||
- name: Download Artifact (linux-x64)
|
||||
uses: actions/download-artifact@v6
|
||||
with:
|
||||
name: runner-packages-linux-x64
|
||||
path: ./
|
||||
- name: Download Artifact (linux-arm)
|
||||
uses: actions/download-artifact@v6
|
||||
with:
|
||||
name: runner-packages-linux-arm
|
||||
path: ./
|
||||
- name: Download Artifact (linux-arm64)
|
||||
uses: actions/download-artifact@v6
|
||||
with:
|
||||
name: runner-packages-linux-arm64
|
||||
name: runner-packages
|
||||
path: ./
|
||||
|
||||
# Create ReleaseNote file
|
||||
- name: Create ReleaseNote
|
||||
id: releaseNote
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@0.3.0
|
||||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const core = require('@actions/core')
|
||||
const fs = require('fs');
|
||||
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
|
||||
var releaseNote = fs.readFileSync('${{ github.workspace }}/releaseNote.md', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion)
|
||||
@@ -185,11 +253,33 @@ jobs:
|
||||
releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-x64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-arm64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-x64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-x64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-arm64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-arm64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}')
|
||||
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}')
|
||||
console.log(releaseNote)
|
||||
core.setOutput('version', runnerVersion);
|
||||
core.setOutput('note', releaseNote);
|
||||
|
||||
- name: Validate Packages HASH
|
||||
working-directory: _package
|
||||
run: |
|
||||
ls -l
|
||||
echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
|
||||
@@ -214,93 +304,379 @@ jobs:
|
||||
|
||||
# Upload release assets (full runner packages)
|
||||
- name: Upload Release Asset (win-x64)
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (win-arm64)
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64)
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64)
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64)
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm)
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64)
|
||||
uses: actions/upload-release-asset@v1.0.2
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim externals)
|
||||
- name: Upload Release Asset (win-x64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim externals)
|
||||
- name: Upload Release Asset (win-arm64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
|
||||
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim runtime)
|
||||
- name: Upload Release Asset (win-x64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim runtime)
|
||||
- name: Upload Release Asset (win-arm64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
|
||||
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64-noruntime)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim runtime and externals)
|
||||
- name: Upload Release Asset (win-x64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trim runtime and externals)
|
||||
- name: Upload Release Asset (win-arm64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
|
||||
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64-noruntime-noexternals)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trimmedpackages.json)
|
||||
- name: Upload Release Asset (win-x64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/win-x64-trimmedpackages.json
|
||||
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
# Upload release assets (trimmedpackages.json)
|
||||
- name: Upload Release Asset (win-arm64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/win-arm64-trimmedpackages.json
|
||||
asset_name: actions-runner-win-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-x64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/linux-x64-trimmedpackages.json
|
||||
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-x64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/osx-x64-trimmedpackages.json
|
||||
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (osx-arm64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/osx-arm64-trimmedpackages.json
|
||||
asset_name: actions-runner-osx-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/linux-arm-trimmedpackages.json
|
||||
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Upload Release Asset (linux-arm64-trimmedpackages.json)
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
|
||||
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
publish-image:
|
||||
needs: release
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
attestations: write
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Compute image version
|
||||
id: image
|
||||
uses: actions/github-script@v8.0.0
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
@@ -309,10 +685,10 @@ jobs:
|
||||
core.setOutput('version', runnerVersion);
|
||||
|
||||
- name: Setup Docker buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
@@ -320,7 +696,7 @@ jobs:
|
||||
|
||||
- name: Build and push Docker image
|
||||
id: build-and-push
|
||||
uses: docker/build-push-action@v6
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: ./images
|
||||
platforms: |
|
||||
@@ -334,13 +710,5 @@ jobs:
|
||||
push: true
|
||||
labels: |
|
||||
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
|
||||
org.opencontainers.image.licenses=MIT
|
||||
annotations: |
|
||||
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
|
||||
|
||||
- name: Generate attestation
|
||||
uses: actions/attest-build-provenance@v3
|
||||
with:
|
||||
subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||
push-to-registry: true
|
||||
org.opencontainers.image.licenses=MIT
|
||||
|
||||
2
.github/workflows/stale-bot.yml
vendored
@@ -7,7 +7,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v10
- uses: actions/stale@v8
with:
stale-issue-message: "This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days."
close-issue-message: "This issue was closed because it has been stalled for 15 days with no activity."

1
.gitignore
vendored
@@ -26,5 +26,4 @@ _dotnetsdk
TestResults
TestLogs
.DS_Store
.mono
**/*.DotSettings.user

@@ -1 +1,6 @@
cd src/Misc/expressionFunc/hashFiles && npx lint-staged
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

cd src/Misc/expressionFunc/hashFiles

npx lint-staged

18
README.md
@@ -20,20 +20,6 @@ Runner releases:

 [Pre-reqs](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases)

### Note
## Contribute

Thank you for your interest in this GitHub repo, however, right now we are not taking contributions.

We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we’re working on and what stage they’re in.

We are taking the following steps to better direct requests related to GitHub Actions, including:

1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions)

2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report.

3. Security Issues should be handled as per our [security.md](security.md)

We will still provide security updates for this project and fix major breaking changes during this time.

You are welcome to still raise bugs in this repo.
We accept contributions in the form of issues and pull requests. The runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page. [Read more about our guidelines here](docs/contribute.md) before contributing.

@@ -250,42 +250,6 @@ Two problem matchers can be used:
}
```

#### Default from path

The problem matcher can specify a `fromPath` property at the top level, which applies when a specific pattern doesn't provide a value for `fromPath`. This is useful for tools that don't include project file information in their output.

For example, given the following compiler output that doesn't include project file information:

```
ClassLibrary.cs(16,24): warning CS0612: 'ClassLibrary.Helpers.MyHelper.Name' is obsolete
```

A problem matcher with a default from path can be used:

```json
{
  "problemMatcher": [
    {
      "owner": "csc-minimal",
      "fromPath": "ClassLibrary/ClassLibrary.csproj",
      "pattern": [
        {
          "regexp": "^(.+)\\((\\d+),(\\d+)\\): (error|warning) (.+): (.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "severity": 4,
          "code": 5,
          "message": 6
        }
      ]
    }
  ]
}
```

This ensures that the file is rooted to the correct path when there's not enough information in the error messages to extract a `fromPath`.

#### Mitigate regular expression denial of service (ReDos)

If a matcher exceeds a 1 second timeout when processing a line, it is retried up to two more times (three attempts total).

@@ -23,7 +23,7 @@ This feature is mainly intended for self hosted runner administrators.
- `ACTIONS_RUNNER_HOOK_JOB_STARTED`
- `ACTIONS_RUNNER_HOOK_JOB_COMPLETED`

You can set these variables to the **absolute** path of a `.sh` or `.ps1` file.
You can set these variables to the **absolute** path of a a `.sh` or `.ps1` file.

We will execute `pwsh` (fallback to `powershell`) or `bash` (fallback to `sh`) as appropriate.
- `.sh` files will execute with the args `-e {pathtofile}`
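
For instance, a minimal job-started hook could look like the sketch below. The path and script body are illustrative assumptions; only the environment variable names and the `bash -e {pathtofile}` invocation described above come from the runner itself.

```sh
#!/bin/bash
# /opt/runner-hooks/job-started.sh - hypothetical example hook.
# The runner invokes it as: bash -e /opt/runner-hooks/job-started.sh
echo "Job starting on $(hostname) at $(date -u +%FT%TZ)"
# Typical uses: prepare scratch space, start sidecar services, emit audit logs.
```

Register it by pointing the variable at the script before the runner picks up a job, e.g. `export ACTIONS_RUNNER_HOOK_JOB_STARTED=/opt/runner-hooks/job-started.sh` (or the equivalent entry in the runner's `.env` file, if you use one).
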
@@ -4,9 +4,9 @@

Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.

The runner carries its own copies of node.js executables under `<runner_root>/externals/node20/` and `<runner_root>/externals/node24/`.
The runner carries its own copy of node.js executable under `<runner_root>/externals/node16/`.

All javascript base Actions will get executed by the built-in `node` at either `<runner_root>/externals/node20/` or `<runner_root>/externals/node24/` depending on the version specified in the action's metadata.
All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node16/`.

> Not the `node` from `$PATH`
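
One quick way to check that access is to make a small request with the bundled `node` itself rather than whatever is on `$PATH`. This is only a sketch; adjust the install path, the node directory (`node20` or `node24`), and the host (for example your GitHub Enterprise Server URL) to match your setup.

```sh
# Probe api.github.com using the runner's own Node.js (Linux layout assumed).
RUNNER_ROOT=/opt/actions-runner   # adjust to where the runner is installed
"$RUNNER_ROOT/externals/node20/bin/node" -e '
  require("https").get("https://api.github.com", res => {
    console.log("Reached api.github.com, HTTP", res.statusCode);
  }).on("error", err => { console.error("Failed:", err.message); process.exit(1); });
'
```
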
@@ -1,6 +1,6 @@
# Contributions

We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors. Please note the runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page.
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors.Please note the runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page.

> IMPORTANT: Building your own runner is critical for the dev inner loop process when contributing changes. However, only runners built and distributed by GitHub (releases) are supported in production. Be aware that workflows and orchestrations run service side with the runner being a remote process to run steps. For that reason, the service can pull the runner forward so customizations can be lost.

@@ -1,217 +0,0 @@
# Runner Dependency Management Process

## Overview

This document outlines the automated dependency management process for the GitHub Actions Runner, designed to ensure we maintain up-to-date and secure dependencies while providing predictable release cycles.

## Release Schedule

- **Monthly Runner Releases**: New runner versions are released monthly
- **Weekly Dependency Checks**: Automated workflows check for dependency updates every Monday
- **Security Patches**: Critical security vulnerabilities are addressed immediately outside the regular schedule

## Automated Workflows

**Note**: These workflows are implemented across separate PRs for easier review and independent deployment. Each workflow includes comprehensive error handling and security-focused vulnerability detection.

### 1. Foundation Labels

- **Workflow**: `.github/workflows/setup-labels.yml` (PR #4024)
- **Purpose**: Creates consistent dependency labels for all automation workflows
- **Labels**: `dependencies`, `security`, `typescript`, `needs-manual-review`
- **Prerequisite**: Must be merged before other workflows for proper labeling

### 2. Node.js Version Updates

- **Workflow**: `.github/workflows/node-upgrade.yml`
- **Schedule**: Mondays at 6:00 AM UTC
- **Purpose**: Updates Node.js 20 and 24 versions in `src/Misc/externals.sh`
- **Source**: [nodejs.org](https://nodejs.org) and [actions/alpine_nodejs](https://github.com/actions/alpine_nodejs) (a version-lookup sketch follows this list)
- **Priority**: First (NPM depends on current Node.js versions)
### 3. NPM Security Audit
|
||||
|
||||
- **Primary Workflow**: `.github/workflows/npm-audit.yml` ("NPM Audit Fix")
|
||||
- **Schedule**: Mondays at 7:00 AM UTC
|
||||
- **Purpose**: Automated security vulnerability detection and basic fixes
|
||||
- **Location**: `src/Misc/expressionFunc/hashFiles/`
|
||||
- **Features**: npm audit, security patch application, PR creation
|
||||
- **Dependency**: Runs after Node.js updates for optimal compatibility
|
||||
|
||||
- **Fallback Workflow**: `.github/workflows/npm-audit-typescript.yml` ("NPM Audit Fix with TypeScript Auto-Fix")
|
||||
- **Trigger**: Manual dispatch only
|
||||
- **Purpose**: Manual security audit with TypeScript compatibility fixes
|
||||
- **Use Case**: When scheduled workflow fails or needs custom intervention
|
||||
- **Features**: Enhanced TypeScript auto-repair, graduated security response
|
||||
- **How to Use**:
|
||||
  1. If the scheduled "NPM Audit Fix" workflow fails, go to the Actions tab (or use the CLI sketch after these steps)
|
||||
2. Select "NPM Audit Fix with TypeScript Auto-Fix" workflow
|
||||
3. Click "Run workflow" and optionally specify fix level (auto/manual)
|
||||
4. Review the generated PR for TypeScript compatibility issues
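The same steps can be driven from the `gh` CLI; this is only a sketch, and the `fix_level` input name is an assumption — check the workflow's `workflow_dispatch` inputs before relying on it:

```bash
# Manually dispatch the TypeScript-aware audit workflow
gh workflow run npm-audit-typescript.yml --ref main -f fix_level=auto

# Then watch for the generated PR
gh pr list --label "dependencies" --label "typescript" --state open
```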
|
||||
|
||||
### 4. .NET SDK Updates
|
||||
|
||||
- **Workflow**: `.github/workflows/dotnet-upgrade.yml`
|
||||
- **Schedule**: Mondays at midnight UTC
|
||||
- **Purpose**: Updates .NET SDK and package versions with build validation
|
||||
- **Features**: Global.json updates, NuGet package management, compatibility checking
|
||||
- **Independence**: Runs independently of Node.js/NPM updates
|
||||
|
||||
### 5. Docker/Buildx Updates
|
||||
|
||||
- **Workflow**: `.github/workflows/docker-buildx-upgrade.yml` ("Docker/Buildx Version Upgrade")
|
||||
- **Schedule**: Mondays at midnight UTC
|
||||
- **Purpose**: Updates Docker and Docker Buildx versions with multi-platform validation
|
||||
- **Features**: Container security scanning, multi-architecture build testing
|
||||
- **Independence**: Runs independently of other dependency updates
|
||||
|
||||
### 6. Dependency Monitoring
|
||||
|
||||
- **Workflow**: `.github/workflows/dependency-check.yml` ("Dependency Status Check")
|
||||
- **Schedule**: Mondays at 11:00 AM UTC
|
||||
- **Purpose**: Comprehensive status report of all dependencies with security audit
|
||||
- **Features**: Multi-dependency checking, npm audit status, build validation, choice of specific component checks
|
||||
- **Summary**: Runs last to capture results from all morning dependency updates
|
||||
|
||||
## Release Process Integration
|
||||
|
||||
### Pre-Release Checklist
|
||||
|
||||
Before each monthly runner release:
|
||||
|
||||
1. **Check Dependency PRs**:
|
||||
|
||||
```bash
|
||||
# List all open dependency PRs
|
||||
gh pr list --label "dependencies" --state open
|
||||
|
||||
# List only automated weekly dependency updates
|
||||
gh pr list --label "dependencies-weekly-check" --state open
|
||||
|
||||
# List only custom dependency automation (not dependabot)
|
||||
gh pr list --label "dependencies-not-dependabot" --state open
|
||||
```
|
||||
|
||||
2. **Run Manual Dependency Check**:
|
||||
- Go to Actions tab → "Dependency Status Check" → "Run workflow"
|
||||
- Review the summary for any outdated dependencies
|
||||
|
||||
3. **Review and Merge Updates**:
|
||||
- Prioritize security-related updates
|
||||
- Test dependency updates in development environment
|
||||
- Merge approved dependency PRs
|
||||
|
||||
### Vulnerability Response
|
||||
|
||||
#### Critical Security Vulnerabilities
|
||||
|
||||
- **Response Time**: Within 24 hours
|
||||
- **Process**:
|
||||
1. Assess impact on runner security
|
||||
2. Create hotfix branch if runner data security is affected
|
||||
3. Expedite patch release if necessary
|
||||
4. Document in security advisory if applicable
|
||||
|
||||
#### Non-Critical Vulnerabilities
|
||||
|
||||
- **Response Time**: Next monthly release
|
||||
- **Process**:
|
||||
1. Evaluate if vulnerability affects runner functionality
|
||||
2. Include fix in regular dependency update cycle
|
||||
3. Document in release notes
|
||||
|
||||
## Monitoring and Alerts
|
||||
|
||||
### GitHub Actions Workflow Status
|
||||
|
||||
- All dependency workflows create PRs with the `dependencies` label
|
||||
- Failed workflows should be investigated immediately
|
||||
- Weekly dependency status reports are generated automatically
|
||||
|
||||
### Manual Checks
|
||||
|
||||
You can manually trigger dependency checks:
|
||||
|
||||
- **Full Status**: Run "Dependency Status Check" workflow
|
||||
- **Specific Component**: Use the dropdown to check individual dependencies
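If you prefer the CLI over the Actions tab, the same check can be dispatched with `gh` using the workflow file name listed above (the input name in the second command is an assumption — match it to the workflow's `workflow_dispatch` inputs):

```bash
# Kick off the full dependency status report
gh workflow run dependency-check.yml --ref main

# Or check a single component (input name "component" is assumed)
gh workflow run dependency-check.yml --ref main -f component=node
```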
|
||||
|
||||
## Dependency Labels
|
||||
|
||||
All automated dependency PRs are tagged with labels for easy filtering and management:
|
||||
|
||||
### Primary Labels
|
||||
|
||||
- **`dependencies`**: All automated dependency-related PRs
|
||||
- **`dependencies-weekly-check`**: Automated weekly dependency updates from scheduled workflows
|
||||
- **`dependencies-not-dependabot`**: Custom dependency automation (not created by dependabot)
|
||||
- **`security`**: Security vulnerability fixes and patches
|
||||
- **`typescript`**: TypeScript compatibility and type definition updates
|
||||
- **`needs-manual-review`**: Complex updates requiring human verification
|
||||
|
||||
### Technology-Specific Labels
|
||||
|
||||
- **`node`**: Node.js version updates
|
||||
- **`javascript`**: JavaScript runtime and tooling updates
|
||||
- **`npm`**: NPM package and security updates
|
||||
- **`dotnet`**: .NET SDK and NuGet package updates
|
||||
- **`docker`**: Docker and container tooling updates
|
||||
|
||||
### Workflow-Specific Branches
|
||||
|
||||
- **Node.js updates**: `chore/update-node` branch
|
||||
- **NPM security fixes**: `chore/npm-audit-fix-YYYYMMDD` and `chore/npm-audit-fix-with-ts-repair` branches
|
||||
- **NuGet/.NET updates**: `feature/dotnetsdk-upgrade/{version}` branches
|
||||
- **Docker updates**: `feature/docker-buildx-upgrade` branch
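To see what each automation currently has open, you can filter pull requests by their head branch (branch names taken from the list above):

```bash
# Open PRs from the Node.js update automation
gh pr list --state open --search "head:chore/update-node"

# Open PRs from the Docker/Buildx upgrade automation
gh pr list --state open --search "head:feature/docker-buildx-upgrade"
```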
|
||||
|
||||
## Special Considerations
|
||||
|
||||
### Node.js Updates
|
||||
|
||||
When updating Node.js versions, remember to:
|
||||
|
||||
1. Create a corresponding release in [actions/alpine_nodejs](https://github.com/actions/alpine_nodejs)
|
||||
2. Follow the alpine_nodejs getting started guide
|
||||
3. Test container builds with new Node versions
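A local smoke test of the container build after a Node bump might look like the following; the Dockerfile location and build-argument values are assumptions, so adjust them to this repository's image build:

```bash
# Build the runner image for one platform to validate the new Node version
# (Dockerfile path and RUNNER_VERSION value are illustrative)
docker buildx build ./images \
  --platform linux/amd64 \
  --build-arg RUNNER_VERSION=2.330.0 \
  -t runner:node-bump-test
```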
|
||||
|
||||
### .NET SDK Updates
|
||||
|
||||
- Only patch versions are auto-updated within the same major.minor version
|
||||
- Major/minor version updates require manual review and testing
|
||||
|
||||
### Docker Updates
|
||||
|
||||
- Updates include both Docker Engine and Docker Buildx
|
||||
- Verify compatibility with runner container workflows
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **NPM Audit Workflow Fails**:
|
||||
- Check if `package.json` exists in `src/Misc/expressionFunc/hashFiles/`
|
||||
- Verify Node.js setup step succeeded
|
||||
|
||||
2. **Version Detection Fails**:
|
||||
- Check if upstream APIs are available
|
||||
- Verify parsing logic for version extraction
|
||||
|
||||
3. **PR Creation Fails**:
|
||||
- Ensure `GITHUB_TOKEN` has sufficient permissions
|
||||
- Check if branch already exists
|
||||
|
||||
### Contact
|
||||
|
||||
For questions about the dependency management process:
|
||||
|
||||
- Create an issue with the `dependencies` label
|
||||
- Review existing dependency management workflows
|
||||
- Consult the runner team for security-related concerns
|
||||
|
||||
## Metrics and KPIs
|
||||
|
||||
Track these metrics to measure dependency management effectiveness:
|
||||
|
||||
- Number of open dependency PRs at release time
|
||||
- Time to merge dependency updates
|
||||
- Number of security vulnerabilities by severity
|
||||
- Release cycle adherence (monthly target)
|
||||
@@ -4,7 +4,16 @@
|
||||
|
||||
## Supported Distributions and Versions
|
||||
|
||||
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#linux)."
|
||||
x64
|
||||
- Red Hat Enterprise Linux 7+
|
||||
- CentOS 7+
|
||||
- Oracle Linux 7+
|
||||
- Fedora 29+
|
||||
- Debian 9+
|
||||
- Ubuntu 16.04+
|
||||
- Linux Mint 18+
|
||||
- openSUSE 15+
|
||||
- SUSE Enterprise Linux (SLES) 12 SP2+
|
||||
|
||||
## Install .Net Core 3.x Linux Dependencies
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
|
||||
## Supported Versions
|
||||
|
||||
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#macos)."
|
||||
- macOS High Sierra (10.13) and later versions
|
||||
- x64 and arm64 (Apple Silicon)
|
||||
|
||||
## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore30)
|
||||
|
||||
@@ -2,6 +2,11 @@
|
||||
|
||||
## Supported Versions
|
||||
|
||||
Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#windows)."
|
||||
- Windows 7 64-bit
|
||||
- Windows 8.1 64-bit
|
||||
- Windows 10 64-bit
|
||||
- Windows Server 2012 R2 64-bit
|
||||
- Windows Server 2016 64-bit
|
||||
- Windows Server 2019 64-bit
|
||||
|
||||
## [More .NET Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30)
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
# Source: https://github.com/dotnet/dotnet-docker
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-jammy AS build
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy as build
|
||||
|
||||
ARG TARGETOS
|
||||
ARG TARGETARCH
|
||||
ARG RUNNER_VERSION
|
||||
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.7.0
|
||||
ARG DOCKER_VERSION=29.0.2
|
||||
ARG BUILDX_VERSION=0.30.1
|
||||
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.5.0
|
||||
ARG DOCKER_VERSION=24.0.6
|
||||
ARG BUILDX_VERSION=0.11.2
|
||||
|
||||
RUN apt update -y && apt install curl unzip -y
|
||||
|
||||
@@ -21,10 +21,6 @@ RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-c
|
||||
&& unzip ./runner-container-hooks.zip -d ./k8s \
|
||||
&& rm runner-container-hooks.zip
|
||||
|
||||
RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-container-hooks/releases/download/v0.8.0/actions-runner-hooks-k8s-0.8.0.zip \
|
||||
&& unzip ./runner-container-hooks.zip -d ./k8s-novolume \
|
||||
&& rm runner-container-hooks.zip
|
||||
|
||||
RUN export RUNNER_ARCH=${TARGETARCH} \
|
||||
&& if [ "$RUNNER_ARCH" = "amd64" ]; then export DOCKER_ARCH=x86_64 ; fi \
|
||||
&& if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
|
||||
@@ -33,25 +29,20 @@ RUN export RUNNER_ARCH=${TARGETARCH} \
|
||||
&& rm -rf docker.tgz \
|
||||
&& mkdir -p /usr/local/lib/docker/cli-plugins \
|
||||
&& curl -fLo /usr/local/lib/docker/cli-plugins/docker-buildx \
|
||||
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
|
||||
"https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \
|
||||
&& chmod +x /usr/local/lib/docker/cli-plugins/docker-buildx
|
||||
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-jammy
|
||||
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
ENV RUNNER_MANUALLY_TRAP_SIG=1
|
||||
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
|
||||
ENV ImageOS=ubuntu22
|
||||
|
||||
# 'gpg-agent' and 'software-properties-common' are needed for the 'add-apt-repository' command that follows
|
||||
RUN apt update -y \
|
||||
&& apt install -y --no-install-recommends sudo lsb-release gpg-agent software-properties-common curl jq unzip \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Configure git-core/ppa based on guidance here: https://git-scm.com/download/linux
|
||||
RUN add-apt-repository ppa:git-core/ppa \
|
||||
&& apt update -y \
|
||||
&& apt install -y git \
|
||||
RUN apt-get update -y \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
sudo \
|
||||
lsb-release \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN adduser --disabled-password --gecos "" --uid 1001 runner \
|
||||
@@ -59,8 +50,7 @@ RUN adduser --disabled-password --gecos "" --uid 1001 runner \
|
||||
&& usermod -aG sudo runner \
|
||||
&& usermod -aG docker runner \
|
||||
&& echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
|
||||
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers \
|
||||
&& chmod 777 /home/runner
|
||||
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers
|
||||
|
||||
WORKDIR /home/runner
|
||||
|
||||
|
||||
@@ -1,42 +1,30 @@
|
||||
## What's Changed
|
||||
* Custom Image: Preflight checks by @lawrencegripper in https://github.com/actions/runner/pull/4081
|
||||
* Update dotnet sdk to latest version @8.0.415 by @github-actions[bot] in https://github.com/actions/runner/pull/4080
|
||||
* Link to an extant discussion category by @jsoref in https://github.com/actions/runner/pull/4084
|
||||
* Improve logic around decide IsHostedServer. by @TingluoHuang in https://github.com/actions/runner/pull/4086
|
||||
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4093
|
||||
* Compare updated template evaluator by @ericsciple in https://github.com/actions/runner/pull/4092
|
||||
* fix(dockerfile): set more lenient permissions on /home/runner by @caxu-rh in https://github.com/actions/runner/pull/4083
|
||||
* Add support for libicu73-76 for newer Debian/Ubuntu versions by @lets-build-an-ocean in https://github.com/actions/runner/pull/4098
|
||||
* Bump actions/download-artifact from 5 to 6 by @dependabot[bot] in https://github.com/actions/runner/pull/4089
|
||||
* Bump actions/upload-artifact from 4 to 5 by @dependabot[bot] in https://github.com/actions/runner/pull/4088
|
||||
* Bump Azure.Storage.Blobs from 12.25.1 to 12.26.0 by @dependabot[bot] in https://github.com/actions/runner/pull/4077
|
||||
* Only start runner after network is online by @dupondje in https://github.com/actions/runner/pull/4094
|
||||
* Retry http error related to DNS resolution failure. by @TingluoHuang in https://github.com/actions/runner/pull/4110
|
||||
* Update Docker to v29.0.1 and Buildx to v0.30.0 by @github-actions[bot] in https://github.com/actions/runner/pull/4114
|
||||
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4115
|
||||
* Update dotnet sdk to latest version @8.0.416 by @github-actions[bot] in https://github.com/actions/runner/pull/4116
|
||||
* Compare updated workflow parser for ActionManifestManager by @ericsciple in https://github.com/actions/runner/pull/4111
|
||||
* Bump npm pkg version for hashFiles. by @TingluoHuang in https://github.com/actions/runner/pull/4122
|
||||
* Trim whitespace in `./Misc/contentHash/dotnetRuntime/*` by @TingluoHuang in https://github.com/actions/runner/pull/2915
|
||||
* Send os and arch during long poll by @luketomlinson in https://github.com/actions/runner/pull/2913
|
||||
* Revert "Update default version to node20 (#2844)" by @takost in https://github.com/actions/runner/pull/2918
|
||||
* Fix telemetry publish from JobServerQueue. by @TingluoHuang in https://github.com/actions/runner/pull/2919
|
||||
* Use block blob instead of append blob by @yacaovsnc in https://github.com/actions/runner/pull/2924
|
||||
* Provide detail info on untar failures. by @TingluoHuang in https://github.com/actions/runner/pull/2939
|
||||
* Bump node.js to 20.8.1 by @TingluoHuang in https://github.com/actions/runner/pull/2945
|
||||
* Update dotnet sdk to latest version @6.0.415 by @github-actions in https://github.com/actions/runner/pull/2929
|
||||
* Fix typo in log strings by @rajbos in https://github.com/actions/runner/pull/2695
|
||||
* feat: add support of arm64 arch runners in service creation script by @tuxity in https://github.com/actions/runner/pull/2606
|
||||
* Add `buildx` to images by @ajschmidt8 in https://github.com/actions/runner/pull/2901
|
||||
|
||||
## New Contributors
|
||||
* @lawrencegripper made their first contribution in https://github.com/actions/runner/pull/4081
|
||||
* @caxu-rh made their first contribution in https://github.com/actions/runner/pull/4083
|
||||
* @lets-build-an-ocean made their first contribution in https://github.com/actions/runner/pull/4098
|
||||
* @dupondje made their first contribution in https://github.com/actions/runner/pull/4094
|
||||
* @tuxity made their first contribution in https://github.com/actions/runner/pull/2606
|
||||
|
||||
**Full Changelog**: https://github.com/actions/runner/compare/v2.329.0...v2.330.0
|
||||
**Full Changelog**: https://github.com/actions/runner/compare/v2.310.2...v2.311.0
|
||||
|
||||
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
|
||||
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
|
||||
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
|
||||
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
|
||||
See https://docs.github.com/en/enterprise-cloud@latest/actions/hosting-your-own-runners/adding-self-hosted-runners_
|
||||
|
||||
## Windows x64
|
||||
|
||||
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
|
||||
|
||||
The following snippet needs to be run in `powershell`:
|
||||
|
||||
```powershell
|
||||
``` powershell
|
||||
# Create a folder under the drive root
|
||||
mkdir \actions-runner ; cd \actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -46,13 +34,13 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
|
||||
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
|
||||
```
|
||||
|
||||
## Windows arm64
|
||||
## [Pre-release] Windows arm64
|
||||
**Warning:** Windows arm64 runners are currently in preview status and use [unofficial versions of nodejs](https://unofficial-builds.nodejs.org/). They are not intended for production workflows.
|
||||
|
||||
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
|
||||
|
||||
The following snippet needs to be run in `powershell`:
|
||||
|
||||
```powershell
|
||||
``` powershell
|
||||
# Create a folder under the drive root
|
||||
mkdir \actions-runner ; cd \actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -64,7 +52,7 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
|
||||
|
||||
## OSX x64
|
||||
|
||||
```bash
|
||||
``` bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -75,7 +63,7 @@ tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
||||
|
||||
## OSX arm64 (Apple silicon)
|
||||
|
||||
```bash
|
||||
``` bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -86,7 +74,7 @@ tar xzf ./actions-runner-osx-arm64-<RUNNER_VERSION>.tar.gz
|
||||
|
||||
## Linux x64
|
||||
|
||||
```bash
|
||||
``` bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -97,7 +85,7 @@ tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
||||
|
||||
## Linux arm64
|
||||
|
||||
```bash
|
||||
``` bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -108,7 +96,7 @@ tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
||||
|
||||
## Linux arm
|
||||
|
||||
```bash
|
||||
``` bash
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
# Download the latest runner package
|
||||
@@ -118,7 +106,6 @@ tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
||||
```
|
||||
|
||||
## Using your self hosted runner
|
||||
|
||||
For additional details about configuring, running, or shutting down the runner please check out our [product docs.](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/adding-self-hosted-runners)
|
||||
|
||||
## SHA-256 Checksums
|
||||
@@ -132,3 +119,27 @@ The SHA-256 checksums for the packages included in this build are shown below:
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->
|
||||
|
||||
- actions-runner-win-x64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-x64_noexternals --><WIN_X64_SHA_NOEXTERNALS><!-- END SHA win-x64_noexternals -->
|
||||
- actions-runner-win-arm64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-arm64_noexternals --><WIN_ARM64_SHA_NOEXTERNALS><!-- END SHA win-arm64_noexternals -->
|
||||
- actions-runner-osx-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noexternals --><OSX_X64_SHA_NOEXTERNALS><!-- END SHA osx-x64_noexternals -->
|
||||
- actions-runner-osx-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noexternals --><OSX_ARM64_SHA_NOEXTERNALS><!-- END SHA osx-arm64_noexternals -->
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noexternals --><LINUX_X64_SHA_NOEXTERNALS><!-- END SHA linux-x64_noexternals -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noexternals --><LINUX_ARM64_SHA_NOEXTERNALS><!-- END SHA linux-arm64_noexternals -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noexternals --><LINUX_ARM_SHA_NOEXTERNALS><!-- END SHA linux-arm_noexternals -->
|
||||
|
||||
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-x64_noruntime --><WIN_X64_SHA_NORUNTIME><!-- END SHA win-x64_noruntime -->
|
||||
- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-arm64_noruntime --><WIN_ARM64_SHA_NORUNTIME><!-- END SHA win-arm64_noruntime -->
|
||||
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-x64_noruntime --><OSX_X64_SHA_NORUNTIME><!-- END SHA osx-x64_noruntime -->
|
||||
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-arm64_noruntime --><OSX_ARM64_SHA_NORUNTIME><!-- END SHA osx-arm64_noruntime -->
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-x64_noruntime --><LINUX_X64_SHA_NORUNTIME><!-- END SHA linux-x64_noruntime -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm64_noruntime --><LINUX_ARM64_SHA_NORUNTIME><!-- END SHA linux-arm64_noruntime -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm_noruntime --><LINUX_ARM_SHA_NORUNTIME><!-- END SHA linux-arm_noruntime -->
|
||||
|
||||
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-x64_noruntime_noexternals --><WIN_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-x64_noruntime_noexternals -->
|
||||
- actions-runner-win-arm64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-arm64_noruntime_noexternals --><WIN_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-arm64_noruntime_noexternals -->
|
||||
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noruntime_noexternals --><OSX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-x64_noruntime_noexternals -->
|
||||
- actions-runner-osx-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-arm64_noruntime_noexternals --><OSX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-arm64_noruntime_noexternals -->
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noruntime_noexternals --><LINUX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-x64_noruntime_noexternals -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noruntime_noexternals --><LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm64_noruntime_noexternals -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noruntime_noexternals --><LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm_noruntime_noexternals -->
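To verify a downloaded package against its published checksum, a command along these lines works on Linux and macOS (the placeholders are the same templated values used above; substitute the real version and hash from the release):

```bash
# Example for the linux-x64 package — note the two spaces between hash and file name
# (use `sha256sum -c` instead if `shasum` is unavailable)
echo "<LINUX_X64_SHA>  actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz" | shasum -a 256 -c
```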
|
||||
|
||||
@@ -57,13 +57,4 @@
|
||||
<PropertyGroup>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
|
||||
<PropertyGroup>
|
||||
<!-- Enable NuGet package auditing -->
|
||||
<NuGetAudit>true</NuGetAudit>
|
||||
<!-- Audit direct and transitive packages -->
|
||||
<NuGetAuditMode>all</NuGetAuditMode>
|
||||
<!-- Report low, moderate, high and critical advisories -->
|
||||
<NuGetAuditLevel>moderate</NuGetAuditLevel>
|
||||
</PropertyGroup>
|
||||
</Project>
|
||||
|
||||
1
src/Misc/contentHash/dotnetRuntime/linux-arm
Normal file
@@ -0,0 +1 @@
|
||||
531b31914e525ecb12cc5526415bc70a112ebc818f877347af1a231011f539c5
|
||||
1
src/Misc/contentHash/dotnetRuntime/linux-arm64
Normal file
@@ -0,0 +1 @@
|
||||
722dd5fa5ecc207fcccf67f6e502d689f2119d8117beff2041618fba17dc66a4
|
||||
1
src/Misc/contentHash/dotnetRuntime/linux-x64
Normal file
@@ -0,0 +1 @@
|
||||
8ca75c76e15ab9dc7fe49a66c5c74e171e7fabd5d26546fda8931bd11bff30f9
|
||||
1
src/Misc/contentHash/dotnetRuntime/osx-arm64
Normal file
@@ -0,0 +1 @@
|
||||
70496eb1c99b39b3373b5088c95a35ebbaac1098e6c47c8aab94771f3ffbf501
|
||||
1
src/Misc/contentHash/dotnetRuntime/osx-x64
Normal file
@@ -0,0 +1 @@
|
||||
4f8d48727d535daabcaec814e0dafb271c10625366c78e7e022ca7477a73023f
|
||||
1
src/Misc/contentHash/dotnetRuntime/win-arm64
Normal file
@@ -0,0 +1 @@
|
||||
d54d7428f2b9200a0030365a6a4e174e30a1b29b922f8254dffb2924bd09549d
|
||||
1
src/Misc/contentHash/dotnetRuntime/win-x64
Normal file
@@ -0,0 +1 @@
|
||||
eaa939c45307f46b7003902255b3a2a09287215d710984107667e03ac493eb26
|
||||
1
src/Misc/contentHash/externals/linux-arm
vendored
Normal file
@@ -0,0 +1 @@
|
||||
4bf3e1af0d482af1b2eaf9f08250248a8c1aea8ec20a3c5be116d58cdd930009
|
||||
1
src/Misc/contentHash/externals/linux-arm64
vendored
Normal file
@@ -0,0 +1 @@
|
||||
ec1719a8cb4d8687328aa64f4aa7c4e3498a715d8939117874782e3e6e63a14b
|
||||
1
src/Misc/contentHash/externals/linux-x64
vendored
Normal file
@@ -0,0 +1 @@
|
||||
50538de29f173bb73f708c4ed2c8328a62b8795829b97b2a6cb57197e2305287
|
||||
1
src/Misc/contentHash/externals/osx-arm64
vendored
Normal file
@@ -0,0 +1 @@
|
||||
a0a96cbb7593643b69e669bf14d7b29b7f27800b3a00bb3305aebe041456c701
|
||||
1
src/Misc/contentHash/externals/osx-x64
vendored
Normal file
@@ -0,0 +1 @@
|
||||
6255b22692779467047ecebd60ad46984866d75cdfe10421d593a7b51d620b09
|
||||
1
src/Misc/contentHash/externals/win-arm64
vendored
Normal file
@@ -0,0 +1 @@
|
||||
6ff1abd055dc35bfbf06f75c2f08908f660346f66ad1d8f81c910068e9ba029d
|
||||
1
src/Misc/contentHash/externals/win-x64
vendored
Normal file
@@ -0,0 +1 @@
|
||||
433a6d748742d12abd20dc2a79b62ac3d9718ae47ef26f8e84dc8c180eea3659
|
||||
1482
src/Misc/dotnet-install.ps1
vendored
File diff suppressed because it is too large
1272
src/Misc/dotnet-install.sh
vendored
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"plugins": ["@typescript-eslint", "@stylistic"],
|
||||
"plugins": ["@typescript-eslint"],
|
||||
"extends": ["plugin:github/recommended"],
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
@@ -26,7 +26,7 @@
|
||||
],
|
||||
"camelcase": "off",
|
||||
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
|
||||
"@stylistic/func-call-spacing": ["error", "never"],
|
||||
"@typescript-eslint/func-call-spacing": ["error", "never"],
|
||||
"@typescript-eslint/no-array-constructor": "error",
|
||||
"@typescript-eslint/no-empty-interface": "error",
|
||||
"@typescript-eslint/no-explicit-any": "error",
|
||||
@@ -47,8 +47,8 @@
|
||||
"@typescript-eslint/promise-function-async": "error",
|
||||
"@typescript-eslint/require-array-sort-compare": "error",
|
||||
"@typescript-eslint/restrict-plus-operands": "error",
|
||||
"@stylistic/semi": ["error", "never"],
|
||||
"@stylistic/type-annotation-spacing": "error",
|
||||
"@typescript-eslint/semi": ["error", "never"],
|
||||
"@typescript-eslint/type-annotation-spacing": "error",
|
||||
"@typescript-eslint/unbound-method": "error",
|
||||
"filenames/match-regex" : "off",
|
||||
"github/no-then" : 1, // warning
|
||||
|
||||
2147
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
File diff suppressed because it is too large
@@ -10,7 +10,8 @@
|
||||
"lint": "eslint src/**/*.ts",
|
||||
"pack": "ncc build -o ../../layoutbin/hashFiles",
|
||||
"all": "npm run format && npm run lint && npm run build && npm run pack",
|
||||
"prepare": "cd ../../../../ && husky"
|
||||
"prepare": "cd ../../../../ && husky install"
|
||||
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -35,17 +36,16 @@
|
||||
"@actions/glob": "^0.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@stylistic/eslint-plugin": "^3.1.0",
|
||||
"@types/node": "^22.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.0.0",
|
||||
"@typescript-eslint/parser": "^8.0.0",
|
||||
"@vercel/ncc": "^0.38.3",
|
||||
"@types/node": "^20.6.2",
|
||||
"@typescript-eslint/eslint-plugin": "^6.7.2",
|
||||
"@typescript-eslint/parser": "^6.7.2",
|
||||
"@vercel/ncc": "^0.38.0",
|
||||
"eslint": "^8.47.0",
|
||||
"eslint-plugin-github": "^4.10.2",
|
||||
"eslint-plugin-github": "^4.10.0",
|
||||
"eslint-plugin-prettier": "^5.0.0",
|
||||
"husky": "^9.1.7",
|
||||
"lint-staged": "^15.5.0",
|
||||
"prettier": "^3.0.3",
|
||||
"typescript": "^5.9.2"
|
||||
"typescript": "^5.2.2",
|
||||
"husky": "^8.0.3",
|
||||
"lint-staged": "^14.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,11 +3,12 @@ PACKAGERUNTIME=$1
|
||||
PRECACHE=$2
|
||||
|
||||
NODE_URL=https://nodejs.org/dist
|
||||
UNOFFICIAL_NODE_URL=https://unofficial-builds.nodejs.org/download/release
|
||||
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
|
||||
# When you update Node versions you must also create a new release of alpine_nodejs at that updated version.
|
||||
# Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started
|
||||
NODE20_VERSION="20.19.6"
|
||||
NODE24_VERSION="24.11.1"
|
||||
NODE16_VERSION="16.20.2"
|
||||
NODE20_VERSION="20.8.1"
|
||||
# used only for win-arm64, remove node16 unofficial version when official version is available
|
||||
NODE16_UNOFFICIAL_VERSION="16.20.0"
|
||||
|
||||
get_abs_path() {
|
||||
# exploits the fact that pwd will print abs path when no args
|
||||
@@ -62,16 +63,17 @@ function acquireExternalTool() {
|
||||
echo "Curl version: $CURL_VERSION"
|
||||
|
||||
# curl -f Fail silently (no output at all) on HTTP errors (H)
|
||||
# -k Allow connections to SSL sites without certs (H)
|
||||
# -S Show error. With -s, make curl show errors when they occur
|
||||
# -L Follow redirects (H)
|
||||
# -o FILE Write to FILE instead of stdout
|
||||
# --retry 3 Retries transient errors 3 times (timeouts, 5xx)
|
||||
if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then
|
||||
# Curl version is less than or equal to 7.71.0, skipping retry-all-errors flag
|
||||
curl -fSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
curl -fkSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
else
|
||||
# Curl version is greater than 7.71.0, running curl with --retry-all-errors flag
|
||||
curl -fSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
curl -fkSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
|
||||
fi
|
||||
|
||||
# Move the partial file to the download target.
|
||||
@@ -138,10 +140,10 @@ function acquireExternalTool() {
|
||||
|
||||
# Download the external tools only for Windows.
|
||||
if [[ "$PACKAGERUNTIME" == "win-x64" || "$PACKAGERUNTIME" == "win-x86" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.exe" node24/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.lib" node24/bin
|
||||
if [[ "$PRECACHE" != "" ]]; then
|
||||
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
|
||||
fi
|
||||
@@ -150,10 +152,10 @@ fi
|
||||
# Download the external tools only for Windows.
|
||||
if [[ "$PACKAGERUNTIME" == "win-arm64" ]]; then
|
||||
# todo: replace these with official release when available
|
||||
acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_UNOFFICIAL_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
|
||||
acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_UNOFFICIAL_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.exe" node24/bin
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.lib" node24/bin
|
||||
if [[ "$PRECACHE" != "" ]]; then
|
||||
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
|
||||
fi
|
||||
@@ -161,29 +163,30 @@ fi
|
||||
|
||||
# Download the external tools only for OSX.
|
||||
if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-x64.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-x64.tar.gz" node20 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-darwin-x64.tar.gz" node24 fix_nested_dir
|
||||
fi
|
||||
|
||||
if [[ "$PACKAGERUNTIME" == "osx-arm64" ]]; then
|
||||
# node.js v12 doesn't support macOS on arm64.
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-arm64.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-arm64.tar.gz" node20 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-darwin-arm64.tar.gz" node24 fix_nested_dir
|
||||
fi
|
||||
|
||||
# Download the external tools for Linux PACKAGERUNTIMEs.
|
||||
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_ALPINE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-x64.tar.gz" node20 fix_nested_dir
|
||||
acquireExternalTool "$NODE_ALPINE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-linux-x64.tar.gz" node24 fix_nested_dir
|
||||
acquireExternalTool "$NODE_ALPINE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-alpine-x64.tar.gz" node24_alpine
|
||||
fi
|
||||
|
||||
if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-arm64.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-arm64.tar.gz" node20 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-linux-arm64.tar.gz" node24 fix_nested_dir
|
||||
fi
|
||||
|
||||
if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then
|
||||
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-armv7l.tar.gz" node16 fix_nested_dir
|
||||
acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-armv7l.tar.gz" node20 fix_nested_dir
|
||||
fi
|
||||
|
||||
@@ -114,11 +114,6 @@ var runService = function () {
|
||||
);
|
||||
stopping = true;
|
||||
}
|
||||
} else if (code === 5) {
|
||||
console.log(
|
||||
"Runner listener exit with Session Conflict error, stop the service, no retry needed."
|
||||
);
|
||||
stopping = true;
|
||||
} else {
|
||||
var messagePrefix = "Runner listener exit with undefined return code";
|
||||
unknownFailureRetryCount++;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[Unit]
|
||||
Description={{Description}}
|
||||
After=network-online.target
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
ExecStart={{RunnerRoot}}/runsvc.sh
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
/******/ (() => { // webpackBootstrap
|
||||
/******/ var __webpack_modules__ = ({
|
||||
|
||||
/***/ 4711:
|
||||
/***/ 2627:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -22,23 +22,13 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
@@ -56,15 +46,15 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
const crypto = __importStar(__nccwpck_require__(6982));
|
||||
const fs = __importStar(__nccwpck_require__(9896));
|
||||
const glob = __importStar(__nccwpck_require__(7206));
|
||||
const path = __importStar(__nccwpck_require__(6928));
|
||||
const stream = __importStar(__nccwpck_require__(2203));
|
||||
const util = __importStar(__nccwpck_require__(9023));
|
||||
const crypto = __importStar(__nccwpck_require__(6113));
|
||||
const fs = __importStar(__nccwpck_require__(7147));
|
||||
const glob = __importStar(__nccwpck_require__(8090));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const stream = __importStar(__nccwpck_require__(2781));
|
||||
const util = __importStar(__nccwpck_require__(3837));
|
||||
function run() {
|
||||
var _a, e_1, _b, _c;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
var _a, e_1, _b, _c;
|
||||
// arg0 -> node
|
||||
// arg1 -> hashFiles.js
|
||||
// env[followSymbolicLinks] = true/null
|
||||
@@ -138,7 +128,7 @@ function run() {
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 4914:
|
||||
/***/ 7351:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -164,8 +154,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.issue = exports.issueCommand = void 0;
|
||||
const os = __importStar(__nccwpck_require__(857));
|
||||
const utils_1 = __nccwpck_require__(302);
|
||||
const os = __importStar(__nccwpck_require__(2037));
|
||||
const utils_1 = __nccwpck_require__(5278);
|
||||
/**
|
||||
* Commands
|
||||
*
|
||||
@@ -237,7 +227,7 @@ function escapeProperty(s) {
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 7484:
|
||||
/***/ 2186:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -272,12 +262,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
|
||||
const command_1 = __nccwpck_require__(4914);
|
||||
const file_command_1 = __nccwpck_require__(4753);
|
||||
const utils_1 = __nccwpck_require__(302);
|
||||
const os = __importStar(__nccwpck_require__(857));
|
||||
const path = __importStar(__nccwpck_require__(6928));
|
||||
const oidc_utils_1 = __nccwpck_require__(5306);
|
||||
const command_1 = __nccwpck_require__(7351);
|
||||
const file_command_1 = __nccwpck_require__(717);
|
||||
const utils_1 = __nccwpck_require__(5278);
|
||||
const os = __importStar(__nccwpck_require__(2037));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const oidc_utils_1 = __nccwpck_require__(8041);
|
||||
/**
|
||||
* The code to exit an action
|
||||
*/
|
||||
@@ -562,17 +552,17 @@ exports.getIDToken = getIDToken;
|
||||
/**
|
||||
* Summary exports
|
||||
*/
|
||||
var summary_1 = __nccwpck_require__(1847);
|
||||
var summary_1 = __nccwpck_require__(1327);
|
||||
Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
|
||||
/**
|
||||
* @deprecated use core.summary
|
||||
*/
|
||||
var summary_2 = __nccwpck_require__(1847);
|
||||
var summary_2 = __nccwpck_require__(1327);
|
||||
Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
|
||||
/**
|
||||
* Path exports
|
||||
*/
|
||||
var path_utils_1 = __nccwpck_require__(1976);
|
||||
var path_utils_1 = __nccwpck_require__(2981);
|
||||
Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
|
||||
Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
|
||||
Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
|
||||
@@ -580,7 +570,7 @@ Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: funct
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 4753:
|
||||
/***/ 717:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -609,10 +599,10 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
|
||||
// We use any as a valid input type
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
const fs = __importStar(__nccwpck_require__(9896));
|
||||
const os = __importStar(__nccwpck_require__(857));
|
||||
const uuid_1 = __nccwpck_require__(2048);
|
||||
const utils_1 = __nccwpck_require__(302);
|
||||
const fs = __importStar(__nccwpck_require__(7147));
|
||||
const os = __importStar(__nccwpck_require__(2037));
|
||||
const uuid_1 = __nccwpck_require__(5840);
|
||||
const utils_1 = __nccwpck_require__(5278);
|
||||
function issueFileCommand(command, message) {
|
||||
const filePath = process.env[`GITHUB_${command}`];
|
||||
if (!filePath) {
|
||||
@@ -645,7 +635,7 @@ exports.prepareKeyValueMessage = prepareKeyValueMessage;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 5306:
|
||||
/***/ 8041:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -661,9 +651,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.OidcClient = void 0;
|
||||
const http_client_1 = __nccwpck_require__(4844);
|
||||
const auth_1 = __nccwpck_require__(4552);
|
||||
const core_1 = __nccwpck_require__(7484);
|
||||
const http_client_1 = __nccwpck_require__(6255);
|
||||
const auth_1 = __nccwpck_require__(5526);
|
||||
const core_1 = __nccwpck_require__(2186);
|
||||
class OidcClient {
|
||||
static createHttpClient(allowRetry = true, maxRetry = 10) {
|
||||
const requestOptions = {
|
||||
@@ -729,7 +719,7 @@ exports.OidcClient = OidcClient;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 1976:
|
||||
/***/ 2981:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -755,7 +745,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
|
||||
const path = __importStar(__nccwpck_require__(6928));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
/**
|
||||
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
|
||||
* replaced with /.
|
||||
@@ -794,7 +784,7 @@ exports.toPlatformPath = toPlatformPath;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 1847:
|
||||
/***/ 1327:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -810,8 +800,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
|
||||
const os_1 = __nccwpck_require__(857);
|
||||
const fs_1 = __nccwpck_require__(9896);
|
||||
const os_1 = __nccwpck_require__(2037);
|
||||
const fs_1 = __nccwpck_require__(7147);
|
||||
const { access, appendFile, writeFile } = fs_1.promises;
|
||||
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
|
||||
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
|
||||
@@ -1084,7 +1074,7 @@ exports.summary = _summary;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 302:
|
||||
/***/ 5278:
|
||||
/***/ ((__unused_webpack_module, exports) => {
|
||||
|
||||
"use strict";
|
||||
@@ -1131,7 +1121,7 @@ exports.toCommandProperties = toCommandProperties;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 7206:
|
||||
/***/ 8090:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -1147,8 +1137,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.hashFiles = exports.create = void 0;
|
||||
const internal_globber_1 = __nccwpck_require__(103);
|
||||
const internal_hash_files_1 = __nccwpck_require__(3608);
|
||||
const internal_globber_1 = __nccwpck_require__(8298);
|
||||
const internal_hash_files_1 = __nccwpck_require__(2448);
|
||||
/**
|
||||
* Constructs a globber
|
||||
*
|
||||
@@ -1184,7 +1174,7 @@ exports.hashFiles = hashFiles;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 8164:
|
||||
/***/ 1026:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -1210,7 +1200,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.getOptions = void 0;
|
||||
const core = __importStar(__nccwpck_require__(7484));
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
/**
|
||||
* Returns a copy with defaults filled in.
|
||||
*/
|
||||
@@ -1246,7 +1236,7 @@ exports.getOptions = getOptions;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 103:
|
||||
/***/ 8298:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -1300,14 +1290,14 @@ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _ar
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.DefaultGlobber = void 0;
|
||||
const core = __importStar(__nccwpck_require__(7484));
|
||||
const fs = __importStar(__nccwpck_require__(9896));
|
||||
const globOptionsHelper = __importStar(__nccwpck_require__(8164));
|
||||
const path = __importStar(__nccwpck_require__(6928));
|
||||
const patternHelper = __importStar(__nccwpck_require__(8891));
|
||||
const internal_match_kind_1 = __nccwpck_require__(2644);
|
||||
const internal_pattern_1 = __nccwpck_require__(5370);
|
||||
const internal_search_state_1 = __nccwpck_require__(9890);
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
const fs = __importStar(__nccwpck_require__(7147));
|
||||
const globOptionsHelper = __importStar(__nccwpck_require__(1026));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const patternHelper = __importStar(__nccwpck_require__(9005));
|
||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
||||
const internal_pattern_1 = __nccwpck_require__(4536);
|
||||
const internal_search_state_1 = __nccwpck_require__(9117);
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
class DefaultGlobber {
|
||||
constructor(options) {
|
||||
@@ -1488,7 +1478,7 @@ exports.DefaultGlobber = DefaultGlobber;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 3608:
|
||||
/***/ 2448:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -1530,12 +1520,12 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.hashFiles = void 0;
|
||||
const crypto = __importStar(__nccwpck_require__(6982));
|
||||
const core = __importStar(__nccwpck_require__(7484));
|
||||
const fs = __importStar(__nccwpck_require__(9896));
|
||||
const stream = __importStar(__nccwpck_require__(2203));
|
||||
const util = __importStar(__nccwpck_require__(9023));
|
||||
const path = __importStar(__nccwpck_require__(6928));
|
||||
const crypto = __importStar(__nccwpck_require__(6113));
|
||||
const core = __importStar(__nccwpck_require__(2186));
|
||||
const fs = __importStar(__nccwpck_require__(7147));
|
||||
const stream = __importStar(__nccwpck_require__(2781));
|
||||
const util = __importStar(__nccwpck_require__(3837));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
function hashFiles(globber, currentWorkspace, verbose = false) {
|
||||
var e_1, _a;
|
||||
var _b;
|
||||
@@ -1592,7 +1582,7 @@ exports.hashFiles = hashFiles;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 2644:
|
||||
/***/ 1063:
|
||||
/***/ ((__unused_webpack_module, exports) => {
|
||||
|
||||
"use strict";
|
||||
@@ -1617,7 +1607,7 @@ var MatchKind;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 4138:
|
||||
/***/ 1849:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -1646,8 +1636,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
|
||||
const path = __importStar(__nccwpck_require__(6928));
|
||||
const assert_1 = __importDefault(__nccwpck_require__(2613));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/**
|
||||
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
|
||||
@@ -1822,7 +1812,7 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 6617:
|
||||
/***/ 6836:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -1851,9 +1841,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.Path = void 0;
|
||||
const path = __importStar(__nccwpck_require__(6928));
|
||||
const pathHelper = __importStar(__nccwpck_require__(4138));
|
||||
const assert_1 = __importDefault(__nccwpck_require__(2613));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const pathHelper = __importStar(__nccwpck_require__(1849));
|
||||
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/**
|
||||
* Helper class for parsing paths into segments
|
||||
@@ -1942,7 +1932,7 @@ exports.Path = Path;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 8891:
|
||||
/***/ 9005:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -1968,8 +1958,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
|
||||
const pathHelper = __importStar(__nccwpck_require__(4138));
|
||||
const internal_match_kind_1 = __nccwpck_require__(2644);
|
||||
const pathHelper = __importStar(__nccwpck_require__(1849));
|
||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
/**
|
||||
* Given an array of patterns, returns an array of paths to search.
|
||||
@@ -2043,7 +2033,7 @@ exports.partialMatch = partialMatch;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 5370:
|
||||
/***/ 4536:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -2072,13 +2062,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.Pattern = void 0;
|
||||
const os = __importStar(__nccwpck_require__(857));
|
||||
const path = __importStar(__nccwpck_require__(6928));
|
||||
const pathHelper = __importStar(__nccwpck_require__(4138));
|
||||
const assert_1 = __importDefault(__nccwpck_require__(2613));
|
||||
const minimatch_1 = __nccwpck_require__(3772);
|
||||
const internal_match_kind_1 = __nccwpck_require__(2644);
|
||||
const internal_path_1 = __nccwpck_require__(6617);
|
||||
const os = __importStar(__nccwpck_require__(2037));
|
||||
const path = __importStar(__nccwpck_require__(1017));
|
||||
const pathHelper = __importStar(__nccwpck_require__(1849));
|
||||
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
||||
const minimatch_1 = __nccwpck_require__(3973);
|
||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
||||
const internal_path_1 = __nccwpck_require__(6836);
|
||||
const IS_WINDOWS = process.platform === 'win32';
|
||||
class Pattern {
|
||||
constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
|
||||
@@ -2305,7 +2295,7 @@ exports.Pattern = Pattern;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 9890:
|
||||
/***/ 9117:
|
||||
/***/ ((__unused_webpack_module, exports) => {
|
||||
|
||||
"use strict";
|
||||
@@ -2323,7 +2313,7 @@ exports.SearchState = SearchState;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 4552:
|
||||
/***/ 5526:
|
||||
/***/ (function(__unused_webpack_module, exports) {
|
||||
|
||||
"use strict";
|
||||
@@ -2411,7 +2401,7 @@ exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHand
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 4844:
|
||||
/***/ 6255:
|
||||
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||
|
||||
"use strict";
|
||||
@@ -2447,10 +2437,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
|
||||
const http = __importStar(__nccwpck_require__(8611));
|
||||
const https = __importStar(__nccwpck_require__(5692));
|
||||
const pm = __importStar(__nccwpck_require__(4988));
|
||||
const tunnel = __importStar(__nccwpck_require__(770));
|
||||
const http = __importStar(__nccwpck_require__(3685));
|
||||
const https = __importStar(__nccwpck_require__(5687));
|
||||
const pm = __importStar(__nccwpck_require__(9835));
|
||||
const tunnel = __importStar(__nccwpck_require__(4294));
|
||||
var HttpCodes;
|
||||
(function (HttpCodes) {
|
||||
HttpCodes[HttpCodes["OK"] = 200] = "OK";
|
||||
@@ -3036,7 +3026,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 4988:
|
||||
/***/ 9835:
|
||||
/***/ ((__unused_webpack_module, exports) => {
|
||||
|
||||
"use strict";
|
||||
@@ -3125,7 +3115,7 @@ function isLoopbackAddress(host) {
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 9380:
|
||||
/***/ 9417:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -3195,11 +3185,11 @@ function range(a, b, str) {
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 4691:
|
||||
/***/ 3717:
|
||||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||||
|
||||
var concatMap = __nccwpck_require__(7087);
|
||||
var balanced = __nccwpck_require__(9380);
|
||||
var concatMap = __nccwpck_require__(6891);
|
||||
var balanced = __nccwpck_require__(9417);
|
||||
|
||||
module.exports = expandTop;
|
||||
|
||||
@@ -3309,7 +3299,7 @@ function expand(str, isTop) {
|
||||
var isOptions = m.body.indexOf(',') >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
// {a},b}
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
str = m.pre + '{' + m.body + escClose + m.post;
|
||||
return expand(str);
|
||||
}
|
||||
@@ -3403,7 +3393,7 @@ function expand(str, isTop) {
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 7087:
|
||||
/***/ 6891:
|
||||
/***/ ((module) => {
|
||||
|
||||
module.exports = function (xs, fn) {
|
||||
@@ -3423,19 +3413,19 @@ var isArray = Array.isArray || function (xs) {
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 3772:
|
||||
/***/ 3973:
|
||||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||||
|
||||
module.exports = minimatch
|
||||
minimatch.Minimatch = Minimatch
|
||||
|
||||
var path = (function () { try { return __nccwpck_require__(6928) } catch (e) {}}()) || {
|
||||
var path = (function () { try { return __nccwpck_require__(1017) } catch (e) {}}()) || {
|
||||
sep: '/'
|
||||
}
|
||||
minimatch.sep = path.sep
|
||||
|
||||
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
|
||||
var expand = __nccwpck_require__(4691)
|
||||
var expand = __nccwpck_require__(3717)
|
||||
|
||||
var plTypes = {
|
||||
'!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
|
||||
@@ -4377,27 +4367,27 @@ function regExpEscape (s) {
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 770:
|
||||
/***/ 4294:
|
||||
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
||||
|
||||
module.exports = __nccwpck_require__(218);
|
||||
module.exports = __nccwpck_require__(4219);
|
||||
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 218:
|
||||
/***/ 4219:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
|
||||
|
||||
var net = __nccwpck_require__(9278);
|
||||
var tls = __nccwpck_require__(4756);
|
||||
var http = __nccwpck_require__(8611);
|
||||
var https = __nccwpck_require__(5692);
|
||||
var events = __nccwpck_require__(4434);
|
||||
var assert = __nccwpck_require__(2613);
|
||||
var util = __nccwpck_require__(9023);
|
||||
var net = __nccwpck_require__(1808);
|
||||
var tls = __nccwpck_require__(4404);
|
||||
var http = __nccwpck_require__(3685);
|
||||
var https = __nccwpck_require__(5687);
|
||||
var events = __nccwpck_require__(2361);
|
||||
var assert = __nccwpck_require__(9491);
|
||||
var util = __nccwpck_require__(3837);
|
||||
|
||||
|
||||
exports.httpOverHttp = httpOverHttp;
|
||||
@@ -4657,7 +4647,7 @@ exports.debug = debug; // for test
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 2048:
|
||||
/***/ 5840:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -4721,29 +4711,29 @@ Object.defineProperty(exports, "parse", ({
|
||||
}
|
||||
}));
|
||||
|
||||
var _v = _interopRequireDefault(__nccwpck_require__(6415));
|
||||
var _v = _interopRequireDefault(__nccwpck_require__(8628));
|
||||
|
||||
var _v2 = _interopRequireDefault(__nccwpck_require__(1697));
|
||||
var _v2 = _interopRequireDefault(__nccwpck_require__(6409));
|
||||
|
||||
var _v3 = _interopRequireDefault(__nccwpck_require__(4676));
|
||||
var _v3 = _interopRequireDefault(__nccwpck_require__(5122));
|
||||
|
||||
var _v4 = _interopRequireDefault(__nccwpck_require__(9771));
|
||||
var _v4 = _interopRequireDefault(__nccwpck_require__(9120));
|
||||
|
||||
var _nil = _interopRequireDefault(__nccwpck_require__(7723));
|
||||
var _nil = _interopRequireDefault(__nccwpck_require__(5332));
|
||||
|
||||
var _version = _interopRequireDefault(__nccwpck_require__(5868));
|
||||
var _version = _interopRequireDefault(__nccwpck_require__(1595));
|
||||
|
||||
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
|
||||
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
|
||||
|
||||
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
|
||||
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
|
||||
|
||||
var _parse = _interopRequireDefault(__nccwpck_require__(7267));
|
||||
var _parse = _interopRequireDefault(__nccwpck_require__(2746));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 216:
|
||||
/***/ 4569:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -4754,7 +4744,7 @@ Object.defineProperty(exports, "__esModule", ({
|
||||
}));
|
||||
exports["default"] = void 0;
|
||||
|
||||
var _crypto = _interopRequireDefault(__nccwpck_require__(6982));
|
||||
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
@@ -4773,7 +4763,7 @@ exports["default"] = _default;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 7723:
|
||||
/***/ 5332:
|
||||
/***/ ((__unused_webpack_module, exports) => {
|
||||
|
||||
"use strict";
|
||||
@@ -4788,7 +4778,7 @@ exports["default"] = _default;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 7267:
|
||||
/***/ 2746:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -4799,7 +4789,7 @@ Object.defineProperty(exports, "__esModule", ({
|
||||
}));
|
||||
exports["default"] = void 0;
|
||||
|
||||
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
|
||||
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
@@ -4840,7 +4830,7 @@ exports["default"] = _default;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 7879:
|
||||
/***/ 814:
|
||||
/***/ ((__unused_webpack_module, exports) => {
|
||||
|
||||
"use strict";
|
||||
@@ -4855,7 +4845,7 @@ exports["default"] = _default;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 2973:
|
||||
/***/ 807:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -4866,7 +4856,7 @@ Object.defineProperty(exports, "__esModule", ({
|
||||
}));
|
||||
exports["default"] = rng;
|
||||
|
||||
var _crypto = _interopRequireDefault(__nccwpck_require__(6982));
|
||||
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
@@ -4886,7 +4876,7 @@ function rng() {
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 507:
|
||||
/***/ 5274:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -4897,7 +4887,7 @@ Object.defineProperty(exports, "__esModule", ({
|
||||
}));
|
||||
exports["default"] = void 0;
|
||||
|
||||
var _crypto = _interopRequireDefault(__nccwpck_require__(6982));
|
||||
var _crypto = _interopRequireDefault(__nccwpck_require__(6113));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
@@ -4916,7 +4906,7 @@ exports["default"] = _default;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 7597:
|
||||
/***/ 8950:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -4927,7 +4917,7 @@ Object.defineProperty(exports, "__esModule", ({
|
||||
}));
|
||||
exports["default"] = void 0;
|
||||
|
||||
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
|
||||
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
@@ -4962,7 +4952,7 @@ exports["default"] = _default;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 6415:
|
||||
/***/ 8628:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -4973,9 +4963,9 @@ Object.defineProperty(exports, "__esModule", ({
|
||||
}));
|
||||
exports["default"] = void 0;
|
||||
|
||||
var _rng = _interopRequireDefault(__nccwpck_require__(2973));
|
||||
var _rng = _interopRequireDefault(__nccwpck_require__(807));
|
||||
|
||||
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
|
||||
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
@@ -5076,7 +5066,7 @@ exports["default"] = _default;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 1697:
|
||||
/***/ 6409:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5087,9 +5077,9 @@ Object.defineProperty(exports, "__esModule", ({
|
||||
}));
|
||||
exports["default"] = void 0;
|
||||
|
||||
var _v = _interopRequireDefault(__nccwpck_require__(2930));
|
||||
var _v = _interopRequireDefault(__nccwpck_require__(5998));
|
||||
|
||||
var _md = _interopRequireDefault(__nccwpck_require__(216));
|
||||
var _md = _interopRequireDefault(__nccwpck_require__(4569));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
@@ -5099,7 +5089,7 @@ exports["default"] = _default;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 2930:
|
||||
/***/ 5998:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5111,9 +5101,9 @@ Object.defineProperty(exports, "__esModule", ({
|
||||
exports["default"] = _default;
|
||||
exports.URL = exports.DNS = void 0;
|
||||
|
||||
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
|
||||
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
|
||||
|
||||
var _parse = _interopRequireDefault(__nccwpck_require__(7267));
|
||||
var _parse = _interopRequireDefault(__nccwpck_require__(2746));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
@@ -5184,7 +5174,7 @@ function _default(name, version, hashfunc) {
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 4676:
|
||||
/***/ 5122:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5195,9 +5185,9 @@ Object.defineProperty(exports, "__esModule", ({
|
||||
}));
|
||||
exports["default"] = void 0;
|
||||
|
||||
var _rng = _interopRequireDefault(__nccwpck_require__(2973));
|
||||
var _rng = _interopRequireDefault(__nccwpck_require__(807));
|
||||
|
||||
var _stringify = _interopRequireDefault(__nccwpck_require__(7597));
|
||||
var _stringify = _interopRequireDefault(__nccwpck_require__(8950));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
@@ -5228,7 +5218,7 @@ exports["default"] = _default;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 9771:
|
||||
/***/ 9120:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5239,9 +5229,9 @@ Object.defineProperty(exports, "__esModule", ({
|
||||
}));
|
||||
exports["default"] = void 0;
|
||||
|
||||
var _v = _interopRequireDefault(__nccwpck_require__(2930));
|
||||
var _v = _interopRequireDefault(__nccwpck_require__(5998));
|
||||
|
||||
var _sha = _interopRequireDefault(__nccwpck_require__(507));
|
||||
var _sha = _interopRequireDefault(__nccwpck_require__(5274));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
@@ -5251,7 +5241,7 @@ exports["default"] = _default;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 6200:
|
||||
/***/ 6900:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5262,7 +5252,7 @@ Object.defineProperty(exports, "__esModule", ({
|
||||
}));
|
||||
exports["default"] = void 0;
|
||||
|
||||
var _regex = _interopRequireDefault(__nccwpck_require__(7879));
|
||||
var _regex = _interopRequireDefault(__nccwpck_require__(814));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
@@ -5275,7 +5265,7 @@ exports["default"] = _default;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 5868:
|
||||
/***/ 1595:
|
||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5286,7 +5276,7 @@ Object.defineProperty(exports, "__esModule", ({
|
||||
}));
|
||||
exports["default"] = void 0;
|
||||
|
||||
var _validate = _interopRequireDefault(__nccwpck_require__(6200));
|
||||
var _validate = _interopRequireDefault(__nccwpck_require__(6900));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
@@ -5303,7 +5293,7 @@ exports["default"] = _default;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 2613:
|
||||
/***/ 9491:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5311,7 +5301,7 @@ module.exports = require("assert");
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 6982:
|
||||
/***/ 6113:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5319,7 +5309,7 @@ module.exports = require("crypto");
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 4434:
|
||||
/***/ 2361:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5327,7 +5317,7 @@ module.exports = require("events");
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 9896:
|
||||
/***/ 7147:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5335,7 +5325,7 @@ module.exports = require("fs");
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 8611:
|
||||
/***/ 3685:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5343,7 +5333,7 @@ module.exports = require("http");
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 5692:
|
||||
/***/ 5687:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5351,7 +5341,7 @@ module.exports = require("https");
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 9278:
|
||||
/***/ 1808:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5359,7 +5349,7 @@ module.exports = require("net");
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 857:
|
||||
/***/ 2037:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5367,7 +5357,7 @@ module.exports = require("os");
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 6928:
|
||||
/***/ 1017:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5375,7 +5365,7 @@ module.exports = require("path");
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 2203:
|
||||
/***/ 2781:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5383,7 +5373,7 @@ module.exports = require("stream");
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 4756:
|
||||
/***/ 4404:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5391,7 +5381,7 @@ module.exports = require("tls");
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 9023:
|
||||
/***/ 3837:
|
||||
/***/ ((module) => {
|
||||
|
||||
"use strict";
|
||||
@@ -5441,7 +5431,7 @@ module.exports = require("util");
|
||||
/******/ // startup
|
||||
/******/ // Load entry module and return exports
|
||||
/******/ // This entry module is referenced by other modules so it can't be inlined
|
||||
/******/ var __webpack_exports__ = __nccwpck_require__(4711);
|
||||
/******/ var __webpack_exports__ = __nccwpck_require__(2627);
|
||||
/******/ module.exports = __webpack_exports__;
|
||||
/******/
|
||||
/******/ })()
|
||||
|
||||
@@ -110,7 +110,7 @@ then
exit 1
fi

apt_get_with_fallbacks libicu76 libicu75 libicu74 libicu73 libicu72 libicu71 libicu70 libicu69 libicu68 libicu67 libicu66 libicu65 libicu63 libicu60 libicu57 libicu55 libicu52
apt_get_with_fallbacks libicu72 libicu71 libicu70 libicu69 libicu68 libicu67 libicu66 libicu65 libicu63 libicu60 libicu57 libicu55 libicu52
if [ $? -ne 0 ]
then
echo "'$apt_get' failed with exit code '$?'"

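The apt_get_with_fallbacks helper is defined elsewhere in installdependencies.sh and is not part of this hunk; as a rough sketch only, assuming a helper that tries each candidate package until one installs, it could look like:

# Sketch, not the shipped helper: try each libicu candidate until one installs.
# Assumes $apt_get holds the apt-get command the script already resolved.
apt_get_with_fallbacks_sketch() {
    local pkg
    for pkg in "$@"; do
        if $apt_get install -y "$pkg"; then
            return 0
        fi
    done
    return 1
}
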
@@ -10,7 +10,7 @@ if [ -f ".path" ]; then
echo ".path=${PATH}"
fi

nodever="node20"
nodever=${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node16}

# insert anything to setup env when running as a service
# run the host process which keep the listener alive

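The ${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node16} form uses the forced version only when the variable is set and non-empty, otherwise it falls back to the default. A minimal sketch of the same pattern (variable and default names taken from the hunk above, the echo is illustrative):

# Default parameter expansion: honor the override when provided,
# otherwise fall back to the bundled default node version.
# e.g. export GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION=node20 to override
nodever=${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node16}
echo "launching with externals/${nodever}/bin/node"
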
@@ -123,7 +123,7 @@ fi
# fix upgrade issue with macOS when running as a service
attemptedtargetedfix=0
currentplatform=$(uname | awk '{print tolower($0)}')
if [[ "$currentplatform" == 'darwin' && $restartinteractiverunner -eq 0 ]]; then
if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
# We needed a fix for https://github.com/actions/runner/issues/743
# We will recreate the ./externals/nodeXY/bin/node of the past runner version that launched the runnerlistener service
# Otherwise mac gatekeeper kills the processes we spawn on creation as we are running a process with no backing file
@@ -135,23 +135,12 @@ if [[ "$currentplatform" == 'darwin' && $restartinteractiverunner -eq 0 ]]; the
then
# inspect the open file handles to find the node process
# we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks
# Try finding node24 first, then fallback to earlier versions if needed
nodever="node24"
nodever="node16"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node20
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
then
nodever="node20"
nodever="node12"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node16
then
nodever="node16"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
then
nodever="node12"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
fi
fi
fi
if [[ $? -eq 0 && -n "$path" ]]
then
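
The nested fallbacks above probe one node version at a time with lsof. A loop-based sketch of the same search (a hypothetical refactor for illustration, not the shipped script; the exact version list differs between the two sides of this diff, and $procgroup is assumed to be set earlier in update.sh):

# Hypothetical refactor of the nested fallback: probe newest to oldest.
path=""
for nodever in node24 node20 node16 node12; do
    path=$(lsof -a -g "$procgroup" -F n | grep "$nodever/bin/node" | grep externals | tail -1 | cut -c2-)
    if [[ -n "$path" ]]; then
        break   # found the node binary backing the running RunnerService.js
    fi
done
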
@@ -189,19 +178,6 @@ if [[ "$currentplatform" == 'darwin' && $restartinteractiverunner -eq 0 ]]; the
fi
fi

# update runsvc.sh
if [ -f "$rootfolder/runsvc.sh" ]
then
date "+[%F %T-%4N] Update runsvc.sh" >> "$logfile" 2>&1
cat "$rootfolder/bin/runsvc.sh" > "$rootfolder/runsvc.sh"
if [ $? -ne 0 ]
then
date "+[%F %T-%4N] Can't update $rootfolder/runsvc.sh using $rootfolder/bin/runsvc.sh" >> "$logfile" 2>&1
mv -fv "$logfile" "$logfile.failed"
exit 1
fi
fi

date "+[%F %T-%4N] Update succeed" >> "$logfile"

touch update.finished

@@ -49,10 +49,5 @@ if %ERRORLEVEL% EQU 4 (
exit /b 1
)

if %ERRORLEVEL% EQU 5 (
echo "Runner listener exit with Session Conflict error, stop the service, no retry needed."
exit /b 0
)

echo "Exiting after unknown error code: %ERRORLEVEL%"
exit /b 0
@@ -70,9 +70,6 @@ elif [[ $returnCode == 4 ]]; then
"$DIR"/safe_sleep.sh 1
done
exit 2
elif [[ $returnCode == 5 ]]; then
echo "Runner listener exit with Session Conflict error, stop the service, no retry needed."
exit 0
else
echo "Exiting with unknown error code: ${returnCode}"
exit 0

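Both the .cmd and .sh service helpers map listener exit codes to service behaviour. As an illustrative consolidation only (not the shipped script; the code meanings follow the ReturnCode constants later in this diff, and $returnCode is assumed to come from the wait above), the same handling reads compactly as a case statement:

# Illustrative sketch of the return-code dispatch in run-helper.sh.
returnCode=$?   # assumption: set by waiting on the Runner.Listener process
case "$returnCode" in
    2) echo "Retryable error, restarting the listener." ; exit 2 ;;
    5) echo "Runner listener exit with Session Conflict error, stop the service, no retry needed." ; exit 0 ;;
    *) echo "Exiting with unknown error code: ${returnCode}" ; exit 0 ;;
esac
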
@@ -38,7 +38,7 @@ runWithManualTrap() {
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
"$DIR"/run-helper.sh $* &
PID=$!
wait $PID
wait -f $PID
returnCode=$?
if [[ $returnCode -eq 2 ]]; then
echo "Restarting runner..."
@@ -84,4 +84,4 @@ if [[ -z "$RUNNER_MANUALLY_TRAP_SIG" ]]; then
run $*
else
runWithManualTrap $*
fi
fi
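
The hunk swaps wait $PID and wait -f $PID. Per the bash documentation, -f (bash 5.0+) waits until the child actually terminates, while plain wait can return as soon as the job changes state when job control is enabled. A small sketch of that difference, under the stated assumptions:

# Sketch only, not the shipped helper. Requires bash >= 5.0 for wait -f and
# assumes job control is enabled so a stopped child counts as a state change.
set -m
sleep 30 &
PID=$!
kill -STOP "$PID"
wait "$PID"; echo "wait returned $? (may return on the stop, not termination)"
kill -CONT "$PID"; kill "$PID"
wait -f "$PID"; echo "wait -f returned $? (returns only once the child terminates)"
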
@@ -1,6 +1,6 @@
#!/bin/bash

SECONDS=0
while [[ $SECONDS -lt $1 ]]; do
while [[ $SECONDS != $1 ]]; do
:
done

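Of the two loop conditions above, -lt is the robust one: inside [[ ]] the != operator is a string comparison, so an argument the counter never exactly matches (for example 1.5 or a padded number) would spin forever, while the numeric -lt bound terminates once the elapsed time reaches the target. The same guard, shown standalone:

#!/bin/bash
# Busy-wait for $1 seconds without relying on an external sleep binary.
# -lt terminates once $SECONDS reaches the target; a string != test may never match.
SECONDS=0
while [[ $SECONDS -lt $1 ]]; do
    :
done
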
57 src/Misc/runnercoreassets Normal file
@@ -0,0 +1,57 @@
actions.runner.plist.template
actions.runner.service.template
checkScripts/downloadCert.js
checkScripts/makeWebRequest.js
darwin.svc.sh.template
hashFiles/index.js
installdependencies.sh
macos-run-invoker.js
Microsoft.IdentityModel.Logging.dll
Microsoft.IdentityModel.Tokens.dll
Minimatch.dll
Newtonsoft.Json.Bson.dll
Newtonsoft.Json.dll
Runner.Common.deps.json
Runner.Common.dll
Runner.Common.pdb
Runner.Listener
Runner.Listener.deps.json
Runner.Listener.dll
Runner.Listener.exe
Runner.Listener.pdb
Runner.Listener.runtimeconfig.json
Runner.PluginHost
Runner.PluginHost.deps.json
Runner.PluginHost.dll
Runner.PluginHost.exe
Runner.PluginHost.pdb
Runner.PluginHost.runtimeconfig.json
Runner.Plugins.deps.json
Runner.Plugins.dll
Runner.Plugins.pdb
Runner.Sdk.deps.json
Runner.Sdk.dll
Runner.Sdk.pdb
Runner.Worker
Runner.Worker.deps.json
Runner.Worker.dll
Runner.Worker.exe
Runner.Worker.pdb
Runner.Worker.runtimeconfig.json
RunnerService.exe
RunnerService.exe.config
RunnerService.js
RunnerService.pdb
runsvc.sh
Sdk.deps.json
Sdk.dll
Sdk.pdb
System.IdentityModel.Tokens.Jwt.dll
System.Net.Http.Formatting.dll
System.Security.Cryptography.Pkcs.dll
System.Security.Cryptography.ProtectedData.dll
System.ServiceProcess.ServiceController.dll
systemd.svc.sh.template
update.cmd.template
update.sh.template
YamlDotNet.dll
270 src/Misc/runnerdotnetruntimeassets Normal file
@@ -0,0 +1,270 @@
api-ms-win-core-console-l1-1-0.dll
|
||||
api-ms-win-core-console-l1-2-0.dll
|
||||
api-ms-win-core-datetime-l1-1-0.dll
|
||||
api-ms-win-core-debug-l1-1-0.dll
|
||||
api-ms-win-core-errorhandling-l1-1-0.dll
|
||||
api-ms-win-core-fibers-l1-1-0.dll
|
||||
api-ms-win-core-file-l1-1-0.dll
|
||||
api-ms-win-core-file-l1-2-0.dll
|
||||
api-ms-win-core-file-l2-1-0.dll
|
||||
api-ms-win-core-handle-l1-1-0.dll
|
||||
api-ms-win-core-heap-l1-1-0.dll
|
||||
api-ms-win-core-interlocked-l1-1-0.dll
|
||||
api-ms-win-core-libraryloader-l1-1-0.dll
|
||||
api-ms-win-core-localization-l1-2-0.dll
|
||||
api-ms-win-core-memory-l1-1-0.dll
|
||||
api-ms-win-core-namedpipe-l1-1-0.dll
|
||||
api-ms-win-core-processenvironment-l1-1-0.dll
|
||||
api-ms-win-core-processthreads-l1-1-0.dll
|
||||
api-ms-win-core-processthreads-l1-1-1.dll
|
||||
api-ms-win-core-profile-l1-1-0.dll
|
||||
api-ms-win-core-rtlsupport-l1-1-0.dll
|
||||
api-ms-win-core-string-l1-1-0.dll
|
||||
api-ms-win-core-synch-l1-1-0.dll
|
||||
api-ms-win-core-synch-l1-2-0.dll
|
||||
api-ms-win-core-sysinfo-l1-1-0.dll
|
||||
api-ms-win-core-timezone-l1-1-0.dll
|
||||
api-ms-win-core-util-l1-1-0.dll
|
||||
api-ms-win-crt-conio-l1-1-0.dll
|
||||
api-ms-win-crt-convert-l1-1-0.dll
|
||||
api-ms-win-crt-environment-l1-1-0.dll
|
||||
api-ms-win-crt-filesystem-l1-1-0.dll
|
||||
api-ms-win-crt-heap-l1-1-0.dll
|
||||
api-ms-win-crt-locale-l1-1-0.dll
|
||||
api-ms-win-crt-math-l1-1-0.dll
|
||||
api-ms-win-crt-multibyte-l1-1-0.dll
|
||||
api-ms-win-crt-private-l1-1-0.dll
|
||||
api-ms-win-crt-process-l1-1-0.dll
|
||||
api-ms-win-crt-runtime-l1-1-0.dll
|
||||
api-ms-win-crt-stdio-l1-1-0.dll
|
||||
api-ms-win-crt-string-l1-1-0.dll
|
||||
api-ms-win-crt-time-l1-1-0.dll
|
||||
api-ms-win-crt-utility-l1-1-0.dll
|
||||
clrcompression.dll
|
||||
clretwrc.dll
|
||||
clrjit.dll
|
||||
coreclr.dll
|
||||
createdump
|
||||
createdump.exe
|
||||
dbgshim.dll
|
||||
hostfxr.dll
|
||||
hostpolicy.dll
|
||||
libclrjit.dylib
|
||||
libclrjit.so
|
||||
libcoreclr.dylib
|
||||
libcoreclr.so
|
||||
libcoreclrtraceptprovider.so
|
||||
libdbgshim.dylib
|
||||
libdbgshim.so
|
||||
libhostfxr.dylib
|
||||
libhostfxr.so
|
||||
libhostpolicy.dylib
|
||||
libhostpolicy.so
|
||||
libmscordaccore.dylib
|
||||
libmscordaccore.so
|
||||
libmscordbi.dylib
|
||||
libmscordbi.so
|
||||
Microsoft.CSharp.dll
|
||||
Microsoft.DiaSymReader.Native.amd64.dll
|
||||
Microsoft.DiaSymReader.Native.arm64.dll
|
||||
Microsoft.VisualBasic.Core.dll
|
||||
Microsoft.VisualBasic.dll
|
||||
Microsoft.Win32.Primitives.dll
|
||||
Microsoft.Win32.Registry.dll
|
||||
mscordaccore.dll
|
||||
mscordaccore_amd64_amd64_6.0.522.21309.dll
|
||||
mscordaccore_arm64_arm64_6.0.522.21309.dll
|
||||
mscordaccore_amd64_amd64_6.0.1322.58009.dll
|
||||
mscordaccore_amd64_amd64_6.0.2023.32017.dll
|
||||
mscordaccore_amd64_amd64_6.0.2223.42425.dll
|
||||
mscordaccore_amd64_amd64_6.0.2323.48002.dll
|
||||
mscordbi.dll
|
||||
mscorlib.dll
|
||||
mscorrc.debug.dll
|
||||
mscorrc.dll
|
||||
msquic.dll
|
||||
netstandard.dll
|
||||
SOS_README.md
|
||||
System.AppContext.dll
|
||||
System.Buffers.dll
|
||||
System.Collections.Concurrent.dll
|
||||
System.Collections.dll
|
||||
System.Collections.Immutable.dll
|
||||
System.Collections.NonGeneric.dll
|
||||
System.Collections.Specialized.dll
|
||||
System.ComponentModel.Annotations.dll
|
||||
System.ComponentModel.DataAnnotations.dll
|
||||
System.ComponentModel.dll
|
||||
System.ComponentModel.EventBasedAsync.dll
|
||||
System.ComponentModel.Primitives.dll
|
||||
System.ComponentModel.TypeConverter.dll
|
||||
System.Configuration.dll
|
||||
System.Console.dll
|
||||
System.Core.dll
|
||||
System.Data.Common.dll
|
||||
System.Data.DataSetExtensions.dll
|
||||
System.Data.dll
|
||||
System.Diagnostics.Contracts.dll
|
||||
System.Diagnostics.Debug.dll
|
||||
System.Diagnostics.DiagnosticSource.dll
|
||||
System.Diagnostics.FileVersionInfo.dll
|
||||
System.Diagnostics.Process.dll
|
||||
System.Diagnostics.StackTrace.dll
|
||||
System.Diagnostics.TextWriterTraceListener.dll
|
||||
System.Diagnostics.Tools.dll
|
||||
System.Diagnostics.TraceSource.dll
|
||||
System.Diagnostics.Tracing.dll
|
||||
System.dll
|
||||
System.Drawing.dll
|
||||
System.Drawing.Primitives.dll
|
||||
System.Dynamic.Runtime.dll
|
||||
System.Formats.Asn1.dll
|
||||
System.Globalization.Calendars.dll
|
||||
System.Globalization.dll
|
||||
System.Globalization.Extensions.dll
|
||||
System.Globalization.Native.dylib
|
||||
System.Globalization.Native.so
|
||||
System.IO.Compression.Brotli.dll
|
||||
System.IO.Compression.dll
|
||||
System.IO.Compression.FileSystem.dll
|
||||
System.IO.Compression.Native.a
|
||||
System.IO.Compression.Native.dll
|
||||
System.IO.Compression.Native.dylib
|
||||
System.IO.Compression.Native.so
|
||||
System.IO.Compression.ZipFile.dll
|
||||
System.IO.dll
|
||||
System.IO.FileSystem.AccessControl.dll
|
||||
System.IO.FileSystem.dll
|
||||
System.IO.FileSystem.DriveInfo.dll
|
||||
System.IO.FileSystem.Primitives.dll
|
||||
System.IO.FileSystem.Watcher.dll
|
||||
System.IO.IsolatedStorage.dll
|
||||
System.IO.MemoryMappedFiles.dll
|
||||
System.IO.Pipes.AccessControl.dll
|
||||
System.IO.Pipes.dll
|
||||
System.IO.UnmanagedMemoryStream.dll
|
||||
System.Linq.dll
|
||||
System.Linq.Expressions.dll
|
||||
System.Linq.Parallel.dll
|
||||
System.Linq.Queryable.dll
|
||||
System.Memory.dll
|
||||
System.Native.a
|
||||
System.Native.dylib
|
||||
System.Native.so
|
||||
System.Net.dll
|
||||
System.Net.Http.dll
|
||||
System.Net.Http.Json.dll
|
||||
System.Net.Http.Native.a
|
||||
System.Net.Http.Native.dylib
|
||||
System.Net.Http.Native.so
|
||||
System.Net.HttpListener.dll
|
||||
System.Net.Mail.dll
|
||||
System.Net.NameResolution.dll
|
||||
System.Net.NetworkInformation.dll
|
||||
System.Net.Ping.dll
|
||||
System.Net.Primitives.dll
|
||||
System.Net.Quic.dll
|
||||
System.Net.Requests.dll
|
||||
System.Net.Security.dll
|
||||
System.Net.Security.Native.a
|
||||
System.Net.Security.Native.dylib
|
||||
System.Net.Security.Native.so
|
||||
System.Net.ServicePoint.dll
|
||||
System.Net.Sockets.dll
|
||||
System.Net.WebClient.dll
|
||||
System.Net.WebHeaderCollection.dll
|
||||
System.Net.WebProxy.dll
|
||||
System.Net.WebSockets.Client.dll
|
||||
System.Net.WebSockets.dll
|
||||
System.Numerics.dll
|
||||
System.Numerics.Vectors.dll
|
||||
System.ObjectModel.dll
|
||||
System.Private.CoreLib.dll
|
||||
System.Private.DataContractSerialization.dll
|
||||
System.Private.Uri.dll
|
||||
System.Private.Xml.dll
|
||||
System.Private.Xml.Linq.dll
|
||||
System.Reflection.DispatchProxy.dll
|
||||
System.Reflection.dll
|
||||
System.Reflection.Emit.dll
|
||||
System.Reflection.Emit.ILGeneration.dll
|
||||
System.Reflection.Emit.Lightweight.dll
|
||||
System.Reflection.Extensions.dll
|
||||
System.Reflection.Metadata.dll
|
||||
System.Reflection.Primitives.dll
|
||||
System.Reflection.TypeExtensions.dll
|
||||
System.Resources.Reader.dll
|
||||
System.Resources.ResourceManager.dll
|
||||
System.Resources.Writer.dll
|
||||
System.Runtime.CompilerServices.Unsafe.dll
|
||||
System.Runtime.CompilerServices.VisualC.dll
|
||||
System.Runtime.dll
|
||||
System.Runtime.Extensions.dll
|
||||
System.Runtime.Handles.dll
|
||||
System.Runtime.InteropServices.dll
|
||||
System.Runtime.InteropServices.RuntimeInformation.dll
|
||||
System.Runtime.InteropServices.WindowsRuntime.dll
|
||||
System.Runtime.Intrinsics.dll
|
||||
System.Runtime.Loader.dll
|
||||
System.Runtime.Numerics.dll
|
||||
System.Runtime.Serialization.dll
|
||||
System.Runtime.Serialization.Formatters.dll
|
||||
System.Runtime.Serialization.Json.dll
|
||||
System.Runtime.Serialization.Primitives.dll
|
||||
System.Runtime.Serialization.Xml.dll
|
||||
System.Runtime.WindowsRuntime.dll
|
||||
System.Runtime.WindowsRuntime.UI.Xaml.dll
|
||||
System.Security.AccessControl.dll
|
||||
System.Security.Claims.dll
|
||||
System.Security.Cryptography.Algorithms.dll
|
||||
System.Security.Cryptography.Cng.dll
|
||||
System.Security.Cryptography.Csp.dll
|
||||
System.Security.Cryptography.Encoding.dll
|
||||
System.Security.Cryptography.Native.Apple.a
|
||||
System.Security.Cryptography.Native.Apple.dylib
|
||||
System.Security.Cryptography.Native.OpenSsl.a
|
||||
System.Security.Cryptography.Native.OpenSsl.dylib
|
||||
System.Security.Cryptography.Native.OpenSsl.so
|
||||
System.Security.Cryptography.OpenSsl.dll
|
||||
System.Security.Cryptography.Primitives.dll
|
||||
System.Security.Cryptography.X509Certificates.dll
|
||||
System.Security.Cryptography.XCertificates.dll
|
||||
System.Security.dll
|
||||
System.Security.Principal.dll
|
||||
System.Security.Principal.Windows.dll
|
||||
System.Security.SecureString.dll
|
||||
System.ServiceModel.Web.dll
|
||||
System.ServiceProcess.dll
|
||||
System.Text.Encoding.CodePages.dll
|
||||
System.Text.Encoding.dll
|
||||
System.Text.Encoding.Extensions.dll
|
||||
System.Text.Encodings.Web.dll
|
||||
System.Text.Json.dll
|
||||
System.Text.RegularExpressions.dll
|
||||
System.Threading.Channels.dll
|
||||
System.Threading.dll
|
||||
System.Threading.Overlapped.dll
|
||||
System.Threading.Tasks.Dataflow.dll
|
||||
System.Threading.Tasks.dll
|
||||
System.Threading.Tasks.Extensions.dll
|
||||
System.Threading.Tasks.Parallel.dll
|
||||
System.Threading.Thread.dll
|
||||
System.Threading.ThreadPool.dll
|
||||
System.Threading.Timer.dll
|
||||
System.Transactions.dll
|
||||
System.Transactions.Local.dll
|
||||
System.ValueTuple.dll
|
||||
System.Web.dll
|
||||
System.Web.HttpUtility.dll
|
||||
System.Windows.dll
|
||||
System.Xml.dll
|
||||
System.Xml.Linq.dll
|
||||
System.Xml.ReaderWriter.dll
|
||||
System.Xml.Serialization.dll
|
||||
System.Xml.XDocument.dll
|
||||
System.Xml.XmlDocument.dll
|
||||
System.Xml.XmlSerializer.dll
|
||||
System.Xml.XPath.dll
|
||||
System.Xml.XPath.XDocument.dll
|
||||
ucrtbase.dll
|
||||
WindowsBase.dll
|
||||
24 src/Misc/trimmedpackages_targz.json Normal file
@@ -0,0 +1,24 @@
[
  {
    "HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
    "DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.tar.gz",
    "TrimmedContents": {
      "dotnetRuntime": "<RUNTIME_HASH>",
      "externals": "<EXTERNALS_HASH>"
    }
  },
  {
    "HashValue": "<NO_RUNTIME_HASH>",
    "DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.tar.gz",
    "TrimmedContents": {
      "dotnetRuntime": "<RUNTIME_HASH>"
    }
  },
  {
    "HashValue": "<NO_EXTERNALS_HASH>",
    "DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.tar.gz",
    "TrimmedContents": {
      "externals": "<EXTERNALS_HASH>"
    }
  }
]
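The <RUNNER_VERSION>, <RUNNER_PLATFORM> and *_HASH tokens are placeholders filled in by the runner's packaging step, which is not shown in this diff. As a hedged illustration only (token names come from the file above; the values and the sed-based substitution are assumptions), stamping them could look like:

# Illustration only: how the placeholders might be stamped at package time.
RUNNER_VERSION="2.300.0"      # example values, not real release data
RUNNER_PLATFORM="linux-x64"
RUNTIME_HASH="deadbeef"
sed -e "s/<RUNNER_VERSION>/${RUNNER_VERSION}/g" \
    -e "s/<RUNNER_PLATFORM>/${RUNNER_PLATFORM}/g" \
    -e "s/<RUNTIME_HASH>/${RUNTIME_HASH}/g" \
    src/Misc/trimmedpackages_targz.json > trimmedpackages.json
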
24 src/Misc/trimmedpackages_zip.json Normal file
@@ -0,0 +1,24 @@
[
  {
    "HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
    "DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.zip",
    "TrimmedContents": {
      "dotnetRuntime": "<RUNTIME_HASH>",
      "externals": "<EXTERNALS_HASH>"
    }
  },
  {
    "HashValue": "<NO_RUNTIME_HASH>",
    "DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.zip",
    "TrimmedContents": {
      "dotnetRuntime": "<RUNTIME_HASH>"
    }
  },
  {
    "HashValue": "<NO_EXTERNALS_HASH>",
    "DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.zip",
    "TrimmedContents": {
      "externals": "<EXTERNALS_HASH>"
    }
  }
]
@@ -20,12 +20,12 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
private bool _hasConnection;
|
||||
private VssConnection _connection;
|
||||
private ActionsRunServerHttpClient _actionsRunServerClient;
|
||||
private TaskAgentHttpClient _taskAgentClient;
|
||||
|
||||
public async Task ConnectAsync(Uri serverUrl, VssCredentials credentials)
|
||||
{
|
||||
_connection = await EstablishVssConnection(serverUrl, credentials, TimeSpan.FromSeconds(100));
|
||||
_actionsRunServerClient = _connection.GetClient<ActionsRunServerHttpClient>();
|
||||
_taskAgentClient = _connection.GetClient<TaskAgentHttpClient>();
|
||||
_hasConnection = true;
|
||||
}
|
||||
|
||||
@@ -42,7 +42,7 @@ namespace GitHub.Runner.Common
|
||||
CheckConnection();
|
||||
var jobMessage = RetryRequest<AgentJobRequestMessage>(async () =>
|
||||
{
|
||||
return await _actionsRunServerClient.GetJobMessageAsync(id, cancellationToken);
|
||||
return await _taskAgentClient.GetJobMessageAsync(id, cancellationToken);
|
||||
}, cancellationToken);
|
||||
|
||||
return jobMessage;
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
using System;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
{
|
||||
public class AuthMigrationEventArgs : EventArgs
|
||||
{
|
||||
public AuthMigrationEventArgs(string trace)
|
||||
{
|
||||
Trace = trace;
|
||||
}
|
||||
public string Trace { get; private set; }
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,6 @@ using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
using Sdk.RSWebApi.Contracts;
|
||||
using Sdk.WebApi.WebApi.RawClient;
|
||||
|
||||
@@ -18,16 +17,7 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
|
||||
|
||||
Task<TaskAgentSession> CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken);
|
||||
Task DeleteSessionAsync(CancellationToken cancellationToken);
|
||||
|
||||
Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token);
|
||||
|
||||
Task AcknowledgeRunnerRequestAsync(string runnerRequestId, Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, CancellationToken token);
|
||||
|
||||
Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials);
|
||||
|
||||
Task ForceRefreshConnection(VssCredentials credentials);
|
||||
Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate);
|
||||
}
|
||||
|
||||
public sealed class BrokerServer : RunnerService, IBrokerServer
|
||||
@@ -39,7 +29,6 @@ namespace GitHub.Runner.Common
|
||||
|
||||
public async Task ConnectAsync(Uri serverUri, VssCredentials credentials)
|
||||
{
|
||||
Trace.Entering();
|
||||
_brokerUri = serverUri;
|
||||
|
||||
_connection = VssUtil.CreateRawConnection(serverUri, credentials);
|
||||
@@ -55,65 +44,13 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
public async Task<TaskAgentSession> CreateSessionAsync(TaskAgentSession session, CancellationToken cancellationToken)
|
||||
public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate)
|
||||
{
|
||||
CheckConnection();
|
||||
var jobMessage = await _brokerHttpClient.CreateSessionAsync(session, cancellationToken);
|
||||
var jobMessage = RetryRequest<TaskAgentMessage>(
|
||||
async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken);
|
||||
|
||||
return jobMessage;
|
||||
}
|
||||
|
||||
public Task<TaskAgentMessage> GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
var brokerSession = RetryRequest<TaskAgentMessage>(
|
||||
async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken, shouldRetry: ShouldRetryException);
|
||||
|
||||
return brokerSession;
|
||||
}
|
||||
|
||||
public async Task AcknowledgeRunnerRequestAsync(string runnerRequestId, Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
|
||||
// No retries
|
||||
await _brokerHttpClient.AcknowledgeRunnerRequestAsync(runnerRequestId, sessionId, version, status, os, architecture, cancellationToken);
|
||||
}
|
||||
|
||||
public async Task DeleteSessionAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
await _brokerHttpClient.DeleteSessionAsync(cancellationToken);
|
||||
}
|
||||
|
||||
public Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials)
|
||||
{
|
||||
if (_brokerUri != serverUri || !_hasConnection)
|
||||
{
|
||||
return ConnectAsync(serverUri, credentials);
|
||||
}
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task ForceRefreshConnection(VssCredentials credentials)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(_brokerUri?.AbsoluteUri))
|
||||
{
|
||||
return ConnectAsync(_brokerUri, credentials);
|
||||
}
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public bool ShouldRetryException(Exception ex)
|
||||
{
|
||||
if (ex is AccessDeniedException || ex is RunnerNotFoundException || ex is HostedRunnerDeprovisionedException)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
using System;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Runtime.Serialization;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
{
|
||||
@@ -53,9 +53,6 @@ namespace GitHub.Runner.Common
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public bool UseV2Flow { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public bool UseRunnerAdminFlow { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string ServerUrlV2 { get; set; }
|
||||
|
||||
@@ -64,20 +61,8 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
get
|
||||
{
|
||||
// If the value has been explicitly set, return it.
|
||||
if (_isHostedServer.HasValue)
|
||||
{
|
||||
return _isHostedServer.Value;
|
||||
}
|
||||
|
||||
// Otherwise, try to infer it from the GitHubUrl.
|
||||
if (!string.IsNullOrEmpty(GitHubUrl))
|
||||
{
|
||||
return UrlUtil.IsHostedServer(new UriBuilder(GitHubUrl));
|
||||
}
|
||||
|
||||
// Default to true since Hosted runners likely don't have this property set.
|
||||
return true;
|
||||
// Old runners do not have this property. Hosted runners likely don't have this property either.
|
||||
return _isHostedServer ?? true;
|
||||
}
|
||||
|
||||
set
|
||||
@@ -131,15 +116,11 @@ namespace GitHub.Runner.Common
|
||||
bool IsConfigured();
|
||||
bool IsServiceConfigured();
|
||||
bool HasCredentials();
|
||||
bool IsMigratedConfigured();
|
||||
CredentialData GetCredentials();
|
||||
CredentialData GetMigratedCredentials();
|
||||
RunnerSettings GetSettings();
|
||||
RunnerSettings GetMigratedSettings();
|
||||
void SaveCredential(CredentialData credential);
|
||||
void SaveMigratedCredential(CredentialData credential);
|
||||
void SaveSettings(RunnerSettings settings);
|
||||
void SaveMigratedSettings(RunnerSettings settings);
|
||||
void DeleteCredential();
|
||||
void DeleteMigratedCredential();
|
||||
void DeleteSettings();
|
||||
@@ -149,7 +130,6 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
private string _binPath;
|
||||
private string _configFilePath;
|
||||
private string _migratedConfigFilePath;
|
||||
private string _credFilePath;
|
||||
private string _migratedCredFilePath;
|
||||
private string _serviceConfigFilePath;
|
||||
@@ -157,7 +137,6 @@ namespace GitHub.Runner.Common
|
||||
private CredentialData _creds;
|
||||
private CredentialData _migratedCreds;
|
||||
private RunnerSettings _settings;
|
||||
private RunnerSettings _migratedSettings;
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
@@ -175,9 +154,6 @@ namespace GitHub.Runner.Common
|
||||
_configFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Runner);
|
||||
Trace.Info("ConfigFilePath: {0}", _configFilePath);
|
||||
|
||||
_migratedConfigFilePath = hostContext.GetConfigFile(WellKnownConfigFile.MigratedRunner);
|
||||
Trace.Info("MigratedConfigFilePath: {0}", _migratedConfigFilePath);
|
||||
|
||||
_credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials);
|
||||
Trace.Info("CredFilePath: {0}", _credFilePath);
|
||||
|
||||
@@ -193,7 +169,7 @@ namespace GitHub.Runner.Common
|
||||
public bool HasCredentials()
|
||||
{
|
||||
Trace.Info("HasCredentials()");
|
||||
bool credsStored = new FileInfo(_credFilePath).Exists || new FileInfo(_migratedCredFilePath).Exists;
|
||||
bool credsStored = (new FileInfo(_credFilePath)).Exists || (new FileInfo(_migratedCredFilePath)).Exists;
|
||||
Trace.Info("stored {0}", credsStored);
|
||||
return credsStored;
|
||||
}
|
||||
@@ -201,7 +177,7 @@ namespace GitHub.Runner.Common
|
||||
public bool IsConfigured()
|
||||
{
|
||||
Trace.Info("IsConfigured()");
|
||||
bool configured = new FileInfo(_configFilePath).Exists || new FileInfo(_migratedConfigFilePath).Exists;
|
||||
bool configured = new FileInfo(_configFilePath).Exists;
|
||||
Trace.Info("IsConfigured: {0}", configured);
|
||||
return configured;
|
||||
}
|
||||
@@ -209,19 +185,11 @@ namespace GitHub.Runner.Common
|
||||
public bool IsServiceConfigured()
|
||||
{
|
||||
Trace.Info("IsServiceConfigured()");
|
||||
bool serviceConfigured = new FileInfo(_serviceConfigFilePath).Exists;
|
||||
bool serviceConfigured = (new FileInfo(_serviceConfigFilePath)).Exists;
|
||||
Trace.Info($"IsServiceConfigured: {serviceConfigured}");
|
||||
return serviceConfigured;
|
||||
}
|
||||
|
||||
public bool IsMigratedConfigured()
|
||||
{
|
||||
Trace.Info("IsMigratedConfigured()");
|
||||
bool configured = new FileInfo(_migratedConfigFilePath).Exists;
|
||||
Trace.Info("IsMigratedConfigured: {0}", configured);
|
||||
return configured;
|
||||
}
|
||||
|
||||
public CredentialData GetCredentials()
|
||||
{
|
||||
if (_creds == null)
|
||||
@@ -261,25 +229,6 @@ namespace GitHub.Runner.Common
|
||||
return _settings;
|
||||
}
|
||||
|
||||
public RunnerSettings GetMigratedSettings()
|
||||
{
|
||||
if (_migratedSettings == null)
|
||||
{
|
||||
RunnerSettings configuredSettings = null;
|
||||
if (File.Exists(_migratedConfigFilePath))
|
||||
{
|
||||
string json = File.ReadAllText(_migratedConfigFilePath, Encoding.UTF8);
|
||||
Trace.Info($"Read migrated setting file: {json.Length} chars");
|
||||
configuredSettings = StringUtil.ConvertFromJson<RunnerSettings>(json);
|
||||
}
|
||||
|
||||
ArgUtil.NotNull(configuredSettings, nameof(configuredSettings));
|
||||
_migratedSettings = configuredSettings;
|
||||
}
|
||||
|
||||
return _migratedSettings;
|
||||
}
|
||||
|
||||
public void SaveCredential(CredentialData credential)
|
||||
{
|
||||
Trace.Info("Saving {0} credential @ {1}", credential.Scheme, _credFilePath);
|
||||
@@ -295,21 +244,6 @@ namespace GitHub.Runner.Common
|
||||
File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden);
|
||||
}
|
||||
|
||||
public void SaveMigratedCredential(CredentialData credential)
|
||||
{
|
||||
Trace.Info("Saving {0} migrated credential @ {1}", credential.Scheme, _migratedCredFilePath);
|
||||
if (File.Exists(_migratedCredFilePath))
|
||||
{
|
||||
// Delete existing credential file first, since the file is hidden and not able to overwrite.
|
||||
Trace.Info("Delete exist runner migrated credential file.");
|
||||
IOUtil.DeleteFile(_migratedCredFilePath);
|
||||
}
|
||||
|
||||
IOUtil.SaveObject(credential, _migratedCredFilePath);
|
||||
Trace.Info("Migrated Credentials Saved.");
|
||||
File.SetAttributes(_migratedCredFilePath, File.GetAttributes(_migratedCredFilePath) | FileAttributes.Hidden);
|
||||
}
|
||||
|
||||
public void SaveSettings(RunnerSettings settings)
|
||||
{
|
||||
Trace.Info("Saving runner settings.");
|
||||
@@ -325,21 +259,6 @@ namespace GitHub.Runner.Common
|
||||
File.SetAttributes(_configFilePath, File.GetAttributes(_configFilePath) | FileAttributes.Hidden);
|
||||
}
|
||||
|
||||
public void SaveMigratedSettings(RunnerSettings settings)
|
||||
{
|
||||
Trace.Info("Saving runner migrated settings");
|
||||
if (File.Exists(_migratedConfigFilePath))
|
||||
{
|
||||
// Delete existing settings file first, since the file is hidden and not able to overwrite.
|
||||
Trace.Info("Delete exist runner migrated settings file.");
|
||||
IOUtil.DeleteFile(_migratedConfigFilePath);
|
||||
}
|
||||
|
||||
IOUtil.SaveObject(settings, _migratedConfigFilePath);
|
||||
Trace.Info("Migrated Settings Saved.");
|
||||
File.SetAttributes(_migratedConfigFilePath, File.GetAttributes(_migratedConfigFilePath) | FileAttributes.Hidden);
|
||||
}
|
||||
|
||||
public void DeleteCredential()
|
||||
{
|
||||
IOUtil.Delete(_credFilePath, default(CancellationToken));
|
||||
@@ -354,12 +273,6 @@ namespace GitHub.Runner.Common
|
||||
public void DeleteSettings()
|
||||
{
|
||||
IOUtil.Delete(_configFilePath, default(CancellationToken));
|
||||
IOUtil.Delete(_migratedConfigFilePath, default(CancellationToken));
|
||||
}
|
||||
|
||||
public void DeleteMigratedSettings()
|
||||
{
|
||||
IOUtil.Delete(_migratedConfigFilePath, default(CancellationToken));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,7 +18,6 @@ namespace GitHub.Runner.Common
public enum WellKnownConfigFile
{
Runner,
MigratedRunner,
Credentials,
MigratedCredentials,
RSACredentials,
@@ -154,41 +153,15 @@ namespace GitHub.Runner.Common
public const int RetryableError = 2;
public const int RunnerUpdating = 3;
public const int RunOnceRunnerUpdating = 4;
public const int SessionConflict = 5;
// Temporary error code to indicate that the runner configuration has been refreshed
// and the runner should be restarted. This is a temporary code and will be removed in the future after
// the runner is migrated to runner admin.
public const int RunnerConfigurationRefreshed = 6;
}

public static class Features
{
public static readonly string DiskSpaceWarning = "runner.diskspace.warning";
public static readonly string Node16Warning = "DistributedTask.AddWarningToNode16Action";
public static readonly string LogTemplateErrorsAsDebugMessages = "DistributedTask.LogTemplateErrorsAsDebugMessages";
public static readonly string UseContainerPathForTemplate = "DistributedTask.UseContainerPathForTemplate";
public static readonly string AllowRunnerContainerHooks = "DistributedTask.AllowRunnerContainerHooks";
public static readonly string AddCheckRunIdToJobContext = "actions_add_check_run_id_to_job_context";
public static readonly string DisplayHelpfulActionsDownloadErrors = "actions_display_helpful_actions_download_errors";
public static readonly string ContainerActionRunnerTemp = "actions_container_action_runner_temp";
public static readonly string SnapshotPreflightHostedRunnerCheck = "actions_snapshot_preflight_hosted_runner_check";
public static readonly string SnapshotPreflightImageGenPoolCheck = "actions_snapshot_preflight_image_gen_pool_check";
public static readonly string CompareWorkflowParser = "actions_runner_compare_workflow_parser";
}

// Node version migration related constants
public static class NodeMigration
{
// Node versions
public static readonly string Node20 = "node20";
public static readonly string Node24 = "node24";

// Environment variables for controlling node version selection
public static readonly string ForceNode24Variable = "FORCE_JAVASCRIPT_ACTIONS_TO_NODE24";
public static readonly string AllowUnsecureNodeVersionVariable = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";

// Feature flags for controlling the migration phases
public static readonly string UseNode24ByDefaultFlag = "actions.runner.usenode24bydefault";
public static readonly string RequireNode24Flag = "actions.runner.requirenode24";
}

public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
@@ -202,6 +175,11 @@ namespace GitHub.Runner.Common
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
public static readonly string DetectedNodeAfterEndOfLifeMessage = "Node.js 16 actions are deprecated. Please update the following actions to use Node.js 20: {0}. For more information see: https://github.blog/changelog/2023-09-22-github-actions-transitioning-from-node-16-to-node-20/.";
public static readonly string DeprecatedNodeDetectedAfterEndOfLifeActions = "DeprecatedNodeActionsMessageWarnings";
public static readonly string DeprecatedNodeVersion = "node16";
public static readonly string EnforcedNode12DetectedAfterEndOfLife = "The following actions uses node12 which is deprecated and will be forced to run on node16: {0}. For more info: https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/";
public static readonly string EnforcedNode12DetectedAfterEndOfLifeEnvVariable = "Node16ForceActionsWarnings";
}

public static class RunnerEvent
@@ -272,13 +250,14 @@ namespace GitHub.Runner.Common
public static readonly string RequireJobContainer = "ACTIONS_RUNNER_REQUIRE_JOB_CONTAINER";
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
}

public static class Agent
{
public static readonly string ToolsDirectory = "agent.ToolsDirectory";

// Set this env var to "nodeXY" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
// Set this env var to "node12" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";

@@ -15,7 +15,6 @@ using System.Threading.Tasks;
using GitHub.DistributedTask.Logging;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.WebApi.Jwt;

namespace GitHub.Runner.Common
{
@@ -37,12 +36,6 @@ namespace GitHub.Runner.Common
event EventHandler Unloading;
void ShutdownRunner(ShutdownReason reason);
void WritePerfCounter(string counter);
void LoadDefaultUserAgents();

bool AllowAuthMigration { get; }
void EnableAuthMigration(string trace);
void DeferAuthMigration(TimeSpan deferred, string trace);
event EventHandler<AuthMigrationEventArgs> AuthMigrationChanged;
}

public enum StartupType
@@ -74,28 +67,17 @@ namespace GitHub.Runner.Common
private StartupType _startupType;
private string _perfFile;
private RunnerWebProxy _webProxy = new();
private string _hostType = string.Empty;

// disable auth migration by default
private readonly ManualResetEventSlim _allowAuthMigration = new ManualResetEventSlim(false);
private DateTime _deferredAuthMigrationTime = DateTime.MaxValue;
private readonly object _authMigrationLock = new object();
private CancellationTokenSource _authMigrationAutoReenableTaskCancellationTokenSource = new();
private Task _authMigrationAutoReenableTask;

public event EventHandler Unloading;
public event EventHandler<AuthMigrationEventArgs> AuthMigrationChanged;
public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token;
public ShutdownReason RunnerShutdownReason { get; private set; }
public ISecretMasker SecretMasker => _secretMasker;
public List<ProductInfoHeaderValue> UserAgents => _userAgents;
public RunnerWebProxy WebProxy => _webProxy;
public bool AllowAuthMigration => _allowAuthMigration.IsSet;
public HostContext(string hostType, string logFile = null)
{
// Validate args.
ArgUtil.NotNullOrEmpty(hostType, nameof(hostType));
_hostType = hostType;

_loadContext = AssemblyLoadContext.GetLoadContext(typeof(HostContext).GetTypeInfo().Assembly);
_loadContext.Unloading += LoadContext_Unloading;
@@ -214,81 +196,6 @@ namespace GitHub.Runner.Common
}
}

if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
{
_trace.Warning($"Runner is running under insecure mode: HTTPS server certificate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
}

LoadDefaultUserAgents();
}

// marked as internal for testing
internal async Task AuthMigrationAuthReenableAsync(TimeSpan refreshInterval, CancellationToken token)
{
try
{
while (!token.IsCancellationRequested)
{
_trace.Verbose($"Auth migration defer timer is set to expire at {_deferredAuthMigrationTime.ToString("O")}. AllowAuthMigration: {_allowAuthMigration.IsSet}.");
await Task.Delay(refreshInterval, token);
if (!_allowAuthMigration.IsSet && DateTime.UtcNow > _deferredAuthMigrationTime)
{
_trace.Info($"Auth migration defer timer expired. Allowing auth migration.");
EnableAuthMigration("Auth migration defer timer expired.");
}
}
}
catch (TaskCanceledException)
{
// Task was cancelled, exit the loop.
}
catch (Exception ex)
{
_trace.Info("Error in auth migration reenable task.");
_trace.Error(ex);
}
}

public void EnableAuthMigration(string trace)
{
_allowAuthMigration.Set();

lock (_authMigrationLock)
{
if (_authMigrationAutoReenableTask == null)
{
var refreshIntervalInMS = 60 * 1000;
#if DEBUG
// For L0, we will refresh faster
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL")))
{
refreshIntervalInMS = int.Parse(Environment.GetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL"));
}
#endif
_authMigrationAutoReenableTask = AuthMigrationAuthReenableAsync(TimeSpan.FromMilliseconds(refreshIntervalInMS), _authMigrationAutoReenableTaskCancellationTokenSource.Token);
}
}

_trace.Info($"Enable auth migration at {DateTime.UtcNow.ToString("O")}.");
AuthMigrationChanged?.Invoke(this, new AuthMigrationEventArgs(trace));
}

public void DeferAuthMigration(TimeSpan deferred, string trace)
{
_allowAuthMigration.Reset();

// defer migration for a while
lock (_authMigrationLock)
{
_deferredAuthMigrationTime = DateTime.UtcNow.Add(deferred);
}

_trace.Info($"Disabled auth migration until {_deferredAuthMigrationTime.ToString("O")}.");
AuthMigrationChanged?.Invoke(this, new AuthMigrationEventArgs(trace));
}

public void LoadDefaultUserAgents()
{
if (string.IsNullOrEmpty(WebProxy.HttpProxyAddress) && string.IsNullOrEmpty(WebProxy.HttpsProxyAddress))
{
_trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
@@ -298,6 +205,11 @@ namespace GitHub.Runner.Common
_userAgents.Add(new ProductInfoHeaderValue("HttpProxyConfigured", bool.TrueString));
}

if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
{
_trace.Warning($"Runner is running under insecure mode: HTTPS server certificate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
}

var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
if (File.Exists(credFile))
{
@@ -307,36 +219,6 @@ namespace GitHub.Runner.Common
{
_userAgents.Add(new ProductInfoHeaderValue("ClientId", clientId));
}

// for Hosted runner, we can pull orchestrationId from JWT claims of the runner listening token.
if (credData != null &&
credData.Scheme == Constants.Configuration.OAuthAccessToken &&
credData.Data.TryGetValue(Constants.Runner.CommandLine.Args.Token, out var accessToken) &&
!string.IsNullOrEmpty(accessToken))
{
try
{
var jwt = JsonWebToken.Create(accessToken);
var claims = jwt.ExtractClaims();
var orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orch_id", StringComparison.OrdinalIgnoreCase))?.Value;
if (string.IsNullOrEmpty(orchestrationId))
{
// fallback to orchid for C# actions-service
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
}

if (!string.IsNullOrEmpty(orchestrationId))
{
_trace.Info($"Pull OrchestrationId {orchestrationId} from runner JWT claims");
_userAgents.Insert(0, new ProductInfoHeaderValue("OrchestrationId", orchestrationId));
}
}
catch (Exception ex)
{
_trace.Error("Fail to extract OrchestrationId from runner JWT claims");
_trace.Error(ex);
}
}
}

var runnerFile = GetConfigFile(WellKnownConfigFile.Runner);
@@ -362,11 +244,6 @@ namespace GitHub.Runner.Common
_trace.Info($"Adding extra user agent '{extraUserAgentHeader}' to all HTTP requests.");
_userAgents.Add(extraUserAgentHeader);
}

var currentProcess = Process.GetCurrentProcess();
_userAgents.Add(new ProductInfoHeaderValue("Pid", currentProcess.Id.ToString()));
_userAgents.Add(new ProductInfoHeaderValue("CreationTime", Uri.EscapeDataString(DateTime.UtcNow.ToString("O"))));
_userAgents.Add(new ProductInfoHeaderValue($"({_hostType})"));
}

public string GetDirectory(WellKnownDirectory directory)
@@ -453,12 +330,6 @@ namespace GitHub.Runner.Common
".runner");
break;

case WellKnownConfigFile.MigratedRunner:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Root),
".runner_migrated");
break;

case WellKnownConfigFile.Credentials:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Root),
@@ -659,18 +530,6 @@ namespace GitHub.Runner.Common
_loadContext.Unloading -= LoadContext_Unloading;
_loadContext = null;
}

if (_authMigrationAutoReenableTask != null)
{
_authMigrationAutoReenableTaskCancellationTokenSource?.Cancel();
}

if (_authMigrationAutoReenableTaskCancellationTokenSource != null)
{
_authMigrationAutoReenableTaskCancellationTokenSource?.Dispose();
_authMigrationAutoReenableTaskCancellationTokenSource = null;
}

_httpTraceSubscription?.Dispose();
_diagListenerSubscription?.Dispose();
_traceManager?.Dispose();
@@ -757,7 +616,7 @@ namespace GitHub.Runner.Common
payload[0] = Enum.Parse(typeof(GitHub.Services.Common.VssCredentialsType), ((int)payload[0]).ToString());
}

if (payload.Length > 0 && !string.IsNullOrEmpty(eventData.Message))
if (payload.Length > 0)
{
message = String.Format(eventData.Message.Replace("%n", Environment.NewLine), payload);
}

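The HostContext changes above follow a defer-then-auto-re-enable pattern: auth migration is gated on a ManualResetEventSlim, a deferral records an expiry time, and a background loop re-enables migration once the deferral lapses. The following is a minimal, self-contained sketch of that pattern; the class and member names here are illustrative, not the runner's actual types.

using System;
using System.Threading;
using System.Threading.Tasks;

// Illustrative sketch of a defer/auto-re-enable gate; not the runner's actual implementation.
class MigrationGate
{
    private readonly ManualResetEventSlim _allowed = new(false); // disabled by default
    private DateTime _deferredUntil = DateTime.MaxValue;
    private readonly object _lock = new();

    public bool Allowed => _allowed.IsSet;

    public void Enable() => _allowed.Set();

    public void Defer(TimeSpan delay)
    {
        _allowed.Reset();
        lock (_lock) { _deferredUntil = DateTime.UtcNow.Add(delay); }
    }

    // Periodically re-enable once the deferral window has passed.
    public async Task AutoReenableLoopAsync(TimeSpan interval, CancellationToken token)
    {
        while (!token.IsCancellationRequested)
        {
            await Task.Delay(interval, token);
            if (!_allowed.IsSet && DateTime.UtcNow > _deferredUntil)
            {
                Enable();
            }
        }
    }
}
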
@@ -4,7 +4,6 @@ using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Security;
using System.Net.WebSockets;
using System.Text;
using System.Threading;
@@ -180,10 +179,6 @@ namespace GitHub.Runner.Common
userAgentValues.AddRange(UserAgentUtility.GetDefaultRestUserAgent());
userAgentValues.AddRange(HostContext.UserAgents);
this._websocketClient.Options.SetRequestHeader("User-Agent", string.Join(" ", userAgentValues.Select(x => x.ToString())));
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
{
this._websocketClient.Options.RemoteCertificateValidationCallback = (_, _, _, _) => true;
}

this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay);
}

@@ -19,7 +19,7 @@ namespace GitHub.Runner.Common
TaskCompletionSource<int> JobRecordUpdated { get; }
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
Task ShutdownAsync();
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false);
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false);
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
@@ -74,7 +74,6 @@ namespace GitHub.Runner.Common
private readonly List<JobTelemetry> _jobTelemetries = new();
private bool _queueInProcess = false;
private bool _resultsServiceOnly = false;
private int _resultsServiceExceptionsCount = 0;
private Stopwatch _resultsUploadTimer = new();
private Stopwatch _actionsUploadTimer = new();

@@ -105,10 +104,11 @@ namespace GitHub.Runner.Common
_resultsServer = hostContext.GetService<IResultsServer>();
}

public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false)
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false)
{
Trace.Entering();
_resultsServiceOnly = resultsServiceOnly;
_enableTelemetry = enableTelemetry;

var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));

@@ -134,15 +134,9 @@ namespace GitHub.Runner.Common
{
liveConsoleFeedUrl = feedStreamUrl;
}
jobRequest.Variables.TryGetValue("system.github.results_upload_with_sdk", out VariableValue resultsUseSdkVariable);
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken, StringUtil.ConvertToBoolean(resultsUseSdkVariable?.Value));
_resultsClientInitiated = true;
}

// Enable telemetry if we have both results service and actions service
if (_resultsClientInitiated && !_resultsServiceOnly)
{
_enableTelemetry = true;
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken);
_resultsClientInitiated = true;
}

if (_queueInProcess)
@@ -557,10 +551,6 @@ namespace GitHub.Runner.Common
{
await UploadSummaryFile(file);
}
if (string.Equals(file.Type, CoreAttachmentType.ResultsDiagnosticLog, StringComparison.OrdinalIgnoreCase))
{
await UploadResultsDiagnosticLogsFile(file);
}
else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase))
{
if (file.RecordId != _jobTimelineRecordId)
@@ -580,9 +570,9 @@ namespace GitHub.Runner.Common
Trace.Info("Catch exception during file upload to results, keep going since the process is best effort.");
Trace.Error(ex);
errorCount++;
_resultsServiceExceptionsCount++;

// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs
if (!_resultsServiceOnly && _resultsServiceExceptionsCount > 3)
if (!_resultsServiceOnly)
{
_resultsClientInitiated = false;
SendResultsTelemetry(ex);
@@ -613,7 +603,7 @@ namespace GitHub.Runner.Common

private void SendResultsTelemetry(Exception ex)
{
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception with results. {HostContext.SecretMasker.MaskSecrets(ex.Message)}" };
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception with results. {ex.Message}" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure;

var telemetryRecord = new TimelineRecord()
@@ -709,9 +699,7 @@ namespace GitHub.Runner.Common
{
Trace.Info("Catch exception during update steps, skip update Results.");
Trace.Error(e);
_resultsServiceExceptionsCount++;
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs
if (!_resultsServiceOnly && _resultsServiceExceptionsCount > 3)
if (!_resultsServiceOnly)
{
_resultsClientInitiated = false;
SendResultsTelemetry(e);
@@ -934,17 +922,6 @@ namespace GitHub.Runner.Common
await UploadResultsFile(file, summaryHandler);
}

private async Task UploadResultsDiagnosticLogsFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting to upload diagnostic logs file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler diagnosticLogsHandler = async (file) =>
{
await _resultsServer.CreateResultsDiagnosticLogsAsync(file.PlanId, file.JobId, file.Path, CancellationToken.None);
};

await UploadResultsFile(file, diagnosticLogsHandler);
}

private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");

@@ -1,12 +1,11 @@
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.Launch.Client;
using GitHub.Services.WebApi;

namespace GitHub.Runner.Common
{
@@ -15,7 +14,7 @@ namespace GitHub.Runner.Common
{
void InitializeLaunchClient(Uri uri, string token);

Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken, bool displayHelpfulActionsDownloadErrors);
Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken);
}

public sealed class LaunchServer : RunnerService, ILaunchServer
@@ -24,34 +23,17 @@ namespace GitHub.Runner.Common

public void InitializeLaunchClient(Uri uri, string token)
{
// Using default 100 timeout
RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null);

// Create retry handler
IEnumerable<DelegatingHandler> delegatingHandlers = new List<DelegatingHandler>();
if (settings.MaxRetryRequest > 0)
{
delegatingHandlers = new DelegatingHandler[] { new VssHttpRetryMessageHandler(settings.MaxRetryRequest) };
}

// Setup RawHttpMessageHandler without credentials
var httpMessageHandler = new RawHttpMessageHandler(new NoOpCredentials(null), settings);
var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers);

this._launchClient = new LaunchHttpClient(uri, pipeline, token, disposeHandler: true);
var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._launchClient = new LaunchHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
}

public Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList,
CancellationToken cancellationToken, bool displayHelpfulActionsDownloadErrors)
CancellationToken cancellationToken)
{
if (_launchClient != null)
{
if (!displayHelpfulActionsDownloadErrors)
{
return _launchClient.GetResolveActionsDownloadInfoAsync(planId, jobId, actionReferenceList,
cancellationToken: cancellationToken);
}
return _launchClient.GetResolveActionsDownloadInfoAsyncV2(planId, jobId, actionReferenceList, cancellationToken);
return _launchClient.GetResolveActionsDownloadInfoAsync(planId, jobId, actionReferenceList,
cancellationToken: cancellationToken);
}

throw new InvalidOperationException("Launch client is not initialized.");

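The removed InitializeLaunchClient body builds an HTTP pipeline by wrapping an inner message handler with a retry DelegatingHandler before handing it to the typed client. A generic sketch of that pipeline shape is below; RetryHandler is illustrative and stands in for the runner's SDK-specific handler types, and it assumes content-less requests that are safe to resend.

using System;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

// Illustrative retry handler: retries 5xx responses with a simple exponential backoff.
class RetryHandler : DelegatingHandler
{
    private readonly int _maxRetries;

    public RetryHandler(int maxRetries, HttpMessageHandler inner) : base(inner) => _maxRetries = maxRetries;

    protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken token)
    {
        for (var attempt = 0; ; attempt++)
        {
            var response = await base.SendAsync(request, token);
            if ((int)response.StatusCode < 500 || attempt >= _maxRetries)
            {
                return response;
            }
            response.Dispose();
            await Task.Delay(TimeSpan.FromSeconds(1 << attempt), token); // backoff before retrying
        }
    }
}

// Usage: wrap the innermost handler with the retry handler, then give the pipeline to HttpClient.
// var client = new HttpClient(new RetryHandler(maxRetries: 3, inner: new HttpClientHandler()));
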
@@ -19,7 +19,7 @@ namespace GitHub.Runner.Common
[ServiceLocator(Default = typeof(ResultServer))]
public interface IResultsServer : IRunnerService, IAsyncDisposable
{
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk);
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token);

Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);

@@ -35,8 +35,6 @@ namespace GitHub.Runner.Common

Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);

Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file, CancellationToken cancellationToken);
}

public sealed class ResultServer : RunnerService, IResultsServer
@@ -53,9 +51,9 @@ namespace GitHub.Runner.Common
private String _liveConsoleFeedUrl;
private string _token;

public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token, bool useSdk)
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token)
{
this._resultsClient = CreateHttpClient(uri, token, useSdk);
this._resultsClient = CreateHttpClient(uri, token);

_token = token;
if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
@@ -65,7 +63,7 @@ namespace GitHub.Runner.Common
}
}

public ResultsHttpClient CreateHttpClient(Uri uri, string token, bool useSdk)
public ResultsHttpClient CreateHttpClient(Uri uri, string token)
{
// Using default 100 timeout
RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null);
@@ -82,7 +80,7 @@ namespace GitHub.Runner.Common

var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers);

return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true, useSdk: useSdk);
return new ResultsHttpClient(uri, pipeline, token, disposeHandler: true);
}

public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
@@ -143,18 +141,6 @@ namespace GitHub.Runner.Common
throw new InvalidOperationException("Results client is not initialized.");
}

public Task CreateResultsDiagnosticLogsAsync(string planId, string jobId, string file,
CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsDiagnosticLogsAsync(planId, jobId, file,
cancellationToken: cancellationToken);
}

throw new InvalidOperationException("Results client is not initialized.");
}

public ValueTask DisposeAsync()
{
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);

@@ -5,7 +5,6 @@ using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using Sdk.RSWebApi.Contracts;
@@ -18,7 +17,7 @@ namespace GitHub.Runner.Common
{
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);

Task<AgentJobRequestMessage> GetJobMessageAsync(string id, string billingOwnerId, CancellationToken token);
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);

Task CompleteJobAsync(
Guid planId,
@@ -28,9 +27,6 @@ namespace GitHub.Runner.Common
IList<StepResult> stepResults,
IList<Annotation> jobAnnotations,
string environmentUrl,
IList<Telemetry> telemetry,
string billingOwnerId,
string infrastructureFailureCategory,
CancellationToken token);

Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
@@ -60,15 +56,12 @@ namespace GitHub.Runner.Common
}
}

public Task<AgentJobRequestMessage> GetJobMessageAsync(string id, string billingOwnerId, CancellationToken cancellationToken)
public Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken cancellationToken)
{
CheckConnection();
return RetryRequest<AgentJobRequestMessage>(
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, VarUtil.OS, billingOwnerId, cancellationToken), cancellationToken,
shouldRetry: ex =>
ex is not TaskOrchestrationJobNotFoundException && // HTTP status 404
ex is not TaskOrchestrationJobAlreadyAcquiredException && // HTTP status 409
ex is not TaskOrchestrationJobUnprocessableException); // HTTP status 422
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken,
shouldRetry: ex => ex is not TaskOrchestrationJobAlreadyAcquiredException);
}

public Task CompleteJobAsync(
@@ -79,26 +72,18 @@ namespace GitHub.Runner.Common
IList<StepResult> stepResults,
IList<Annotation> jobAnnotations,
string environmentUrl,
IList<Telemetry> telemetry,
string billingOwnerId,
string infrastructureFailureCategory,
CancellationToken cancellationToken)
{
CheckConnection();
return RetryRequest(
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, telemetry, billingOwnerId, infrastructureFailureCategory, cancellationToken), cancellationToken,
shouldRetry: ex =>
ex is not VssUnauthorizedException && // HTTP status 401
ex is not TaskOrchestrationJobNotFoundException); // HTTP status 404
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, cancellationToken), cancellationToken);
}

public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
{
CheckConnection();
return RetryRequest<RenewJobResponse>(
async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken,
shouldRetry: ex =>
ex is not TaskOrchestrationJobNotFoundException); // HTTP status 404
async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken);
}
}
}

@@ -1,11 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<TargetFramework>net6.0</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<NoWarn>NU1701;NU1603;SYSLIB0050;SYSLIB0051</NoWarn>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>
</PropertyGroup>

@@ -15,11 +15,11 @@
</ItemGroup>

<ItemGroup>
<PackageReference Include="Microsoft.Win32.Registry" Version="5.0.0" />
<PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="8.0.0" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageReference Include="System.Threading.Channels" Version="8.0.0" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="4.4.0" />
<PackageReference Include="System.Threading.Channels" Version="4.4.0" />
</ItemGroup>

<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">

@@ -19,7 +19,6 @@ namespace GitHub.Runner.Common

Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
Task DeleteRunnerAsync(string githubUrl, string githubToken, ulong runnerId);
Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);
}

@@ -44,15 +43,89 @@ namespace GitHub.Runner.Common

public async Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName)
{
var githubApiUrl = $"{GetEntityUrl(githubUrl)}/runners?name={Uri.EscapeDataString(agentName)}";
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
}
}
else if (path.Length == 2)
{
// repo or enterprise runner.
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
return null;
}

if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}

var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");

return runnersList.ToTaskAgents();
}

public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
{
var githubApiUrl = $"{GetEntityUrl(githubUrl)}/runner-groups";
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups";
}
}
else if (path.Length == 2)
{
// repo or enterprise runner.
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
return null;
}

if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}

var agentPools = await RetryRequest<RunnerGroupList>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");

return agentPools?.ToAgentPoolList();
}

@@ -103,12 +176,6 @@ namespace GitHub.Runner.Common
return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
}

public async Task DeleteRunnerAsync(string githubUrl, string githubToken, ulong runnerId)
{
var githubApiUrl = $"{GetEntityUrl(githubUrl)}/runners/{runnerId}";
await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Delete, 3, "Failed to delete agent");
}

private async Task<T> RetryRequest<T>(string githubApiUrl, string githubToken, RequestType requestType, int maxRetryAttemptsCount = 5, string errorMessage = null, StringContent body = null)
{
int retry = 0;
@@ -125,22 +192,13 @@ namespace GitHub.Runner.Common
try
{
HttpResponseMessage response = null;
switch (requestType)
if (requestType == RequestType.Get)
{
case RequestType.Get:
response = await httpClient.GetAsync(githubApiUrl);
break;
case RequestType.Post:
response = await httpClient.PostAsync(githubApiUrl, body);
break;
case RequestType.Patch:
response = await httpClient.PatchAsync(githubApiUrl, body);
break;
case RequestType.Delete:
response = await httpClient.DeleteAsync(githubApiUrl);
break;
default:
throw new ArgumentOutOfRangeException(nameof(requestType), requestType, null);
response = await httpClient.GetAsync(githubApiUrl);
}
else
{
response = await httpClient.PostAsync(githubApiUrl, body);
}

if (response != null)
@@ -175,61 +233,5 @@ namespace GitHub.Runner.Common
await Task.Delay(backOff);
}
}

private string GetEntityUrl(string githubUrl)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
var isOrgRunner = path.Length == 1;
var isRepoOrEnterpriseRunner = path.Length == 2;
var isRepoRunner = isRepoOrEnterpriseRunner && !string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase);

if (isOrgRunner)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions";
}
}
else if (isRepoOrEnterpriseRunner)
{
// Repository Runner
if (isRepoRunner)
{
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/repos/{path[0]}/{path[1]}/actions";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/repos/{path[0]}/{path[1]}/actions";
}
}
else
{
// Enterprise Runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions";
}
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}

return githubApiUrl;
}
}
}

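The GetEntityUrl helper above derives the REST base path from the configured URL: one path segment means an org, two segments mean a repo or an enterprise, and hosted GitHub uses the api. subdomain while GHES uses the /api/v3 prefix. A standalone sketch of that mapping follows; it is simplified, and UrlUtil.IsHostedServer is approximated here by a host-name check for illustration only.

using System;

static string BuildActionsApiBase(string githubUrl)
{
    var uri = new UriBuilder(githubUrl);
    var segments = uri.Path.Split('/', StringSplitOptions.RemoveEmptyEntries);
    // Approximation of UrlUtil.IsHostedServer, for illustration only.
    bool hosted = uri.Host.EndsWith("github.com", StringComparison.OrdinalIgnoreCase);
    string root = hosted
        ? $"{uri.Scheme}://api.{uri.Host}"
        : $"{uri.Scheme}://{uri.Host}/api/v3";

    return segments.Length switch
    {
        1 => $"{root}/orgs/{segments[0]}/actions",                        // org runner
        2 when segments[0].Equals("enterprises", StringComparison.OrdinalIgnoreCase)
            => $"{root}/{segments[0]}/{segments[1]}/actions",             // enterprise runner
        2 => $"{root}/repos/{segments[0]}/{segments[1]}/actions",         // repository runner
        _ => throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise."),
    };
}

// Example: BuildActionsApiBase("https://github.com/my-org") -> "https://api.github.com/orgs/my-org/actions"
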
@@ -1,11 +1,11 @@
using System;
using GitHub.DistributedTask.WebApi;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using GitHub.Services.Common;
using GitHub.Runner.Sdk;

namespace GitHub.Runner.Common
{
@@ -50,10 +50,7 @@ namespace GitHub.Runner.Common
Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken);

// agent update
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace, CancellationToken cancellationToken = default);

// runner config refresh
Task<string> RefreshRunnerConfigAsync(int agentId, string configType, string encodedRunnerConfig, CancellationToken cancellationToken);
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace);
}

public sealed class RunnerServer : RunnerService, IRunnerServer
@@ -318,17 +315,10 @@ namespace GitHub.Runner.Common
return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken);
}

public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace, CancellationToken cancellationToken = default)
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace)
{
CheckConnection(RunnerConnectionType.Generic);
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace, cancellationToken: cancellationToken);
}

// runner config refresh
public Task<string> RefreshRunnerConfigAsync(int agentId, string configType, string encodedRunnerConfig, CancellationToken cancellationToken)
{
CheckConnection(RunnerConnectionType.Generic);
return _genericTaskAgentClient.RefreshRunnerConfigAsync(agentId, configType, encodedRunnerConfig, cancellationToken: cancellationToken);
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace);
}
}
}

@@ -70,8 +70,7 @@ namespace GitHub.Runner.Common

protected async Task RetryRequest(Func<Task> func,
CancellationToken cancellationToken,
int maxAttempts = 5,
Func<Exception, bool> shouldRetry = null
int maxRetryAttemptsCount = 5
)
{
async Task<Unit> wrappedFunc()
@@ -79,31 +78,31 @@ namespace GitHub.Runner.Common
await func();
return Unit.Value;
}
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxAttempts, shouldRetry);
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxRetryAttemptsCount);
}

protected async Task<T> RetryRequest<T>(Func<Task<T>> func,
CancellationToken cancellationToken,
int maxAttempts = 5,
int maxRetryAttemptsCount = 5,
Func<Exception, bool> shouldRetry = null
)
{
var attempt = 0;
var retryCount = 0;
while (true)
{
attempt++;
retryCount++;
cancellationToken.ThrowIfCancellationRequested();
try
{
return await func();
}
// TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
catch (Exception ex) when (attempt < maxAttempts && (shouldRetry == null || shouldRetry(ex)))
catch (Exception ex) when (retryCount < maxRetryAttemptsCount && (shouldRetry == null || shouldRetry(ex)))
{
Trace.Error("Catch exception during request");
Trace.Error(ex);
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxAttempts - attempt} attempt left.");
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxRetryAttemptsCount - retryCount} attempt left.");
await Task.Delay(backOff, cancellationToken);
}
}

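The RetryRequest helpers above retry up to a maximum number of attempts, skip retries when a shouldRetry predicate classifies the failure as non-transient, and sleep a randomized backoff between attempts. A standalone sketch of that shape is below, assuming nothing beyond the standard library; the usage comment at the end uses a hypothetical GetJobAsync call purely for illustration.

using System;
using System.Threading;
using System.Threading.Tasks;

// Illustrative retry loop: bounded attempts, optional retriability predicate, randomized backoff.
static async Task<T> RetryAsync<T>(
    Func<Task<T>> func,
    CancellationToken token,
    int maxAttempts = 5,
    Func<Exception, bool> shouldRetry = null)
{
    var random = new Random();
    for (var attempt = 1; ; attempt++)
    {
        token.ThrowIfCancellationRequested();
        try
        {
            return await func();
        }
        catch (Exception ex) when (attempt < maxAttempts && (shouldRetry == null || shouldRetry(ex)))
        {
            var backOff = TimeSpan.FromSeconds(random.Next(5, 16)); // 5-15s, mirroring the randomized backoff above
            await Task.Delay(backOff, token);
        }
    }
}

// Usage (illustrative; GetJobAsync is a hypothetical caller-supplied delegate):
// var job = await RetryAsync(() => GetJobAsync(id, token), token, shouldRetry: ex => ex is not InvalidOperationException);
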
@@ -1,35 +1,12 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using GitHub.Runner.Sdk;

namespace GitHub.Runner.Common.Util
{
public static class NodeUtil
{
/// <summary>
/// Represents details about an environment variable, including its value and source
/// </summary>
private class EnvironmentVariableInfo
{
/// <summary>
/// Gets or sets whether the value evaluates to true
/// </summary>
public bool IsTrue { get; set; }

/// <summary>
/// Gets or sets whether the value came from the workflow environment
/// </summary>
public bool FromWorkflow { get; set; }

/// <summary>
/// Gets or sets whether the value came from the system environment
/// </summary>
public bool FromSystem { get; set; }
}

private const string _defaultNodeVersion = "node20";
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node20" });
private const string _defaultNodeVersion = "node16";
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] { "node16", "node20" });
public static string GetInternalNodeVersion()
{
var forcedInternalNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion);
@@ -41,122 +18,5 @@ namespace GitHub.Runner.Common.Util
}
return _defaultNodeVersion;
}
/// <summary>
/// Determines the appropriate Node version for Actions to use
/// </summary>
/// <param name="workflowEnvironment">Optional dictionary containing workflow-level environment variables</param>
/// <param name="useNode24ByDefault">Feature flag indicating if Node 24 should be the default</param>
/// <param name="requireNode24">Feature flag indicating if Node 24 is required</param>
/// <returns>The Node version to use (node20 or node24) and warning message if both env vars are set</returns>
public static (string nodeVersion, string warningMessage) DetermineActionsNodeVersion(
IDictionary<string, string> workflowEnvironment = null,
bool useNode24ByDefault = false,
bool requireNode24 = false)
{
// Phase 3: Always use Node 24 regardless of environment variables
if (requireNode24)
{
return (Constants.Runner.NodeMigration.Node24, null);
}

// Get environment variable details with source information
var forceNode24Details = GetEnvironmentVariableDetails(
Constants.Runner.NodeMigration.ForceNode24Variable, workflowEnvironment);

var allowUnsecureNodeDetails = GetEnvironmentVariableDetails(
Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable, workflowEnvironment);

bool forceNode24 = forceNode24Details.IsTrue;
bool allowUnsecureNode = allowUnsecureNodeDetails.IsTrue;
string warningMessage = null;

// Check if both flags are set from the same source
bool bothFromWorkflow = forceNode24Details.IsTrue && allowUnsecureNodeDetails.IsTrue &&
forceNode24Details.FromWorkflow && allowUnsecureNodeDetails.FromWorkflow;

bool bothFromSystem = forceNode24Details.IsTrue && allowUnsecureNodeDetails.IsTrue &&
forceNode24Details.FromSystem && allowUnsecureNodeDetails.FromSystem;

// Handle the case when both are set in the same source
if (bothFromWorkflow || bothFromSystem)
{
string source = bothFromWorkflow ? "workflow" : "system";
string defaultVersion = useNode24ByDefault ? Constants.Runner.NodeMigration.Node24 : Constants.Runner.NodeMigration.Node20;
warningMessage = $"Both {Constants.Runner.NodeMigration.ForceNode24Variable} and {Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable} environment variables are set to true in the {source} environment. This is likely a configuration error. Using the default Node version: {defaultVersion}.";
return (defaultVersion, warningMessage);
}

// Phase 2: Node 24 is the default
if (useNode24ByDefault)
{
if (allowUnsecureNode)
{
return (Constants.Runner.NodeMigration.Node20, null);
}

return (Constants.Runner.NodeMigration.Node24, null);
}

// Phase 1: Node 20 is the default
if (forceNode24)
{
return (Constants.Runner.NodeMigration.Node24, null);
}

return (Constants.Runner.NodeMigration.Node20, null);
}

/// <summary>
/// Checks if Node24 is requested but running on ARM32 Linux, and determines if fallback is needed.
/// </summary>
/// <param name="preferredVersion">The preferred Node version</param>
/// <returns>A tuple containing the adjusted node version and an optional warning message</returns>
public static (string nodeVersion, string warningMessage) CheckNodeVersionForLinuxArm32(string preferredVersion)
{
if (string.Equals(preferredVersion, Constants.Runner.NodeMigration.Node24, StringComparison.OrdinalIgnoreCase) &&
Constants.Runner.PlatformArchitecture.Equals(Constants.Architecture.Arm) &&
Constants.Runner.Platform.Equals(Constants.OSPlatform.Linux))
{
return (Constants.Runner.NodeMigration.Node20, "Node 24 is not supported on Linux ARM32 platforms. Falling back to Node 20.");
}

return (preferredVersion, null);
}

/// <summary>
/// Gets detailed information about an environment variable from both workflow and system environments
/// </summary>
/// <param name="variableName">The name of the environment variable</param>
/// <param name="workflowEnvironment">Optional dictionary containing workflow-level environment variables</param>
/// <returns>An EnvironmentVariableInfo object containing details about the variable from both sources</returns>
private static EnvironmentVariableInfo GetEnvironmentVariableDetails(string variableName, IDictionary<string, string> workflowEnvironment)
{
var info = new EnvironmentVariableInfo();

// Check workflow environment
bool foundInWorkflow = false;
string workflowValue = null;

if (workflowEnvironment != null && workflowEnvironment.TryGetValue(variableName, out workflowValue))
{
foundInWorkflow = true;
info.FromWorkflow = true;
info.IsTrue = StringUtil.ConvertToBoolean(workflowValue); // Workflow value takes precedence for the boolean value
}

// Also check system environment
string systemValue = Environment.GetEnvironmentVariable(variableName);
bool foundInSystem = !string.IsNullOrEmpty(systemValue);

info.FromSystem = foundInSystem;

// If not found in workflow, use system values
if (!foundInWorkflow)
{
info.IsTrue = StringUtil.ConvertToBoolean(systemValue);
}

return info;
}
}
}

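DetermineActionsNodeVersion above encodes a three-phase rollout: phase 1 defaults to node20 with an opt-in to node24 via FORCE_JAVASCRIPT_ACTIONS_TO_NODE24, phase 2 defaults to node24 with an opt-out via ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION, and phase 3 forces node24 unconditionally. The sketch below shows how a caller might combine it with the ARM32 fallback check; the method names are the ones defined above, while the surrounding wiring is illustrative only.

using System.Collections.Generic;

// Illustrative caller; the flag values would come from the runner's feature-flag constants above.
var workflowEnv = new Dictionary<string, string>
{
    ["FORCE_JAVASCRIPT_ACTIONS_TO_NODE24"] = "true",
};

var (nodeVersion, warning) = NodeUtil.DetermineActionsNodeVersion(
    workflowEnvironment: workflowEnv,
    useNode24ByDefault: false,   // phase 1 behaviour
    requireNode24: false);

// Downgrade to node20 when node24 was selected but the host is Linux ARM32.
(nodeVersion, var fallbackWarning) = NodeUtil.CheckNodeVersionForLinuxArm32(nodeVersion);

// nodeVersion is "node24" on most platforms here; warning/fallbackWarning surface configuration issues.
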
@@ -9,12 +9,11 @@ using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Runner.Common.Util;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;

namespace GitHub.Runner.Listener
{
@@ -23,242 +22,33 @@ namespace GitHub.Runner.Listener
private RunnerSettings _settings;
private ITerminal _term;
private TimeSpan _getNextMessageRetryInterval;
private TaskAgentStatus _runnerStatus = TaskAgentStatus.Online;
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
private VssCredentials _creds;
private VssCredentials _credsV2;
private TaskAgentSession _session;
private IRunnerServer _runnerServer;
private IBrokerServer _brokerServer;
private ICredentialManager _credMgr;
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
private bool _accessTokenRevoked = false;
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
private bool _needRefreshCredsV2 = false;
private bool _handlerInitialized = false;
private bool _isMigratedSettings = false;
private const int _maxMigratedSettingsRetries = 3;
private int _migratedSettingsRetryCount = 0;

public BrokerMessageListener()
{
}

public BrokerMessageListener(RunnerSettings settings, bool isMigratedSettings = false)
{
_settings = settings;
_isMigratedSettings = isMigratedSettings;
}

public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);

_term = HostContext.GetService<ITerminal>();
_runnerServer = HostContext.GetService<IRunnerServer>();
_brokerServer = HostContext.GetService<IBrokerServer>();
_credMgr = HostContext.GetService<ICredentialManager>();
}

public async Task<CreateSessionResult> CreateSessionAsync(CancellationToken token)
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
{
Trace.Entering();

// Load settings if not provided through constructor
if (_settings == null)
{
var configManager = HostContext.GetService<IConfigurationManager>();
_settings = configManager.LoadSettings();
Trace.Info("Settings loaded from config manager");
}
else
{
Trace.Info("Using provided settings");
if (_isMigratedSettings)
{
Trace.Info("Using migrated settings from .runner_migrated");
}
}

var serverUrlV2 = _settings.ServerUrlV2;
var serverUrl = _settings.ServerUrl;
Trace.Info(_settings);

if (string.IsNullOrEmpty(_settings.ServerUrlV2))
{
throw new InvalidOperationException("ServerUrlV2 is not set");
}

// Create connection.
Trace.Info("Loading Credentials");
_creds = _credMgr.LoadCredentials(allowAuthUrlV2: false);

var agent = new TaskAgentReference
{
Id = _settings.AgentId,
Name = _settings.AgentName,
Version = BuildConstants.RunnerPackage.Version,
OSDescription = RuntimeInformation.OSDescription,
};
var currentProcess = Process.GetCurrentProcess();
string sessionName = $"{Environment.MachineName ?? "RUNNER"} (PID: {currentProcess.Id})";
var taskAgentSession = new TaskAgentSession(sessionName, agent);

string errorMessage = string.Empty;
bool encounteringError = false;

while (true)
{
token.ThrowIfCancellationRequested();
Trace.Info($"Attempt to create session.");
try
{
Trace.Info("Connecting to the Broker Server...");
_credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true);
await _brokerServer.ConnectAsync(new Uri(serverUrlV2), _credsV2);
Trace.Info("VssConnection created");

if (!string.IsNullOrEmpty(serverUrl) &&
!string.Equals(serverUrl, serverUrlV2, StringComparison.OrdinalIgnoreCase))
{
Trace.Info("Connecting to the Runner server...");
await _runnerServer.ConnectAsync(new Uri(serverUrl), _creds);
Trace.Info("VssConnection created");
}

_term.WriteLine();
_term.WriteSuccessMessage("Connected to GitHub");
_term.WriteLine();

_session = await _brokerServer.CreateSessionAsync(taskAgentSession, token);

Trace.Info($"Session created.");
if (encounteringError)
{
_term.WriteLine($"{DateTime.UtcNow:u}: Runner reconnected.");
_sessionCreationExceptionTracker.Clear();
encounteringError = false;
}

if (!_handlerInitialized)
{
// Register event handler for auth migration state change
HostContext.AuthMigrationChanged += HandleAuthMigrationChanged;
_handlerInitialized = true;
}

return CreateSessionResult.Success;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info("Session creation has been cancelled.");
throw;
}
catch (TaskAgentAccessTokenExpiredException)
{
Trace.Info("Runner OAuth token has been revoked. Session creation failed.");
_accessTokenRevoked = true;
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during create session.");
Trace.Error(ex);

// If using migrated settings, limit the number of retries before returning failure
if (_isMigratedSettings)
{
_migratedSettingsRetryCount++;
Trace.Warning($"Migrated settings retry {_migratedSettingsRetryCount} of {_maxMigratedSettingsRetries}");

if (_migratedSettingsRetryCount >= _maxMigratedSettingsRetries)
{
Trace.Warning("Reached maximum retry attempts for migrated settings. Returning failure to try default settings.");
return CreateSessionResult.Failure;
}
}

if (!HostContext.AllowAuthMigration &&
ex is VssOAuthTokenRequestException vssOAuthEx &&
_credsV2.Federated is VssOAuthCredential vssOAuthCred)
{
// "invalid_client" means the runner registration has been deleted from the server.
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
return CreateSessionResult.Failure;
}

// Check whether we get 401 because the runner registration already removed by the service.
// If the runner registration get deleted, we can't exchange oauth token.
Trace.Error("Test oauth app registration.");
var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrlV2));
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
return CreateSessionResult.Failure;
}
}

if (!HostContext.AllowAuthMigration &&
!IsSessionCreationExceptionRetriable(ex))
{
_term.WriteError($"Failed to create session. {ex.Message}");
if (ex is TaskAgentSessionConflictException)
{
return CreateSessionResult.SessionConflict;
}
return CreateSessionResult.Failure;
}

if (HostContext.AllowAuthMigration)
{
Trace.Info("Disable migration mode for 60 minutes.");
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Session creation failed with exception: {ex}");
}

if (!encounteringError) //print the message only on the first error
{
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
encounteringError = true;
}

Trace.Info("Sleeping for {0} seconds before retrying.", _sessionCreationRetryInterval.TotalSeconds);
await HostContext.Delay(_sessionCreationRetryInterval, token);
}
}
await RefreshBrokerConnection();
return await Task.FromResult(true);
}

public async Task DeleteSessionAsync()
{
if (_session != null && _session.SessionId != Guid.Empty)
{
if (_handlerInitialized)
{
HostContext.AuthMigrationChanged -= HandleAuthMigrationChanged;
|
||||
}
|
||||
|
||||
if (!_accessTokenRevoked)
|
||||
{
|
||||
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
{
|
||||
await _brokerServer.DeleteSessionAsync(ts.Token);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Warning("Runner OAuth token has been revoked. Skip deleting session.");
|
||||
}
|
||||
}
|
||||
await Task.CompletedTask;
|
||||
}
|
||||
|
||||
public void OnJobStatus(object sender, JobStatusEventArgs e)
|
||||
{
|
||||
Trace.Info("Received job status event. JobState: {0}", e.Status);
|
||||
_runnerStatus = e.Status;
|
||||
runnerStatus = e.Status;
|
||||
try
|
||||
{
|
||||
_getMessagesTokenSource?.Cancel();
|
||||
@@ -283,20 +73,12 @@ namespace GitHub.Runner.Listener
|
||||
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
|
||||
try
|
||||
{
|
||||
if (_needRefreshCredsV2)
|
||||
{
|
||||
Trace.Info("Refreshing broker connection.");
|
||||
await RefreshBrokerConnectionAsync();
|
||||
_needRefreshCredsV2 = false;
|
||||
}
|
||||
|
||||
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
|
||||
_runnerStatus,
|
||||
message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token,
|
||||
runnerStatus,
|
||||
BuildConstants.RunnerPackage.Version,
|
||||
VarUtil.OS,
|
||||
VarUtil.OSArchitecture,
|
||||
_settings.DisableUpdate,
|
||||
_getMessagesTokenSource.Token);
|
||||
_settings.DisableUpdate);
|
||||
|
||||
if (message == null)
|
||||
{
|
||||
@@ -320,16 +102,7 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
|
||||
throw;
|
||||
}
|
||||
catch (HostedRunnerDeprovisionedException)
|
||||
{
|
||||
Trace.Info("Hosted runner has been deprovisioned.");
|
||||
throw;
|
||||
}
|
||||
catch (AccessDeniedException e) when (e.ErrorCode == 1 && !HostContext.AllowAuthMigration)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
catch (RunnerNotFoundException) when (!HostContext.AllowAuthMigration)
|
||||
catch (AccessDeniedException e) when (e.ErrorCode == 1)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
@@ -338,8 +111,7 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Error("Catch exception during get next message.");
|
||||
Trace.Error(ex);
|
||||
|
||||
if (!HostContext.AllowAuthMigration &&
|
||||
!IsGetNextMessageExceptionRetriable(ex))
|
||||
if (!IsGetNextMessageExceptionRetriable(ex))
|
||||
{
|
||||
throw new NonRetryableException("Get next message failed with non-retryable error.", ex);
|
||||
}
|
||||
@@ -370,14 +142,8 @@ namespace GitHub.Runner.Listener
|
||||
encounteringError = true;
|
||||
}
|
||||
|
||||
if (HostContext.AllowAuthMigration)
|
||||
{
|
||||
Trace.Info("Disable migration mode for 60 minutes.");
|
||||
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Get next message failed with exception: {ex}");
|
||||
}
|
||||
|
||||
// re-create VssConnection before next retry
|
||||
await RefreshBrokerConnectionAsync();
|
||||
await RefreshBrokerConnection();
|
||||
|
||||
Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
|
||||
await HostContext.Delay(_getNextMessageRetryInterval, token);
|
||||
@@ -407,38 +173,17 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
public async Task RefreshListenerTokenAsync()
|
||||
{
|
||||
await RefreshBrokerConnectionAsync();
|
||||
}
|
||||
|
||||
public async Task DeleteMessageAsync(TaskAgentMessage message)
|
||||
{
|
||||
await Task.CompletedTask;
|
||||
}
|
||||
|
||||
public async Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken)
|
||||
{
|
||||
using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // Short timeout
|
||||
using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token);
|
||||
Trace.Info($"Acknowledging runner request '{runnerRequestId}'.");
|
||||
await _brokerServer.AcknowledgeRunnerRequestAsync(
|
||||
runnerRequestId,
|
||||
_session.SessionId,
|
||||
_runnerStatus,
|
||||
BuildConstants.RunnerPackage.Version,
|
||||
VarUtil.OS,
|
||||
VarUtil.OSArchitecture,
|
||||
linkedCts.Token);
|
||||
}
|
||||
|
||||
private bool IsGetNextMessageExceptionRetriable(Exception ex)
|
||||
{
|
||||
if (ex is TaskAgentNotFoundException ||
|
||||
ex is TaskAgentPoolNotFoundException ||
|
||||
ex is TaskAgentSessionExpiredException ||
|
||||
ex is AccessDeniedException ||
|
||||
ex is RunnerNotFoundException ||
|
||||
ex is VssUnauthorizedException)
|
||||
{
|
||||
Trace.Info($"Non-retriable exception: {ex.Message}");
|
||||
@@ -451,90 +196,19 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
private bool IsSessionCreationExceptionRetriable(Exception ex)
|
||||
private async Task RefreshBrokerConnection()
|
||||
{
|
||||
if (ex is TaskAgentNotFoundException)
|
||||
{
|
||||
Trace.Info("The runner no longer exists on the server. Stopping the runner.");
|
||||
_term.WriteError("The runner no longer exists on the server. Please reconfigure the runner.");
|
||||
return false;
|
||||
}
|
||||
else if (ex is TaskAgentSessionConflictException)
|
||||
{
|
||||
Trace.Info("The session for this runner already exists.");
|
||||
_term.WriteError("A session for this runner already exists.");
|
||||
if (_sessionCreationExceptionTracker.ContainsKey(nameof(TaskAgentSessionConflictException)))
|
||||
{
|
||||
_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)]++;
|
||||
if (_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] * _sessionCreationRetryInterval.TotalSeconds >= _sessionConflictRetryLimit.TotalSeconds)
|
||||
{
|
||||
Trace.Info("The session conflict exception have reached retry limit.");
|
||||
_term.WriteError($"Stop retry on SessionConflictException after retried for {_sessionConflictRetryLimit.TotalSeconds} seconds.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] = 1;
|
||||
}
|
||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||
_settings = configManager.LoadSettings();
|
||||
|
||||
Trace.Info("The session conflict exception haven't reached retry limit.");
|
||||
return true;
|
||||
}
|
||||
else if (ex is VssOAuthTokenRequestException && ex.Message.Contains("Current server time is"))
|
||||
if (_settings.ServerUrlV2 == null)
|
||||
{
|
||||
Trace.Info("Local clock might be skewed.");
|
||||
_term.WriteError("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
|
||||
if (_sessionCreationExceptionTracker.ContainsKey(nameof(VssOAuthTokenRequestException)))
|
||||
{
|
||||
_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)]++;
|
||||
if (_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] * _sessionCreationRetryInterval.TotalSeconds >= _clockSkewRetryLimit.TotalSeconds)
|
||||
{
|
||||
Trace.Info("The OAuth token request exception have reached retry limit.");
|
||||
_term.WriteError($"Stopped retrying OAuth token request exception after {_clockSkewRetryLimit.TotalSeconds} seconds.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] = 1;
|
||||
}
|
||||
|
||||
Trace.Info("The OAuth token request exception haven't reached retry limit.");
|
||||
return true;
|
||||
}
|
||||
else if (ex is TaskAgentPoolNotFoundException ||
|
||||
ex is AccessDeniedException ||
|
||||
ex is VssUnauthorizedException)
|
||||
{
|
||||
Trace.Info($"Non-retriable exception: {ex.Message}");
|
||||
return false;
|
||||
throw new InvalidOperationException("ServerUrlV2 is not set");
|
||||
}
|
||||
|
||||
else if (ex is InvalidOperationException)
|
||||
{
|
||||
Trace.Info($"Non-retriable exception: {ex.Message}");
|
||||
return false;
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"Retriable exception: {ex.Message}");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task RefreshBrokerConnectionAsync()
|
||||
{
|
||||
Trace.Info("Reload credentials.");
|
||||
_credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||
await _brokerServer.ConnectAsync(new Uri(_settings.ServerUrlV2), _credsV2);
|
||||
Trace.Info("Connection to Broker Server recreated.");
|
||||
}
|
||||
|
||||
private void HandleAuthMigrationChanged(object sender, EventArgs e)
|
||||
{
|
||||
Trace.Info($"Auth migration changed. Current allow auth migration state: {HostContext.AllowAuthMigration}");
|
||||
_needRefreshCredsV2 = true;
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
VssCredentials creds = credMgr.LoadCredentials();
|
||||
await _brokerServer.ConnectAsync(new Uri(_settings.ServerUrlV2), creds);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,7 +25,6 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
Task UnconfigureAsync(CommandSettings command);
|
||||
void DeleteLocalRunnerConfig();
|
||||
RunnerSettings LoadSettings();
|
||||
RunnerSettings LoadMigratedSettings();
|
||||
}
|
||||
|
||||
public sealed class ConfigurationManager : RunnerService, IConfigurationManager
|
||||
@@ -67,22 +66,6 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
return settings;
|
||||
}
|
||||
|
||||
public RunnerSettings LoadMigratedSettings()
|
||||
{
|
||||
Trace.Info(nameof(LoadMigratedSettings));
|
||||
|
||||
// Check if migrated settings file exists
|
||||
if (!_store.IsMigratedConfigured())
|
||||
{
|
||||
throw new NonRetryableException("No migrated configuration found.");
|
||||
}
|
||||
|
||||
RunnerSettings settings = _store.GetMigratedSettings();
|
||||
Trace.Info("Migrated Settings Loaded");
|
||||
|
||||
return settings;
|
||||
}
|
||||
|
||||
public async Task ConfigureAsync(CommandSettings command)
|
||||
{
|
||||
_term.WriteLine();
|
||||
@@ -144,7 +127,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
runnerSettings.ServerUrl = inputUrl;
|
||||
// Get the credentials
|
||||
credProvider = GetCredentialProvider(command, runnerSettings.ServerUrl);
|
||||
creds = credProvider.GetVssCredentials(HostContext, allowAuthUrlV2: false);
|
||||
creds = credProvider.GetVssCredentials(HostContext);
|
||||
Trace.Info("legacy vss cred retrieved");
|
||||
}
|
||||
else
|
||||
@@ -153,8 +136,8 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
|
||||
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
|
||||
runnerSettings.ServerUrl = authResult.TenantUrl;
|
||||
runnerSettings.UseRunnerAdminFlow = authResult.UseRunnerAdminFlow;
|
||||
Trace.Info($"Using runner-admin flow: {runnerSettings.UseRunnerAdminFlow}");
|
||||
runnerSettings.UseV2Flow = authResult.UseV2Flow;
|
||||
Trace.Info($"Using V2 flow: {runnerSettings.UseV2Flow}");
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
}
|
||||
@@ -211,7 +194,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
string poolName = null;
|
||||
TaskAgentPool agentPool = null;
|
||||
List<TaskAgentPool> agentPools;
|
||||
if (runnerSettings.UseRunnerAdminFlow)
|
||||
if (runnerSettings.UseV2Flow)
|
||||
{
|
||||
agentPools = await _dotcomServer.GetRunnerGroupsAsync(runnerSettings.GitHubUrl, registerToken);
|
||||
}
|
||||
@@ -259,7 +242,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
var userLabels = command.GetLabels();
|
||||
_term.WriteLine();
|
||||
List<TaskAgent> agents;
|
||||
if (runnerSettings.UseRunnerAdminFlow)
|
||||
if (runnerSettings.UseV2Flow)
|
||||
{
|
||||
agents = await _dotcomServer.GetRunnerByNameAsync(runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
|
||||
}
|
||||
@@ -280,11 +263,10 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
try
|
||||
{
|
||||
if (runnerSettings.UseRunnerAdminFlow)
|
||||
if (runnerSettings.UseV2Flow)
|
||||
{
|
||||
var runner = await _dotcomServer.ReplaceRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
|
||||
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
|
||||
runnerSettings.UseV2Flow = true; // if we are using runner admin, we also need to hit broker
|
||||
|
||||
agent.Id = runner.Id;
|
||||
agent.Authorization = new TaskAgentAuthorization()
|
||||
@@ -292,13 +274,6 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
|
||||
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
|
||||
};
|
||||
|
||||
if (!string.IsNullOrEmpty(runner.RunnerAuthorization.LegacyAuthorizationUrl?.AbsoluteUri))
|
||||
{
|
||||
agent.Authorization.AuthorizationUrl = runner.RunnerAuthorization.LegacyAuthorizationUrl;
|
||||
agent.Properties["EnableAuthMigrationByDefault"] = true;
|
||||
agent.Properties["AuthorizationUrlV2"] = runner.RunnerAuthorization.AuthorizationUrl.AbsoluteUri;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -338,11 +313,10 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
try
|
||||
{
|
||||
if (runnerSettings.UseRunnerAdminFlow)
|
||||
if (runnerSettings.UseV2Flow)
|
||||
{
|
||||
var runner = await _dotcomServer.AddRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
|
||||
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
|
||||
runnerSettings.UseV2Flow = true; // if we are using runner admin, we also need to hit broker
|
||||
|
||||
agent.Id = runner.Id;
|
||||
agent.Authorization = new TaskAgentAuthorization()
|
||||
@@ -350,13 +324,6 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
|
||||
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
|
||||
};
|
||||
|
||||
if (!string.IsNullOrEmpty(runner.RunnerAuthorization.LegacyAuthorizationUrl?.AbsoluteUri))
|
||||
{
|
||||
agent.Authorization.AuthorizationUrl = runner.RunnerAuthorization.LegacyAuthorizationUrl;
|
||||
agent.Properties["EnableAuthMigrationByDefault"] = true;
|
||||
agent.Properties["AuthorizationUrlV2"] = runner.RunnerAuthorization.AuthorizationUrl.AbsoluteUri;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -399,46 +366,25 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
{ "clientId", agent.Authorization.ClientId.ToString("D") },
|
||||
{ "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri },
|
||||
{ "requireFipsCryptography", agent.Properties.GetValue("RequireFipsCryptography", true).ToString() }
|
||||
{ "requireFipsCryptography", agent.Properties.GetValue("RequireFipsCryptography", false).ToString() }
|
||||
},
|
||||
};
|
||||
|
||||
if (agent.Properties.GetValue("EnableAuthMigrationByDefault", false) &&
|
||||
agent.Properties.TryGetValue<string>("AuthorizationUrlV2", out var authUrlV2) &&
|
||||
!string.IsNullOrEmpty(authUrlV2))
|
||||
{
|
||||
credentialData.Data["enableAuthMigrationByDefault"] = "true";
|
||||
credentialData.Data["authorizationUrlV2"] = authUrlV2;
|
||||
}
|
||||
|
||||
// Save the negotiated OAuth credential data
|
||||
_store.SaveCredential(credentialData);
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
throw new NotSupportedException("Message queue listen OAuth token.");
|
||||
}
|
||||
|
||||
// allow the server to override the serverUrlV2 and useV2Flow
|
||||
if (agent.Properties.TryGetValue("ServerUrlV2", out string serverUrlV2) &&
|
||||
!string.IsNullOrEmpty(serverUrlV2))
|
||||
{
|
||||
Trace.Info($"Service enforced serverUrlV2: {serverUrlV2}");
|
||||
runnerSettings.ServerUrlV2 = serverUrlV2;
|
||||
}
|
||||
|
||||
if (agent.Properties.TryGetValue("UseV2Flow", out bool useV2Flow) && useV2Flow)
|
||||
{
|
||||
Trace.Info($"Service enforced useV2Flow: {useV2Flow}");
|
||||
runnerSettings.UseV2Flow = useV2Flow;
|
||||
}
|
||||
|
||||
// Testing agent connection, detect any potential connection issue, like local clock skew that cause OAuth token expired.
|
||||
|
||||
if (!runnerSettings.UseV2Flow && !runnerSettings.UseRunnerAdminFlow)
|
||||
if (!runnerSettings.UseV2Flow)
|
||||
{
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
VssCredentials credential = credMgr.LoadCredentials(allowAuthUrlV2: false);
|
||||
VssCredentials credential = credMgr.LoadCredentials();
|
||||
try
|
||||
{
|
||||
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
|
||||
@@ -552,50 +498,41 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
if (isConfigured && hasCredentials)
|
||||
{
|
||||
RunnerSettings settings = _store.GetSettings();
|
||||
var credentialManager = HostContext.GetService<ICredentialManager>();
|
||||
|
||||
if (settings.UseRunnerAdminFlow)
|
||||
// Get the credentials
|
||||
VssCredentials creds = null;
|
||||
if (string.IsNullOrEmpty(settings.GitHubUrl))
|
||||
{
|
||||
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
|
||||
await _dotcomServer.DeleteRunnerAsync(settings.GitHubUrl, deletionToken, settings.AgentId);
|
||||
var credProvider = GetCredentialProvider(command, settings.ServerUrl);
|
||||
creds = credProvider.GetVssCredentials(HostContext);
|
||||
Trace.Info("legacy vss cred retrieved");
|
||||
}
|
||||
else
|
||||
{
|
||||
var credentialManager = HostContext.GetService<ICredentialManager>();
|
||||
|
||||
// Get the credentials
|
||||
VssCredentials creds = null;
|
||||
if (string.IsNullOrEmpty(settings.GitHubUrl))
|
||||
{
|
||||
var credProvider = GetCredentialProvider(command, settings.ServerUrl);
|
||||
creds = credProvider.GetVssCredentials(HostContext, allowAuthUrlV2: false);
|
||||
Trace.Info("legacy vss cred retrieved");
|
||||
}
|
||||
else
|
||||
{
|
||||
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
|
||||
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove);
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
}
|
||||
|
||||
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
|
||||
await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
|
||||
|
||||
var agents = await _runnerServer.GetAgentsAsync(settings.AgentName);
|
||||
Trace.Verbose("Returns {0} agents", agents.Count);
|
||||
TaskAgent agent = agents.FirstOrDefault();
|
||||
if (agent == null)
|
||||
{
|
||||
_term.WriteLine("Does not exist. Skipping " + currentAction);
|
||||
}
|
||||
else
|
||||
{
|
||||
await _runnerServer.DeleteAgentAsync(settings.AgentId);
|
||||
}
|
||||
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
|
||||
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove);
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
}
|
||||
|
||||
_term.WriteLine();
|
||||
_term.WriteSuccessMessage("Runner removed successfully");
|
||||
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
|
||||
await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
|
||||
|
||||
var agents = await _runnerServer.GetAgentsAsync(settings.AgentName);
|
||||
Trace.Verbose("Returns {0} agents", agents.Count);
|
||||
TaskAgent agent = agents.FirstOrDefault();
|
||||
if (agent == null)
|
||||
{
|
||||
_term.WriteLine("Does not exist. Skipping " + currentAction);
|
||||
}
|
||||
else
|
||||
{
|
||||
await _runnerServer.DeleteAgentAsync(settings.AgentId);
|
||||
|
||||
_term.WriteLine();
|
||||
_term.WriteSuccessMessage("Runner removed successfully");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
@@ -13,7 +13,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
public interface ICredentialManager : IRunnerService
|
||||
{
|
||||
ICredentialProvider GetCredentialProvider(string credType);
|
||||
VssCredentials LoadCredentials(bool allowAuthUrlV2);
|
||||
VssCredentials LoadCredentials();
|
||||
}
|
||||
|
||||
public class CredentialManager : RunnerService, ICredentialManager
|
||||
@@ -40,7 +40,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
return creds;
|
||||
}
|
||||
|
||||
public VssCredentials LoadCredentials(bool allowAuthUrlV2)
|
||||
public VssCredentials LoadCredentials()
|
||||
{
|
||||
IConfigurationStore store = HostContext.GetService<IConfigurationStore>();
|
||||
|
||||
@@ -51,16 +51,21 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
CredentialData credData = store.GetCredentials();
|
||||
var migratedCred = store.GetMigratedCredentials();
|
||||
if (migratedCred != null &&
|
||||
migratedCred.Scheme == Constants.Configuration.OAuth)
|
||||
if (migratedCred != null)
|
||||
{
|
||||
credData = migratedCred;
|
||||
|
||||
// Re-write .credentials with Token URL
|
||||
store.SaveCredential(credData);
|
||||
|
||||
// Delete .credentials_migrated
|
||||
store.DeleteMigratedCredential();
|
||||
}
|
||||
|
||||
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);
|
||||
credProv.CredentialData = credData;
|
||||
|
||||
VssCredentials creds = credProv.GetVssCredentials(HostContext, allowAuthUrlV2);
|
||||
VssCredentials creds = credProv.GetVssCredentials(HostContext);
|
||||
|
||||
return creds;
|
||||
}
|
||||
@@ -89,7 +94,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
public string Token { get; set; }
|
||||
|
||||
[DataMember(Name = "use_v2_flow")]
|
||||
public bool UseRunnerAdminFlow { get; set; }
|
||||
public bool UseV2Flow { get; set; }
|
||||
|
||||
public VssCredentials ToVssCredentials()
|
||||
{
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
using System;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.OAuth;
|
||||
|
||||
namespace GitHub.Runner.Listener.Configuration
|
||||
@@ -10,7 +10,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
Boolean RequireInteractive { get; }
|
||||
CredentialData CredentialData { get; set; }
|
||||
VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2);
|
||||
VssCredentials GetVssCredentials(IHostContext context);
|
||||
void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl);
|
||||
}
|
||||
|
||||
@@ -25,7 +25,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
public virtual Boolean RequireInteractive => false;
|
||||
public CredentialData CredentialData { get; set; }
|
||||
|
||||
public abstract VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2);
|
||||
public abstract VssCredentials GetVssCredentials(IHostContext context);
|
||||
public abstract void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl);
|
||||
}
|
||||
|
||||
@@ -33,7 +33,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
public OAuthAccessTokenCredential() : base(Constants.Configuration.OAuthAccessToken) { }
|
||||
|
||||
public override VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2)
|
||||
public override VssCredentials GetVssCredentials(IHostContext context)
|
||||
{
|
||||
ArgUtil.NotNull(context, nameof(context));
|
||||
Tracing trace = context.GetTrace(nameof(OAuthAccessTokenCredential));
|
||||
|
||||
@@ -22,18 +22,10 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
// Nothing to verify here
|
||||
}
|
||||
|
||||
public override VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2)
|
||||
public override VssCredentials GetVssCredentials(IHostContext context)
|
||||
{
|
||||
var clientId = this.CredentialData.Data.GetValueOrDefault("clientId", null);
|
||||
var authorizationUrl = this.CredentialData.Data.GetValueOrDefault("authorizationUrl", null);
|
||||
var authorizationUrlV2 = this.CredentialData.Data.GetValueOrDefault("authorizationUrlV2", null);
|
||||
|
||||
if (allowAuthUrlV2 &&
|
||||
!string.IsNullOrEmpty(authorizationUrlV2) &&
|
||||
context.AllowAuthMigration)
|
||||
{
|
||||
authorizationUrl = authorizationUrlV2;
|
||||
}
|
||||
|
||||
// For back compat with .credential file that doesn't has 'oauthEndpointUrl' section
|
||||
var oauthEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
#if OS_WINDOWS
|
||||
#pragma warning disable CA1416
|
||||
using System.IO;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
@@ -85,5 +84,4 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
}
|
||||
}
|
||||
#pragma warning restore CA1416
|
||||
#endif
|
||||
|
||||
@@ -1,44 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Services.Common;

namespace GitHub.Runner.Listener
{
[ServiceLocator(Default = typeof(ErrorThrottler))]
public interface IErrorThrottler : IRunnerService
{
void Reset();
Task IncrementAndWaitAsync(CancellationToken token);
}

public sealed class ErrorThrottler : RunnerService, IErrorThrottler
{
internal static readonly TimeSpan MinBackoff = TimeSpan.FromSeconds(1);
internal static readonly TimeSpan MaxBackoff = TimeSpan.FromMinutes(1);
internal static readonly TimeSpan BackoffCoefficient = TimeSpan.FromSeconds(1);
private int _count = 0;

public void Reset()
{
_count = 0;
}

public async Task IncrementAndWaitAsync(CancellationToken token)
{
if (++_count <= 1)
{
return;
}

TimeSpan backoff = BackoffTimerHelper.GetExponentialBackoff(
attempt: _count - 2, // 0-based attempt
minBackoff: MinBackoff,
maxBackoff: MaxBackoff,
deltaBackoff: BackoffCoefficient);
Trace.Warning($"Back off {backoff.TotalSeconds} seconds before next attempt. Current consecutive error count: {_count}");
await HostContext.Delay(backoff, token);
}
}
}
@@ -35,7 +35,7 @@ namespace GitHub.Runner.Listener
|
||||
// This implementation of IJobDispatcher is not thread safe.
|
||||
// It is based on the fact that the current design of the runner is a dequeue
|
||||
// and processes one message from the message queue at a time.
|
||||
// In addition, it only executes one job every time,
|
||||
// In addition, it only executes one job every time,
|
||||
// and the server will not send another job while this one is still running.
|
||||
public sealed class JobDispatcher : RunnerService, IJobDispatcher
|
||||
{
|
||||
@@ -110,12 +110,7 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
var jwt = JsonWebToken.Create(accessToken);
|
||||
var claims = jwt.ExtractClaims();
|
||||
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orch_id", StringComparison.OrdinalIgnoreCase))?.Value;
|
||||
if (string.IsNullOrEmpty(orchestrationId))
|
||||
{
|
||||
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
|
||||
}
|
||||
|
||||
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
|
||||
if (!string.IsNullOrEmpty(orchestrationId))
|
||||
{
|
||||
Trace.Info($"Pull OrchestrationId {orchestrationId} from JWT claims");
|
||||
@@ -550,36 +545,14 @@ namespace GitHub.Runner.Listener
|
||||
detailInfo = string.Join(Environment.NewLine, workerOutput);
|
||||
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
|
||||
|
||||
try
|
||||
{
|
||||
var jobServer = await InitializeJobServerAsync(systemConnection);
|
||||
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
|
||||
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
|
||||
switch (jobServer)
|
||||
{
|
||||
case IJobServer js:
|
||||
{
|
||||
await LogWorkerProcessUnhandledException(js, message, unhandledExceptionIssue);
|
||||
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
|
||||
if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Trace.Info($"Finish job with result 'Failed' due to IOException.");
|
||||
await ForceFailJob(js, message);
|
||||
}
|
||||
var jobServer = await InitializeJobServerAsync(systemConnection);
|
||||
await LogWorkerProcessUnhandledException(jobServer, message, detailInfo);
|
||||
|
||||
break;
|
||||
}
|
||||
case IRunServer rs:
|
||||
await ForceFailJob(rs, message, unhandledExceptionIssue);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException($"JobServer type '{jobServer.GetType().Name}' is not supported.");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
|
||||
if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Trace.Error($"Catch exception during log worker process unhandled exception.");
|
||||
Trace.Error(ex);
|
||||
Trace.Info($"Finish job with result 'Failed' due to IOException.");
|
||||
await ForceFailJob(jobServer, message, detailInfo);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -656,22 +629,8 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info("worker process has been killed.");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// message send failed, this might indicate worker process is already exited or stuck.
|
||||
Trace.Info($"Job cancel message sending for job {message.JobId} failed, kill running worker. {ex}");
|
||||
workerProcessCancelTokenSource.Cancel();
|
||||
try
|
||||
{
|
||||
await workerProcessTask;
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
Trace.Info("worker process has been killed.");
|
||||
}
|
||||
}
|
||||
|
||||
// wait worker to exit
|
||||
// wait worker to exit
|
||||
// if worker doesn't exit within timeout, then kill worker.
|
||||
completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken));
|
||||
|
||||
@@ -1158,65 +1117,86 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
|
||||
// log an error issue to job level timeline record
|
||||
private async Task LogWorkerProcessUnhandledException(IJobServer jobServer, Pipelines.AgentJobRequestMessage message, Issue issue)
|
||||
private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
|
||||
{
|
||||
try
|
||||
if (server is IJobServer jobServer)
|
||||
{
|
||||
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
|
||||
ArgUtil.NotNull(timeline, nameof(timeline));
|
||||
try
|
||||
{
|
||||
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
|
||||
ArgUtil.NotNull(timeline, nameof(timeline));
|
||||
|
||||
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
|
||||
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
|
||||
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
|
||||
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
|
||||
|
||||
jobRecord.ErrorCount++;
|
||||
jobRecord.Issues.Add(issue);
|
||||
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
|
||||
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
|
||||
jobRecord.ErrorCount++;
|
||||
jobRecord.Issues.Add(unhandledExceptionIssue);
|
||||
|
||||
Trace.Info("Mark the job as failed since the worker crashed");
|
||||
jobRecord.Result = TaskResult.Failed;
|
||||
// mark the job as completed so service will pickup the result
|
||||
jobRecord.State = TimelineRecordState.Completed;
|
||||
if (message.Variables.TryGetValue("DistributedTask.MarkJobAsFailedOnWorkerCrash", out var markJobAsFailedOnWorkerCrash) &&
|
||||
StringUtil.ConvertToBoolean(markJobAsFailedOnWorkerCrash?.Value))
|
||||
{
|
||||
Trace.Info("Mark the job as failed since the worker crashed");
|
||||
jobRecord.Result = TaskResult.Failed;
|
||||
// mark the job as completed so service will pickup the result
|
||||
jobRecord.State = TimelineRecordState.Completed;
|
||||
}
|
||||
|
||||
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
|
||||
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
else
|
||||
{
|
||||
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
|
||||
Trace.Error(ex);
|
||||
Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", detailInfo);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// raise job completed event to fail the job.
|
||||
private async Task ForceFailJob(IJobServer jobServer, Pipelines.AgentJobRequestMessage message)
|
||||
private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
|
||||
{
|
||||
try
|
||||
if (server is IJobServer jobServer)
|
||||
{
|
||||
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
|
||||
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Fail to raise JobCompletedEvent back to service.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
private async Task ForceFailJob(IRunServer runServer, Pipelines.AgentJobRequestMessage message, Issue issue)
|
||||
{
|
||||
try
|
||||
{
|
||||
var annotation = issue.ToAnnotation();
|
||||
var jobAnnotations = new List<Annotation>();
|
||||
if (annotation.HasValue)
|
||||
try
|
||||
{
|
||||
jobAnnotations.Add(annotation.Value);
|
||||
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
|
||||
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Fail to raise JobCompletedEvent back to service.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
|
||||
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, telemetry: null, billingOwnerId: message.BillingOwnerId, infrastructureFailureCategory: null, CancellationToken.None);
|
||||
}
|
||||
catch (Exception ex)
|
||||
else if (server is IRunServer runServer)
|
||||
{
|
||||
Trace.Error("Fail to raise job completion back to service.");
|
||||
Trace.Error(ex);
|
||||
try
|
||||
{
|
||||
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
|
||||
var unhandledAnnotation = unhandledExceptionIssue.ToAnnotation();
|
||||
var jobAnnotations = new List<Annotation>();
|
||||
if (unhandledAnnotation.HasValue)
|
||||
{
|
||||
jobAnnotations.Add(unhandledAnnotation.Value);
|
||||
}
|
||||
|
||||
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, CancellationToken.None);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Fail to raise job completion back to service.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported.");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -14,27 +14,16 @@ using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.OAuth;
|
||||
using GitHub.Services.WebApi;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
public enum CreateSessionResult
|
||||
{
|
||||
Success,
|
||||
Failure,
|
||||
SessionConflict
|
||||
}
|
||||
|
||||
[ServiceLocator(Default = typeof(MessageListener))]
|
||||
public interface IMessageListener : IRunnerService
|
||||
{
|
||||
Task<CreateSessionResult> CreateSessionAsync(CancellationToken token);
|
||||
Task<Boolean> CreateSessionAsync(CancellationToken token);
|
||||
Task DeleteSessionAsync();
|
||||
Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token);
|
||||
Task DeleteMessageAsync(TaskAgentMessage message);
|
||||
Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken);
|
||||
|
||||
Task RefreshListenerTokenAsync();
|
||||
void OnJobStatus(object sender, JobStatusEventArgs e);
|
||||
}
|
||||
|
||||
@@ -44,8 +33,6 @@ namespace GitHub.Runner.Listener
|
||||
private RunnerSettings _settings;
|
||||
private ITerminal _term;
|
||||
private IRunnerServer _runnerServer;
|
||||
private IBrokerServer _brokerServer;
|
||||
private ICredentialManager _credMgr;
|
||||
private TaskAgentSession _session;
|
||||
private TimeSpan _getNextMessageRetryInterval;
|
||||
private bool _accessTokenRevoked = false;
|
||||
@@ -53,12 +40,8 @@ namespace GitHub.Runner.Listener
|
||||
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
|
||||
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
|
||||
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
|
||||
private TaskAgentStatus _runnerStatus = TaskAgentStatus.Online;
|
||||
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
|
||||
private CancellationTokenSource _getMessagesTokenSource;
|
||||
private VssCredentials _creds;
|
||||
private VssCredentials _credsV2;
|
||||
private bool _needRefreshCredsV2 = false;
|
||||
private bool _handlerInitialized = false;
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
@@ -66,11 +49,9 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
_term = HostContext.GetService<ITerminal>();
|
||||
_runnerServer = HostContext.GetService<IRunnerServer>();
|
||||
_brokerServer = hostContext.GetService<IBrokerServer>();
|
||||
_credMgr = hostContext.GetService<ICredentialManager>();
|
||||
}
|
||||
|
||||
public async Task<CreateSessionResult> CreateSessionAsync(CancellationToken token)
|
||||
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
|
||||
{
|
||||
Trace.Entering();
|
||||
|
||||
@@ -82,7 +63,8 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
// Create connection.
|
||||
Trace.Info("Loading Credentials");
|
||||
_creds = _credMgr.LoadCredentials(allowAuthUrlV2: false);
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
VssCredentials creds = credMgr.LoadCredentials();
|
||||
|
||||
var agent = new TaskAgentReference
|
||||
{
|
||||
@@ -91,8 +73,7 @@ namespace GitHub.Runner.Listener
|
||||
Version = BuildConstants.RunnerPackage.Version,
|
||||
OSDescription = RuntimeInformation.OSDescription,
|
||||
};
|
||||
var currentProcess = Process.GetCurrentProcess();
|
||||
string sessionName = $"{Environment.MachineName ?? "RUNNER"} (PID: {currentProcess.Id})";
|
||||
string sessionName = $"{Environment.MachineName ?? "RUNNER"}";
|
||||
var taskAgentSession = new TaskAgentSession(sessionName, agent);
|
||||
|
||||
string errorMessage = string.Empty;
|
||||
@@ -105,7 +86,7 @@ namespace GitHub.Runner.Listener
|
||||
try
|
||||
{
|
||||
Trace.Info("Connecting to the Runner Server...");
|
||||
await _runnerServer.ConnectAsync(new Uri(serverUrl), _creds);
|
||||
await _runnerServer.ConnectAsync(new Uri(serverUrl), creds);
|
||||
Trace.Info("VssConnection created");
|
||||
|
||||
_term.WriteLine();
|
||||
@@ -116,6 +97,7 @@ namespace GitHub.Runner.Listener
|
||||
_settings.PoolId,
|
||||
taskAgentSession,
|
||||
token);
|
||||
|
||||
Trace.Info($"Session created.");
|
||||
if (encounteringError)
|
||||
{
|
||||
@@ -124,14 +106,7 @@ namespace GitHub.Runner.Listener
|
||||
encounteringError = false;
|
||||
}
|
||||
|
||||
if (!_handlerInitialized)
|
||||
{
|
||||
Trace.Info("Registering AuthMigrationChanged event handler.");
|
||||
HostContext.AuthMigrationChanged += HandleAuthMigrationChanged;
|
||||
_handlerInitialized = true;
|
||||
}
|
||||
|
||||
return CreateSessionResult.Success;
|
||||
return true;
|
||||
}
|
||||
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
||||
{
|
||||
@@ -149,13 +124,13 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Error("Catch exception during create session.");
|
||||
Trace.Error(ex);
|
||||
|
||||
if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred)
|
||||
if (ex is VssOAuthTokenRequestException vssOAuthEx && creds.Federated is VssOAuthCredential vssOAuthCred)
|
||||
{
|
||||
// "invalid_client" means the runner registration has been deleted from the server.
|
||||
if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
|
||||
return CreateSessionResult.Failure;
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check whether we get 401 because the runner registration already removed by the service.
|
||||
@@ -166,18 +141,14 @@ namespace GitHub.Runner.Listener
|
||||
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently.");
|
||||
return CreateSessionResult.Failure;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (!IsSessionCreationExceptionRetriable(ex))
|
||||
{
|
||||
_term.WriteError($"Failed to create session. {ex.Message}");
|
||||
if (ex is TaskAgentSessionConflictException)
|
||||
{
|
||||
return CreateSessionResult.SessionConflict;
|
||||
}
|
||||
return CreateSessionResult.Failure;
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!encounteringError) //print the message only on the first error
|
||||
@@ -196,11 +167,6 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
if (_session != null && _session.SessionId != Guid.Empty)
|
||||
{
|
||||
if (_handlerInitialized)
|
||||
{
|
||||
HostContext.AuthMigrationChanged -= HandleAuthMigrationChanged;
|
||||
}
|
||||
|
||||
if (!_accessTokenRevoked)
|
||||
{
|
||||
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
@@ -218,7 +184,7 @@ namespace GitHub.Runner.Listener
|
||||
public void OnJobStatus(object sender, JobStatusEventArgs e)
|
||||
{
|
||||
Trace.Info("Received job status event. JobState: {0}", e.Status);
|
||||
_runnerStatus = e.Status;
|
||||
runnerStatus = e.Status;
|
||||
try
|
||||
{
|
||||
_getMessagesTokenSource?.Cancel();
|
||||
@@ -227,7 +193,6 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
Trace.Info("_getMessagesTokenSource is already disposed.");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public async Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token)
|
||||
@@ -237,7 +202,6 @@ namespace GitHub.Runner.Listener
|
||||
ArgUtil.NotNull(_settings, nameof(_settings));
|
||||
bool encounteringError = false;
|
||||
int continuousError = 0;
|
||||
int continuousEmptyMessage = 0;
|
||||
string errorMessage = string.Empty;
|
||||
Stopwatch heartbeat = new();
|
||||
heartbeat.Restart();
|
||||
@@ -251,7 +215,7 @@ namespace GitHub.Runner.Listener
|
||||
message = await _runnerServer.GetAgentMessageAsync(_settings.PoolId,
|
||||
_session.SessionId,
|
||||
_lastMessageId,
|
||||
_runnerStatus,
|
||||
runnerStatus,
|
||||
BuildConstants.RunnerPackage.Version,
|
||||
VarUtil.OS,
|
||||
VarUtil.OSArchitecture,
|
||||
@@ -261,28 +225,6 @@ namespace GitHub.Runner.Listener
|
||||
// Decrypt the message body if the session is using encryption
|
||||
message = DecryptMessage(message);
|
||||
|
||||
if (message != null && message.MessageType == BrokerMigrationMessage.MessageType)
|
||||
{
|
||||
var migrationMessage = JsonUtility.FromString<BrokerMigrationMessage>(message.Body);
|
||||
|
||||
_credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||
await _brokerServer.UpdateConnectionIfNeeded(migrationMessage.BrokerBaseUrl, _credsV2);
|
||||
if (_needRefreshCredsV2)
|
||||
{
|
||||
Trace.Info("Refreshing credentials for V2.");
|
||||
await _brokerServer.ForceRefreshConnection(_credsV2);
|
||||
_needRefreshCredsV2 = false;
|
||||
}
|
||||
|
||||
message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId,
|
||||
_runnerStatus,
|
||||
BuildConstants.RunnerPackage.Version,
|
||||
VarUtil.OS,
|
||||
VarUtil.OSArchitecture,
|
||||
_settings.DisableUpdate,
|
||||
token);
|
||||
}
|
||||
|
||||
if (message != null)
|
||||
{
|
||||
_lastMessageId = message.MessageId;
|
||||
@@ -311,16 +253,7 @@ namespace GitHub.Runner.Listener
|
||||
_accessTokenRevoked = true;
|
||||
throw;
|
||||
}
|
||||
catch (HostedRunnerDeprovisionedException)
|
||||
{
|
||||
Trace.Info("Hosted runner has been deprovisioned.");
|
||||
throw;
|
||||
}
|
||||
catch (AccessDeniedException e) when (e.ErrorCode == 1 && !HostContext.AllowAuthMigration)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
catch (RunnerNotFoundException) when (!HostContext.AllowAuthMigration)
|
||||
catch (AccessDeniedException e) when (e.ErrorCode == 1)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
@@ -329,19 +262,12 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Error("Catch exception during get next message.");
|
||||
Trace.Error(ex);
|
||||
|
||||
// clear out potential message for broker migration,
|
||||
// in case the exception is thrown from get message from broker-listener.
|
||||
message = null;
|
||||
|
||||
// don't retry if SkipSessionRecover = true, DT service will delete agent session to stop agent from taking more jobs.
|
||||
if (!HostContext.AllowAuthMigration &&
|
||||
ex is TaskAgentSessionExpiredException &&
|
||||
!_settings.SkipSessionRecover && (await CreateSessionAsync(token) == CreateSessionResult.Success))
|
||||
if (ex is TaskAgentSessionExpiredException && !_settings.SkipSessionRecover && await CreateSessionAsync(token))
|
||||
{
|
||||
Trace.Info($"{nameof(TaskAgentSessionExpiredException)} received, recovered by recreate session.");
|
||||
}
|
||||
else if (!HostContext.AllowAuthMigration &&
|
||||
!IsGetNextMessageExceptionRetriable(ex))
|
||||
else if (!IsGetNextMessageExceptionRetriable(ex))
|
||||
{
|
||||
throw;
|
||||
}
|
||||
@@ -368,12 +294,6 @@ namespace GitHub.Runner.Listener
|
||||
encounteringError = true;
|
||||
}
|
||||
|
||||
if (HostContext.AllowAuthMigration)
|
||||
{
|
||||
Trace.Info("Disable migration mode for 60 minutes.");
|
||||
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Get next message failed with exception: {ex}");
|
||||
}
|
||||
|
||||
// re-create VssConnection before next retry
|
||||
await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60));
|
||||
|
||||
@@ -388,27 +308,16 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
if (message == null)
|
||||
{
|
||||
continuousEmptyMessage++;
|
||||
if (heartbeat.Elapsed > TimeSpan.FromMinutes(30))
|
||||
{
|
||||
Trace.Info($"No message retrieved from session '{_session.SessionId}' within last 30 minutes.");
|
||||
heartbeat.Restart();
|
||||
continuousEmptyMessage = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Verbose($"No message retrieved from session '{_session.SessionId}'.");
|
||||
}
|
||||
|
||||
if (continuousEmptyMessage > 50)
|
||||
{
|
||||
// retried more than 50 times in less than 30mins and still getting empty message
|
||||
// something is not right on the service side, backoff for 15-30s before retry
|
||||
_getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30), _getNextMessageRetryInterval);
|
||||
Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
|
||||
await HostContext.Delay(_getNextMessageRetryInterval, token);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -431,28 +340,6 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
public async Task RefreshListenerTokenAsync()
|
||||
{
|
||||
await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60));
|
||||
_credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||
await _brokerServer.ForceRefreshConnection(_credsV2);
|
||||
}
|
||||
|
||||
public async Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken)
|
||||
{
|
||||
using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // Short timeout
|
||||
using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token);
|
||||
Trace.Info($"Acknowledging runner request '{runnerRequestId}'.");
|
||||
await _brokerServer.AcknowledgeRunnerRequestAsync(
|
||||
runnerRequestId,
|
||||
_session.SessionId,
|
||||
_runnerStatus,
|
||||
BuildConstants.RunnerPackage.Version,
|
||||
VarUtil.OS,
|
||||
VarUtil.OSArchitecture,
|
||||
linkedCts.Token);
|
||||
}
|
||||
|
||||
private TaskAgentMessage DecryptMessage(TaskAgentMessage message)
|
||||
{
|
||||
if (_session.EncryptionKey == null ||
|
||||
@@ -502,7 +389,6 @@ namespace GitHub.Runner.Listener
|
||||
ex is TaskAgentPoolNotFoundException ||
|
||||
ex is TaskAgentSessionExpiredException ||
|
||||
ex is AccessDeniedException ||
|
||||
ex is RunnerNotFoundException ||
|
||||
ex is VssUnauthorizedException)
|
||||
{
|
||||
Trace.Info($"Non-retriable exception: {ex.Message}");
|
||||
@@ -569,8 +455,7 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
else if (ex is TaskAgentPoolNotFoundException ||
|
||||
ex is AccessDeniedException ||
|
||||
ex is VssUnauthorizedException ||
|
||||
(ex is VssOAuthTokenRequestException oauthEx && oauthEx.Error != "server_error"))
|
||||
ex is VssUnauthorizedException)
|
||||
{
|
||||
Trace.Info($"Non-retriable exception: {ex.Message}");
|
||||
return false;
|
||||
@@ -581,11 +466,5 @@ namespace GitHub.Runner.Listener
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
private void HandleAuthMigrationChanged(object sender, EventArgs e)
|
||||
{
|
||||
Trace.Info($"Auth migration changed. Current allow auth migration state: {HostContext.AllowAuthMigration}");
|
||||
_needRefreshCredsV2 = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,6 @@ using System.Reflection;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Services.WebApi;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
@@ -145,12 +144,6 @@ namespace GitHub.Runner.Listener
|
||||
trace.Error(e);
|
||||
return Constants.Runner.ReturnCode.TerminatedError;
|
||||
}
|
||||
catch (RunnerNotFoundException e)
|
||||
{
|
||||
terminal.WriteError($"An error occurred: {e.Message}");
|
||||
trace.Error(e);
|
||||
return Constants.Runner.ReturnCode.TerminatedError;
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
terminal.WriteError($"An error occurred: {e.Message}");
|
||||
|
||||
@@ -1,12 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<TargetFramework>net6.0</TargetFramework>
<OutputType>Exe</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64</RuntimeIdentifiers>
<SelfContained>true</SelfContained>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<NoWarn>NU1701;NU1603;SYSLIB0050;SYSLIB0051</NoWarn>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>
<PredefinedCulturesOnly>false</PredefinedCulturesOnly>
<PublishReadyToRunComposite>true</PublishReadyToRunComposite>
@@ -19,11 +18,17 @@
</ItemGroup>

<ItemGroup>
<PackageReference Include="Microsoft.Win32.Registry" Version="5.0.0" />
<PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="System.IO.FileSystem.AccessControl" Version="5.0.0" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="8.0.0" />
<PackageReference Include="System.ServiceProcess.ServiceController" Version="8.0.1" />
<PackageReference Include="System.IO.FileSystem.AccessControl" Version="4.4.0" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
<PackageReference Include="System.ServiceProcess.ServiceController" Version="4.4.0" />
</ItemGroup>

<ItemGroup>
<EmbeddedResource Include="..\Misc\runnercoreassets">
<LogicalName>GitHub.Runner.Listener.runnercoreassets</LogicalName>
</EmbeddedResource>
</ItemGroup>

<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">

@@ -1,12 +1,10 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Security.Cryptography;
|
||||
using System.Security.Claims;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
@@ -16,9 +14,7 @@ using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Listener.Check;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.OAuth;
|
||||
using GitHub.Services.WebApi;
|
||||
using GitHub.Services.WebApi.Jwt;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
@@ -35,35 +31,11 @@ namespace GitHub.Runner.Listener
|
||||
private ITerminal _term;
|
||||
private bool _inConfigStage;
|
||||
private ManualResetEvent _completedCommand = new(false);
|
||||
private readonly ConcurrentQueue<string> _authMigrationTelemetries = new();
|
||||
private Task _authMigrationTelemetryTask;
|
||||
private readonly object _authMigrationTelemetryLock = new();
|
||||
private Task _authMigrationClaimsCheckTask;
|
||||
private readonly object _authMigrationClaimsCheckLock = new();
|
||||
private IRunnerServer _runnerServer;
|
||||
private CancellationTokenSource _authMigrationTelemetryTokenSource = new();
|
||||
private CancellationTokenSource _authMigrationClaimsCheckTokenSource = new();
|
||||
|
||||
// <summary>
|
||||
// Helps avoid excessive calls to Run Service when encountering non-retriable errors from /acquirejob.
|
||||
// Normally we rely on the HTTP clients to back off between retry attempts. However, acquiring a job
|
||||
// involves calls to both Run Service and Broker. And Run Service and Broker communicate with each other
|
||||
// in an async fashion.
|
||||
//
|
||||
// When Run Service encounters a non-retriable error, it sends an async message to Broker. The runner will,
|
||||
// however, immediately call Broker to get the next message. If the async event from Run Service to Broker
|
||||
// has not yet been processed, the next message from Broker may be the same job message.
|
||||
//
|
||||
// The error throttler helps us back off when encountering successive, non-retriable errors from /acquirejob.
|
||||
// </summary>
|
||||
private IErrorThrottler _acquireJobThrottler;
|
||||
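The summary comment above describes backing off between successive non-retriable /acquirejob errors. A minimal sketch of such a throttler (hypothetical; this is not the repository's IErrorThrottler implementation, whose exact back-off policy is not shown in this diff):

using System;
using System.Threading;
using System.Threading.Tasks;

// Hypothetical sketch: exponential back-off between consecutive errors, capped at one minute.
public sealed class SimpleErrorThrottler
{
    private int _consecutiveErrors;

    public void Reset() => _consecutiveErrors = 0;

    public async Task IncrementAndWaitAsync(CancellationToken token)
    {
        _consecutiveErrors++;
        // 1s, 2s, 4s, ... capped at 60s.
        var delaySeconds = Math.Min(Math.Pow(2, _consecutiveErrors - 1), 60);
        await Task.Delay(TimeSpan.FromSeconds(delaySeconds), token);
    }
}

The listener loop later in this diff calls Reset after a job message is acquired and IncrementAndWaitAsync when a job message is skipped.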
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_term = HostContext.GetService<ITerminal>();
|
||||
_acquireJobThrottler = HostContext.CreateService<IErrorThrottler>();
|
||||
_runnerServer = HostContext.GetService<IRunnerServer>();
|
||||
}
|
||||
|
||||
public async Task<int> ExecuteCommand(CommandSettings command)
|
||||
@@ -79,8 +51,6 @@ namespace GitHub.Runner.Listener
|
||||
//register a SIGTERM handler
|
||||
HostContext.Unloading += Runner_Unloading;
|
||||
|
||||
HostContext.AuthMigrationChanged += HandleAuthMigrationChanged;
|
||||
|
||||
// TODO Unit test to cover this logic
|
||||
Trace.Info(nameof(ExecuteCommand));
|
||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||
@@ -243,21 +213,15 @@ namespace GitHub.Runner.Listener
|
||||
var configFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), config.Key);
|
||||
var configContent = Convert.FromBase64String(config.Value);
|
||||
#if OS_WINDOWS
|
||||
#pragma warning disable CA1416
|
||||
if (configFile == HostContext.GetConfigFile(WellKnownConfigFile.RSACredentials))
|
||||
{
|
||||
configContent = ProtectedData.Protect(configContent, null, DataProtectionScope.LocalMachine);
|
||||
}
|
||||
#pragma warning restore CA1416
|
||||
#endif
|
||||
File.WriteAllBytes(configFile, configContent);
|
||||
File.SetAttributes(configFile, File.GetAttributes(configFile) | FileAttributes.Hidden);
|
||||
Trace.Info($"Saved {configContent.Length} bytes to '{configFile}'.");
|
||||
}
|
||||
|
||||
// make sure we have the right user agent data added from the jitconfig
|
||||
HostContext.LoadDefaultUserAgents();
|
||||
VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
@@ -315,17 +279,8 @@ namespace GitHub.Runner.Listener
|
||||
_term.WriteLine("https://docs.github.com/en/actions/hosting-your-own-runners/autoscaling-with-self-hosted-runners#using-ephemeral-runners-for-autoscaling", ConsoleColor.Yellow);
|
||||
}
|
||||
|
||||
var cred = store.GetCredentials();
|
||||
if (cred != null &&
|
||||
cred.Scheme == Constants.Configuration.OAuth &&
|
||||
cred.Data.ContainsKey("EnableAuthMigrationByDefault"))
|
||||
{
|
||||
Trace.Info("Enable auth migration by default.");
|
||||
HostContext.EnableAuthMigration("EnableAuthMigrationByDefault");
|
||||
}
|
||||
|
||||
// Run the runner interactively or as service
|
||||
return await ExecuteRunnerAsync(settings, command.RunOnce || settings.Ephemeral);
|
||||
return await RunAsync(settings, command.RunOnce || settings.Ephemeral);
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -335,9 +290,6 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
finally
|
||||
{
|
||||
_authMigrationClaimsCheckTokenSource?.Cancel();
|
||||
_authMigrationTelemetryTokenSource?.Cancel();
|
||||
HostContext.AuthMigrationChanged -= HandleAuthMigrationChanged;
|
||||
_term.CancelKeyPress -= CtrlCHandler;
|
||||
HostContext.Unloading -= Runner_Unloading;
|
||||
_completedCommand.Set();
|
||||
@@ -387,12 +339,12 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
private IMessageListener GetMessageListener(RunnerSettings settings, bool isMigratedSettings = false)
|
||||
private IMessageListener GetMesageListener(RunnerSettings settings)
|
||||
{
|
||||
if (settings.UseV2Flow)
|
||||
{
|
||||
Trace.Info($"Using BrokerMessageListener");
|
||||
var brokerListener = new BrokerMessageListener(settings, isMigratedSettings);
|
||||
var brokerListener = new BrokerMessageListener();
|
||||
brokerListener.Initialize(HostContext);
|
||||
return brokerListener;
|
||||
}
|
||||
@@ -406,65 +358,10 @@ namespace GitHub.Runner.Listener
|
||||
try
|
||||
{
|
||||
Trace.Info(nameof(RunAsync));
|
||||
|
||||
// First try using migrated settings if available
|
||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||
RunnerSettings migratedSettings = null;
|
||||
|
||||
try
|
||||
_listener = GetMesageListener(settings);
|
||||
if (!await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken))
|
||||
{
|
||||
migratedSettings = configManager.LoadMigratedSettings();
|
||||
Trace.Info("Loaded migrated settings from .runner_migrated file");
|
||||
Trace.Info(migratedSettings);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// If migrated settings file doesn't exist or can't be loaded, we'll use the provided settings
|
||||
Trace.Info($"Failed to load migrated settings: {ex.Message}");
|
||||
}
|
||||
|
||||
bool usedMigratedSettings = false;
|
||||
|
||||
if (migratedSettings != null)
|
||||
{
|
||||
// Try to create session with migrated settings first
|
||||
Trace.Info("Attempting to create session using migrated settings");
|
||||
_listener = GetMessageListener(migratedSettings, isMigratedSettings: true);
|
||||
|
||||
try
|
||||
{
|
||||
CreateSessionResult createSessionResult = await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken);
|
||||
if (createSessionResult == CreateSessionResult.Success)
|
||||
{
|
||||
Trace.Info("Successfully created session with migrated settings");
|
||||
settings = migratedSettings; // Use migrated settings for the rest of the process
|
||||
usedMigratedSettings = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Warning($"Failed to create session with migrated settings: {createSessionResult}");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Exception when creating session with migrated settings: {ex}");
|
||||
}
|
||||
}
|
||||
|
||||
// If migrated settings weren't used or session creation failed, use original settings
|
||||
if (!usedMigratedSettings)
|
||||
{
|
||||
Trace.Info("Falling back to original .runner settings");
|
||||
_listener = GetMessageListener(settings);
|
||||
CreateSessionResult createSessionResult = await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken);
|
||||
if (createSessionResult == CreateSessionResult.SessionConflict)
|
||||
{
|
||||
return Constants.Runner.ReturnCode.SessionConflict;
|
||||
}
|
||||
else if (createSessionResult == CreateSessionResult.Failure)
|
||||
{
|
||||
return Constants.Runner.ReturnCode.TerminatedError;
|
||||
}
|
||||
return Constants.Runner.ReturnCode.TerminatedError;
|
||||
}
|
||||
|
||||
HostContext.WritePerfCounter("SessionCreated");
|
||||
@@ -478,8 +375,6 @@ namespace GitHub.Runner.Listener
|
||||
// Should we try to cleanup ephemeral runners
|
||||
bool runOnceJobCompleted = false;
|
||||
bool skipSessionDeletion = false;
|
||||
bool restartSession = false; // Flag to indicate session restart
|
||||
bool restartSessionPending = false;
|
||||
try
|
||||
{
|
||||
var notification = HostContext.GetService<IJobNotification>();
|
||||
@@ -495,15 +390,6 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
while (!HostContext.RunnerShutdownToken.IsCancellationRequested)
|
||||
{
|
||||
// Check if we need to restart the session and can do so (job dispatcher not busy)
|
||||
if (restartSessionPending && !jobDispatcher.Busy)
|
||||
{
|
||||
Trace.Info("Pending session restart detected and job dispatcher is not busy. Restarting session now.");
|
||||
messageQueueLoopTokenSource.Cancel();
|
||||
restartSession = true;
|
||||
break;
|
||||
}
|
||||
|
||||
TaskAgentMessage message = null;
|
||||
bool skipMessageDeletion = false;
|
||||
try
|
||||
@@ -654,74 +540,36 @@ namespace GitHub.Runner.Listener
|
||||
else
|
||||
{
|
||||
var messageRef = StringUtil.ConvertFromJson<RunnerJobRequestRef>(message.Body);
|
||||
|
||||
// Acknowledge (best-effort)
|
||||
if (messageRef.ShouldAcknowledge) // Temporary feature flag
|
||||
{
|
||||
try
|
||||
{
|
||||
await _listener.AcknowledgeMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Best-effort acknowledge failed for request '{messageRef.RunnerRequestId}'");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
Pipelines.AgentJobRequestMessage jobRequestMessage = null;
|
||||
|
||||
// Create connection
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
var creds = credMgr.LoadCredentials();
|
||||
|
||||
if (string.IsNullOrEmpty(messageRef.RunServiceUrl))
|
||||
{
|
||||
// Connect
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
var creds = credMgr.LoadCredentials(allowAuthUrlV2: false);
|
||||
var actionsRunServer = HostContext.CreateService<IActionsRunServer>();
|
||||
await actionsRunServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
|
||||
|
||||
// Get job message
|
||||
jobRequestMessage = await actionsRunServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Connect
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
var credsV2 = credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||
var runServer = HostContext.CreateService<IRunServer>();
|
||||
await runServer.ConnectAsync(new Uri(messageRef.RunServiceUrl), credsV2);
|
||||
|
||||
// Get job message
|
||||
await runServer.ConnectAsync(new Uri(messageRef.RunServiceUrl), creds);
|
||||
try
|
||||
{
|
||||
jobRequestMessage = await runServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageRef.BillingOwnerId, messageQueueLoopTokenSource.Token);
|
||||
_acquireJobThrottler.Reset();
|
||||
jobRequestMessage =
|
||||
await runServer.GetJobMessageAsync(messageRef.RunnerRequestId,
|
||||
messageQueueLoopTokenSource.Token);
|
||||
}
|
||||
catch (Exception ex) when (
|
||||
ex is TaskOrchestrationJobNotFoundException || // HTTP status 404
|
||||
ex is TaskOrchestrationJobAlreadyAcquiredException || // HTTP status 409
|
||||
ex is TaskOrchestrationJobUnprocessableException) // HTTP status 422
|
||||
catch (TaskOrchestrationJobAlreadyAcquiredException)
|
||||
{
|
||||
Trace.Info($"Skipping message Job. {ex.Message}");
|
||||
await _acquireJobThrottler.IncrementAndWaitAsync(messageQueueLoopTokenSource.Token);
|
||||
continue;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Caught exception from acquiring job message: {ex}");
|
||||
|
||||
if (HostContext.AllowAuthMigration)
|
||||
{
|
||||
Trace.Info("Disable migration mode for 60 minutes.");
|
||||
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Acquire job failed with exception: {ex}");
|
||||
}
|
||||
|
||||
Trace.Info("Job is already acquired, skip this message.");
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Dispatch
|
||||
jobDispatcher.Run(jobRequestMessage, runOnce);
|
||||
|
||||
// Run once?
|
||||
if (runOnce)
|
||||
{
|
||||
Trace.Info("One time used runner received job message.");
|
||||
@@ -748,33 +596,6 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Service requests the hosted runner to shutdown. Reason: '{HostedRunnerShutdownMessage.Reason}'.");
|
||||
return Constants.Runner.ReturnCode.Success;
|
||||
}
|
||||
else if (string.Equals(message.MessageType, TaskAgentMessageTypes.ForceTokenRefresh))
|
||||
{
|
||||
Trace.Info("Received ForceTokenRefreshMessage");
|
||||
await _listener.RefreshListenerTokenAsync();
|
||||
}
|
||||
else if (string.Equals(message.MessageType, RunnerRefreshConfigMessage.MessageType))
|
||||
{
|
||||
var runnerRefreshConfigMessage = JsonUtility.FromString<RunnerRefreshConfigMessage>(message.Body);
|
||||
Trace.Info($"Received RunnerRefreshConfigMessage for '{runnerRefreshConfigMessage.ConfigType}' config file");
|
||||
var configUpdater = HostContext.GetService<IRunnerConfigUpdater>();
|
||||
await configUpdater.UpdateRunnerConfigAsync(
|
||||
runnerQualifiedId: runnerRefreshConfigMessage.RunnerQualifiedId,
|
||||
configType: runnerRefreshConfigMessage.ConfigType,
|
||||
serviceType: runnerRefreshConfigMessage.ServiceType,
|
||||
configRefreshUrl: runnerRefreshConfigMessage.ConfigRefreshUrl);
|
||||
|
||||
// Set flag to schedule session restart if ConfigType is "runner"
|
||||
if (string.Equals(runnerRefreshConfigMessage.ConfigType, "runner", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Trace.Info("Runner configuration was updated. Session restart has been scheduled");
|
||||
restartSessionPending = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"No session restart needed for config type: {runnerRefreshConfigMessage.ConfigType}");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Error($"Received message {message.MessageId} with unsupported message type {message.MessageType}.");
|
||||
@@ -813,7 +634,6 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
try
|
||||
{
|
||||
Trace.Info("Deleting Runner Session...");
|
||||
await _listener.DeleteSessionAsync();
|
||||
}
|
||||
catch (Exception ex) when (runOnce)
|
||||
@@ -828,243 +648,19 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
if (settings.Ephemeral && runOnceJobCompleted)
|
||||
{
|
||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||
configManager.DeleteLocalRunnerConfig();
|
||||
}
|
||||
}
|
||||
|
||||
// After cleanup, check if we need to restart the session
|
||||
if (restartSession)
|
||||
{
|
||||
Trace.Info("Restarting runner session after config update...");
|
||||
return Constants.Runner.ReturnCode.RunnerConfigurationRefreshed;
|
||||
}
|
||||
}
|
||||
catch (TaskAgentAccessTokenExpiredException)
|
||||
{
|
||||
Trace.Info("Runner OAuth token has been revoked. Shutting down.");
|
||||
}
|
||||
catch (HostedRunnerDeprovisionedException)
|
||||
{
|
||||
Trace.Info("Hosted runner has been deprovisioned. Shutting down.");
|
||||
}
|
||||
|
||||
return Constants.Runner.ReturnCode.Success;
|
||||
}
|
||||
|
||||
private async Task<int> ExecuteRunnerAsync(RunnerSettings settings, bool runOnce)
|
||||
{
|
||||
int returnCode = Constants.Runner.ReturnCode.Success;
|
||||
bool restart = false;
|
||||
do
|
||||
{
|
||||
restart = false;
|
||||
returnCode = await RunAsync(settings, runOnce);
|
||||
|
||||
if (returnCode == Constants.Runner.ReturnCode.RunnerConfigurationRefreshed)
|
||||
{
|
||||
Trace.Info("Runner configuration was refreshed, restarting session...");
|
||||
// Reload settings in case they changed
|
||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||
settings = configManager.LoadSettings();
|
||||
restart = true;
|
||||
}
|
||||
} while (restart);
|
||||
|
||||
return returnCode;
|
||||
}
|
||||
|
||||
private void HandleAuthMigrationChanged(object sender, AuthMigrationEventArgs e)
|
||||
{
|
||||
Trace.Verbose("Handle AuthMigrationChanged in Runner");
|
||||
_authMigrationTelemetries.Enqueue($"{DateTime.UtcNow.ToString("O")}: {e.Trace}");
|
||||
|
||||
// only start the telemetry reporting task once auth migration is changed (enabled or disabled)
|
||||
lock (_authMigrationTelemetryLock)
|
||||
{
|
||||
if (_authMigrationTelemetryTask == null)
|
||||
{
|
||||
_authMigrationTelemetryTask = ReportAuthMigrationTelemetryAsync(_authMigrationTelemetryTokenSource.Token);
|
||||
}
|
||||
}
|
||||
|
||||
// only start the claims check task once auth migration is changed (enabled or disabled)
|
||||
lock (_authMigrationClaimsCheckLock)
|
||||
{
|
||||
if (_authMigrationClaimsCheckTask == null)
|
||||
{
|
||||
_authMigrationClaimsCheckTask = CheckOAuthTokenClaimsAsync(_authMigrationClaimsCheckTokenSource.Token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async Task CheckOAuthTokenClaimsAsync(CancellationToken token)
|
||||
{
|
||||
string[] expectedClaims =
|
||||
[
|
||||
"owner_id",
|
||||
"runner_id",
|
||||
"runner_group_id",
|
||||
"scale_set_id",
|
||||
"is_ephemeral",
|
||||
"labels"
|
||||
];
|
||||
|
||||
try
|
||||
{
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
while (!token.IsCancellationRequested)
|
||||
{
|
||||
try
|
||||
{
|
||||
await HostContext.Delay(TimeSpan.FromMinutes(100), token);
|
||||
}
|
||||
catch (TaskCanceledException)
|
||||
{
|
||||
// Ignore cancellation
|
||||
}
|
||||
|
||||
if (token.IsCancellationRequested)
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
if (!HostContext.AllowAuthMigration)
|
||||
{
|
||||
Trace.Info("Skip checking oauth token claims since auth migration is disabled.");
|
||||
continue;
|
||||
}
|
||||
|
||||
var baselineCred = credMgr.LoadCredentials(allowAuthUrlV2: false);
|
||||
var authV2Cred = credMgr.LoadCredentials(allowAuthUrlV2: true);
|
||||
|
||||
if (!(baselineCred.Federated is VssOAuthCredential baselineVssOAuthCred) ||
|
||||
!(authV2Cred.Federated is VssOAuthCredential vssOAuthCredV2) ||
|
||||
baselineVssOAuthCred == null ||
|
||||
vssOAuthCredV2 == null)
|
||||
{
|
||||
Trace.Info("Skip checking oauth token claims for non-oauth credentials");
|
||||
continue;
|
||||
}
|
||||
|
||||
if (string.Equals(baselineVssOAuthCred.AuthorizationUrl.AbsoluteUri, vssOAuthCredV2.AuthorizationUrl.AbsoluteUri, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Trace.Info("Skip checking oauth token claims for same authorization url");
|
||||
continue;
|
||||
}
|
||||
|
||||
var baselineProvider = baselineVssOAuthCred.GetTokenProvider(baselineVssOAuthCred.AuthorizationUrl);
|
||||
var v2Provider = vssOAuthCredV2.GetTokenProvider(vssOAuthCredV2.AuthorizationUrl);
|
||||
try
|
||||
{
|
||||
using (var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
using (var requestTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token, timeoutTokenSource.Token))
|
||||
{
|
||||
var baselineToken = await baselineProvider.GetTokenAsync(null, requestTokenSource.Token);
|
||||
var v2Token = await v2Provider.GetTokenAsync(null, requestTokenSource.Token);
|
||||
if (baselineToken is VssOAuthAccessToken baselineAccessToken &&
|
||||
v2Token is VssOAuthAccessToken v2AccessToken &&
|
||||
!string.IsNullOrEmpty(baselineAccessToken.Value) &&
|
||||
!string.IsNullOrEmpty(v2AccessToken.Value))
|
||||
{
|
||||
var baselineJwt = JsonWebToken.Create(baselineAccessToken.Value);
|
||||
var baselineClaims = baselineJwt.ExtractClaims();
|
||||
var v2Jwt = JsonWebToken.Create(v2AccessToken.Value);
|
||||
var v2Claims = v2Jwt.ExtractClaims();
|
||||
|
||||
// Log extracted claims for debugging
|
||||
Trace.Verbose($"Baseline token expected claims: {string.Join(", ", baselineClaims
|
||||
.Where(c => expectedClaims.Contains(c.Type.ToLowerInvariant()))
|
||||
.Select(c => $"{c.Type}:{c.Value}"))}");
|
||||
Trace.Verbose($"V2 token expected claims: {string.Join(", ", v2Claims
|
||||
.Where(c => expectedClaims.Contains(c.Type.ToLowerInvariant()))
|
||||
.Select(c => $"{c.Type}:{c.Value}"))}");
|
||||
|
||||
foreach (var claim in expectedClaims)
|
||||
{
|
||||
// if baseline has the claim, v2 should have it too with exactly same value.
|
||||
if (baselineClaims.FirstOrDefault(c => c.Type.ToLowerInvariant() == claim) is Claim baselineClaim &&
|
||||
!string.IsNullOrEmpty(baselineClaim?.Value))
|
||||
{
|
||||
var v2Claim = v2Claims.FirstOrDefault(c => c.Type.ToLowerInvariant() == claim);
|
||||
if (v2Claim?.Value != baselineClaim.Value)
|
||||
{
|
||||
Trace.Info($"Token Claim mismatch between two issuers. Expected: {baselineClaim.Type}:{baselineClaim.Value}. Actual: {v2Claim?.Type ?? "Empty"}:{v2Claim?.Value ?? "Empty"}");
|
||||
HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Expected claim {baselineClaim.Type}:{baselineClaim.Value} does not match {v2Claim?.Type ?? "Empty"}:{v2Claim?.Value ?? "Empty"}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info("OAuth token claims check passed.");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Failed to fetch and check OAuth token claims.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Failed to check OAuth token claims in background.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
private async Task ReportAuthMigrationTelemetryAsync(CancellationToken token)
|
||||
{
|
||||
var configManager = HostContext.GetService<IConfigurationManager>();
|
||||
var runnerSettings = configManager.LoadSettings();
|
||||
|
||||
while (!token.IsCancellationRequested)
|
||||
{
|
||||
try
|
||||
{
|
||||
await HostContext.Delay(TimeSpan.FromSeconds(60), token);
|
||||
}
|
||||
catch (TaskCanceledException)
|
||||
{
|
||||
// Ignore cancellation
|
||||
}
|
||||
|
||||
Trace.Verbose("Checking for auth migration telemetry to report");
|
||||
while (_authMigrationTelemetries.TryDequeue(out var telemetry))
|
||||
{
|
||||
Trace.Verbose($"Reporting auth migration telemetry: {telemetry}");
|
||||
if (runnerSettings != null)
|
||||
{
|
||||
try
|
||||
{
|
||||
using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
{
|
||||
await _runnerServer.UpdateAgentUpdateStateAsync(runnerSettings.PoolId, runnerSettings.AgentId, "RefreshConfig", telemetry, tokenSource.Token);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Failed to report auth migration telemetry.");
|
||||
Trace.Error(ex);
|
||||
_authMigrationTelemetries.Enqueue(telemetry);
|
||||
}
|
||||
}
|
||||
|
||||
if (!token.IsCancellationRequested)
|
||||
{
|
||||
try
|
||||
{
|
||||
await HostContext.Delay(TimeSpan.FromSeconds(10), token);
|
||||
}
|
||||
catch (TaskCanceledException)
|
||||
{
|
||||
// Ignore cancellation
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void PrintUsage(CommandSettings command)
|
||||
{
|
||||
string separator;
|
||||
|
||||
@@ -1,287 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
[ServiceLocator(Default = typeof(RunnerConfigUpdater))]
|
||||
public interface IRunnerConfigUpdater : IRunnerService
|
||||
{
|
||||
Task UpdateRunnerConfigAsync(string runnerQualifiedId, string configType, string serviceType, string configRefreshUrl);
|
||||
}
|
||||
|
||||
public sealed class RunnerConfigUpdater : RunnerService, IRunnerConfigUpdater
|
||||
{
|
||||
private RunnerSettings _settings;
|
||||
private CredentialData _credData;
|
||||
private IRunnerServer _runnerServer;
|
||||
private IConfigurationStore _store;
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_store = hostContext.GetService<IConfigurationStore>();
|
||||
_settings = _store.GetSettings();
|
||||
_credData = _store.GetCredentials();
|
||||
_runnerServer = HostContext.GetService<IRunnerServer>();
|
||||
}
|
||||
|
||||
public async Task UpdateRunnerConfigAsync(string runnerQualifiedId, string configType, string serviceType, string configRefreshUrl)
|
||||
{
|
||||
Trace.Entering();
|
||||
try
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(runnerQualifiedId, nameof(runnerQualifiedId));
|
||||
ArgUtil.NotNullOrEmpty(configType, nameof(configType));
|
||||
ArgUtil.NotNullOrEmpty(serviceType, nameof(serviceType));
|
||||
ArgUtil.NotNullOrEmpty(configRefreshUrl, nameof(configRefreshUrl));
|
||||
|
||||
// make sure the runner qualified id matches the current runner
|
||||
if (!await VerifyRunnerQualifiedId(runnerQualifiedId))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
// keep the timeout short to avoid blocking the main thread
|
||||
using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
{
|
||||
switch (configType.ToLowerInvariant())
|
||||
{
|
||||
case "runner":
|
||||
await UpdateRunnerSettingsAsync(serviceType, configRefreshUrl, tokenSource.Token);
|
||||
break;
|
||||
case "credentials":
|
||||
await UpdateRunnerCredentialsAsync(serviceType, configRefreshUrl, tokenSource.Token);
|
||||
break;
|
||||
default:
|
||||
Trace.Error($"Invalid config type '{configType}'.");
|
||||
await ReportTelemetryAsync($"Invalid config type '{configType}'.");
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Failed to update runner '{configType}' config.");
|
||||
Trace.Error(ex);
|
||||
await ReportTelemetryAsync($"Failed to update runner '{configType}' config: {ex}");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task UpdateRunnerSettingsAsync(string serviceType, string configRefreshUrl, CancellationToken token)
|
||||
{
|
||||
Trace.Entering();
|
||||
// read the current runner settings and encode with base64
|
||||
var runnerConfig = HostContext.GetConfigFile(WellKnownConfigFile.Runner);
|
||||
string runnerConfigContent = File.ReadAllText(runnerConfig, Encoding.UTF8);
|
||||
var encodedConfig = Convert.ToBase64String(Encoding.UTF8.GetBytes(runnerConfigContent));
|
||||
if (string.IsNullOrEmpty(encodedConfig))
|
||||
{
|
||||
await ReportTelemetryAsync("Failed to get encoded runner settings.");
|
||||
return;
|
||||
}
|
||||
|
||||
// exchange the encoded runner settings with the service
|
||||
string refreshedEncodedConfig = await RefreshRunnerConfigAsync(encodedConfig, serviceType, "runner", configRefreshUrl, token);
|
||||
if (string.IsNullOrEmpty(refreshedEncodedConfig))
|
||||
{
|
||||
// service will return empty string if there is no change in the config
|
||||
return;
|
||||
}
|
||||
|
||||
var decodedConfig = Encoding.UTF8.GetString(Convert.FromBase64String(refreshedEncodedConfig));
|
||||
RunnerSettings refreshedRunnerConfig;
|
||||
try
|
||||
{
|
||||
refreshedRunnerConfig = StringUtil.ConvertFromJson<RunnerSettings>(decodedConfig);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Failed to convert runner config from json '{decodedConfig}'.");
|
||||
Trace.Error(ex);
|
||||
await ReportTelemetryAsync($"Failed to convert runner config '{decodedConfig}' from json: {ex}");
|
||||
return;
|
||||
}
|
||||
|
||||
// make sure the runner id and name in the refreshed config match the current runner
|
||||
if (refreshedRunnerConfig?.AgentId != _settings.AgentId)
|
||||
{
|
||||
Trace.Error($"Runner id in refreshed config '{refreshedRunnerConfig?.AgentId.ToString() ?? "Empty"}' does not match the current runner '{_settings.AgentId}'.");
|
||||
await ReportTelemetryAsync($"Runner id in refreshed config '{refreshedRunnerConfig?.AgentId.ToString() ?? "Empty"}' does not match the current runner '{_settings.AgentId}'.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (refreshedRunnerConfig?.AgentName != _settings.AgentName)
|
||||
{
|
||||
Trace.Error($"Runner name in refreshed config '{refreshedRunnerConfig?.AgentName ?? "Empty"}' does not match the current runner '{_settings.AgentName}'.");
|
||||
await ReportTelemetryAsync($"Runner name in refreshed config '{refreshedRunnerConfig?.AgentName ?? "Empty"}' does not match the current runner '{_settings.AgentName}'.");
|
||||
return;
|
||||
}
|
||||
|
||||
// save the refreshed runner settings as a separate file
|
||||
_store.SaveMigratedSettings(refreshedRunnerConfig);
|
||||
await ReportTelemetryAsync("Runner settings updated successfully.");
|
||||
}
|
||||
|
||||
private async Task UpdateRunnerCredentialsAsync(string serviceType, string configRefreshUrl, CancellationToken token)
|
||||
{
|
||||
Trace.Entering();
|
||||
// read the current runner credentials and encode with base64
|
||||
var credConfig = HostContext.GetConfigFile(WellKnownConfigFile.Credentials);
|
||||
string credConfigContent = File.ReadAllText(credConfig, Encoding.UTF8);
|
||||
var encodedConfig = Convert.ToBase64String(Encoding.UTF8.GetBytes(credConfigContent));
|
||||
if (string.IsNullOrEmpty(encodedConfig))
|
||||
{
|
||||
await ReportTelemetryAsync("Failed to get encoded credentials.");
|
||||
return;
|
||||
}
|
||||
|
||||
CredentialData currentCred = _store.GetCredentials();
|
||||
if (currentCred == null)
|
||||
{
|
||||
await ReportTelemetryAsync("Failed to get current credentials.");
|
||||
return;
|
||||
}
|
||||
|
||||
// we only support refreshing OAuth credentials which is used by self-hosted runners.
|
||||
if (currentCred.Scheme != Constants.Configuration.OAuth)
|
||||
{
|
||||
await ReportTelemetryAsync($"Not supported credential scheme '{currentCred.Scheme}'.");
|
||||
return;
|
||||
}
|
||||
|
||||
// exchange the encoded runner credentials with the service
|
||||
string refreshedEncodedConfig = await RefreshRunnerConfigAsync(encodedConfig, serviceType, "credentials", configRefreshUrl, token);
|
||||
if (string.IsNullOrEmpty(refreshedEncodedConfig))
|
||||
{
|
||||
// service will return empty string if there is no change in the config
|
||||
return;
|
||||
}
|
||||
|
||||
var decodedConfig = Encoding.UTF8.GetString(Convert.FromBase64String(refreshedEncodedConfig));
|
||||
CredentialData refreshedCredConfig;
|
||||
try
|
||||
{
|
||||
refreshedCredConfig = StringUtil.ConvertFromJson<CredentialData>(decodedConfig);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Failed to convert credentials config from json '{decodedConfig}'.");
|
||||
Trace.Error(ex);
|
||||
await ReportTelemetryAsync($"Failed to convert credentials config '{decodedConfig}' from json: {ex}");
|
||||
return;
|
||||
}
|
||||
|
||||
// make sure the credential scheme in the refreshed config match the current credential scheme
|
||||
if (refreshedCredConfig?.Scheme != _credData.Scheme)
|
||||
{
|
||||
Trace.Error($"Credential scheme in refreshed config '{refreshedCredConfig?.Scheme ?? "Empty"}' does not match the current credential scheme '{_credData.Scheme}'.");
|
||||
await ReportTelemetryAsync($"Credential scheme in refreshed config '{refreshedCredConfig?.Scheme ?? "Empty"}' does not match the current credential scheme '{_credData.Scheme}'.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (_credData.Scheme == Constants.Configuration.OAuth)
|
||||
{
|
||||
// make sure the credential clientId in the refreshed config match the current credential clientId for OAuth auth scheme
|
||||
var clientId = _credData.Data.GetValueOrDefault("clientId", null);
|
||||
var refreshedClientId = refreshedCredConfig.Data.GetValueOrDefault("clientId", null);
|
||||
if (clientId != refreshedClientId)
|
||||
{
|
||||
Trace.Error($"Credential clientId in refreshed config '{refreshedClientId ?? "Empty"}' does not match the current credential clientId '{clientId}'.");
|
||||
await ReportTelemetryAsync($"Credential clientId in refreshed config '{refreshedClientId ?? "Empty"}' does not match the current credential clientId '{clientId}'.");
|
||||
return;
|
||||
}
|
||||
|
||||
// make sure the credential authorizationUrl in the refreshed config match the current credential authorizationUrl for OAuth auth scheme
|
||||
var authorizationUrl = _credData.Data.GetValueOrDefault("authorizationUrl", null);
|
||||
var refreshedAuthorizationUrl = refreshedCredConfig.Data.GetValueOrDefault("authorizationUrl", null);
|
||||
if (authorizationUrl != refreshedAuthorizationUrl)
|
||||
{
|
||||
Trace.Error($"Credential authorizationUrl in refreshed config '{refreshedAuthorizationUrl ?? "Empty"}' does not match the current credential authorizationUrl '{authorizationUrl}'.");
|
||||
await ReportTelemetryAsync($"Credential authorizationUrl in refreshed config '{refreshedAuthorizationUrl ?? "Empty"}' does not match the current credential authorizationUrl '{authorizationUrl}'.");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// save the refreshed runner credentials as a separate file
|
||||
_store.SaveMigratedCredential(refreshedCredConfig);
|
||||
|
||||
if (refreshedCredConfig.Data.ContainsKey("authorizationUrlV2"))
|
||||
{
|
||||
HostContext.EnableAuthMigration("Credential file updated");
|
||||
await ReportTelemetryAsync("Runner credentials updated successfully. Auth migration is enabled.");
|
||||
}
|
||||
else
|
||||
{
|
||||
HostContext.DeferAuthMigration(TimeSpan.FromDays(365), "Credential file does not contain authorizationUrlV2");
|
||||
await ReportTelemetryAsync("Runner credentials updated successfully. Auth migration is disabled.");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> VerifyRunnerQualifiedId(string runnerQualifiedId)
|
||||
{
|
||||
Trace.Entering();
|
||||
Trace.Info($"Verifying runner qualified id: {runnerQualifiedId}");
|
||||
var idParts = runnerQualifiedId.Split("/", StringSplitOptions.RemoveEmptyEntries);
|
||||
if (idParts.Length != 4 || idParts[3] != _settings.AgentId.ToString())
|
||||
{
|
||||
Trace.Error($"Runner qualified id '{runnerQualifiedId}' does not match the current runner '{_settings.AgentId}'.");
|
||||
await ReportTelemetryAsync($"Runner qualified id '{runnerQualifiedId}' does not match the current runner '{_settings.AgentId}'.");
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private async Task<string> RefreshRunnerConfigAsync(string encodedConfig, string serviceType, string configType, string configRefreshUrl, CancellationToken token)
|
||||
{
|
||||
string refreshedEncodedConfig;
|
||||
switch (serviceType.ToLowerInvariant())
|
||||
{
|
||||
case "pipelines":
|
||||
try
|
||||
{
|
||||
refreshedEncodedConfig = await _runnerServer.RefreshRunnerConfigAsync((int)_settings.AgentId, configType, encodedConfig, token);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Failed to refresh runner {configType} config with service.");
|
||||
Trace.Error(ex);
|
||||
await ReportTelemetryAsync($"Failed to refresh {configType} config: {ex}");
|
||||
return null;
|
||||
}
|
||||
break;
|
||||
case "runner-admin":
|
||||
throw new NotSupportedException("Runner admin service is not supported.");
|
||||
default:
|
||||
Trace.Error($"Invalid service type '{serviceType}'.");
|
||||
await ReportTelemetryAsync($"Invalid service type '{serviceType}'.");
|
||||
return null;
|
||||
}
|
||||
|
||||
return refreshedEncodedConfig;
|
||||
}
|
||||
|
||||
private async Task ReportTelemetryAsync(string telemetry)
|
||||
{
|
||||
Trace.Entering();
|
||||
try
|
||||
{
|
||||
using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
{
|
||||
await _runnerServer.UpdateAgentUpdateStateAsync(_settings.PoolId, _settings.AgentId, "RefreshConfig", telemetry, tokenSource.Token);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Failed to report telemetry.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,17 +7,9 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
[DataMember(Name = "id")]
|
||||
public string Id { get; set; }
|
||||
|
||||
[DataMember(Name = "runner_request_id")]
|
||||
public string RunnerRequestId { get; set; }
|
||||
|
||||
[DataMember(Name = "should_acknowledge")]
|
||||
public bool ShouldAcknowledge { get; set; }
|
||||
|
||||
[DataMember(Name = "run_service_url")]
|
||||
public string RunServiceUrl { get; set; }
|
||||
|
||||
[DataMember(Name = "billing_owner_id")]
|
||||
public string BillingOwnerId { get; set; }
|
||||
}
|
||||
}
|
||||
|
||||
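For reference, a broker message body matching the data contract above (deserialized elsewhere in this diff via StringUtil.ConvertFromJson<RunnerJobRequestRef>) is keyed by the DataMember names shown; the values and URL below are placeholders, not real identifiers:

// Illustration only: field names come from the DataMember attributes above; values are made up.
var exampleBody = @"{
  ""id"": ""<message-id>"",
  ""runner_request_id"": ""<request-id>"",
  ""should_acknowledge"": true,
  ""run_service_url"": ""https://run-service.example"",
  ""billing_owner_id"": ""<owner-id>""
}";
var messageRef = StringUtil.ConvertFromJson<RunnerJobRequestRef>(exampleBody);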
@@ -6,11 +6,13 @@ using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Reflection;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
@@ -28,14 +30,20 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
private static string _packageType = "agent";
|
||||
private static string _platform = BuildConstants.RunnerPackage.PackageName;
|
||||
private static string _dotnetRuntime = "dotnetRuntime";
|
||||
private static string _externals = "externals";
|
||||
private readonly Dictionary<string, string> _contentHashes = new();
|
||||
|
||||
private PackageMetadata _targetPackage;
|
||||
private ITerminal _terminal;
|
||||
private IRunnerServer _runnerServer;
|
||||
private int _poolId;
|
||||
private ulong _agentId;
|
||||
private const int _numberOfOldVersionsToKeep = 1;
|
||||
private readonly ConcurrentQueue<string> _updateTrace = new();
|
||||
private Task _cloneAndCalculateContentHashTask;
|
||||
private string _dotnetRuntimeCloneDirectory;
|
||||
private string _externalsCloneDirectory;
|
||||
|
||||
public bool Busy { get; private set; }
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
@@ -48,6 +56,8 @@ namespace GitHub.Runner.Listener
|
||||
var settings = configStore.GetSettings();
|
||||
_poolId = settings.PoolId;
|
||||
_agentId = settings.AgentId;
|
||||
_dotnetRuntimeCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__dotnet_runtime__");
|
||||
_externalsCloneDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals__");
|
||||
}
|
||||
|
||||
public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
|
||||
@@ -57,6 +67,13 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
var totalUpdateTime = Stopwatch.StartNew();
|
||||
|
||||
// Copy dotnet runtime and externals of current runner to a temp folder
|
||||
// So we can re-use them with trimmed runner package, if possible.
|
||||
// This process is best effort, if we can't use trimmed runner package,
|
||||
// we will just go with the full package.
|
||||
var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
|
||||
_cloneAndCalculateContentHashTask = CloneAndCalculateAssetsHash(_dotnetRuntimeCloneDirectory, _externalsCloneDirectory, linkedTokenSource.Token);
|
||||
|
||||
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
|
||||
{
|
||||
Trace.Info($"Can't find available update package.");
|
||||
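The comment in the hunk above covers cloning the current dotnet runtime and externals so their content hashes can be compared against the server's trimmed packages. As a rough standalone sketch of hashing a directory's contents (an assumption for illustration only; the runner itself computes these hashes through the bundled hashFiles node script invoked later in this diff):

using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;

public static class ContentHash
{
    // Hash every file under a folder in a stable order so the same content
    // always yields the same hash, regardless of enumeration order.
    public static string HashDirectory(string root)
    {
        using var sha256 = SHA256.Create();
        using var combined = new MemoryStream();
        foreach (var file in Directory.EnumerateFiles(root, "*", SearchOption.AllDirectories)
                                      .OrderBy(f => f, StringComparer.Ordinal))
        {
            var pathBytes = Encoding.UTF8.GetBytes(Path.GetRelativePath(root, file));
            combined.Write(pathBytes, 0, pathBytes.Length);
            var fileHash = sha256.ComputeHash(File.ReadAllBytes(file));
            combined.Write(fileHash, 0, fileHash.Length);
        }
        return Convert.ToHexString(sha256.ComputeHash(combined.ToArray()));
    }
}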
@@ -70,6 +87,24 @@ namespace GitHub.Runner.Listener
|
||||
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
|
||||
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
|
||||
|
||||
if (_targetPackage.TrimmedPackages?.Count > 0)
|
||||
{
|
||||
// wait for cloning assets task to finish only if we have trimmed packages
|
||||
await _cloneAndCalculateContentHashTask;
|
||||
}
|
||||
else
|
||||
{
|
||||
linkedTokenSource.Cancel();
|
||||
try
|
||||
{
|
||||
await _cloneAndCalculateContentHashTask;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Info($"Ingore errors after cancelling cloning assets task: {ex}");
|
||||
}
|
||||
}
|
||||
|
||||
await DownloadLatestRunner(token, updateMessage.TargetVersion);
|
||||
Trace.Info($"Download latest runner and unzip into runner root.");
|
||||
|
||||
@@ -183,8 +218,54 @@ namespace GitHub.Runner.Listener
|
||||
string archiveFile = null;
|
||||
var packageDownloadUrl = _targetPackage.DownloadUrl;
|
||||
var packageHashValue = _targetPackage.HashValue;
|
||||
var runtimeTrimmed = false;
|
||||
var externalsTrimmed = false;
|
||||
var fallbackToFullPackage = false;
|
||||
|
||||
// Only try trimmed packages if the server sends them and we have calculated hash values of the current runtime/externals.
|
||||
if (_contentHashes.Count == 2 &&
|
||||
_contentHashes.ContainsKey(_dotnetRuntime) &&
|
||||
_contentHashes.ContainsKey(_externals) &&
|
||||
_targetPackage.TrimmedPackages?.Count > 0)
|
||||
{
|
||||
Trace.Info($"Current runner content hash: {StringUtil.ConvertToJson(_contentHashes)}");
|
||||
Trace.Info($"Trimmed packages info from service: {StringUtil.ConvertToJson(_targetPackage.TrimmedPackages)}");
|
||||
// Try to see whether we can use any of the size-trimmed packages to speed up runner updates.
|
||||
foreach (var trimmedPackage in _targetPackage.TrimmedPackages)
|
||||
{
|
||||
if (trimmedPackage.TrimmedContents.Count == 2 &&
|
||||
trimmedPackage.TrimmedContents.TryGetValue(_dotnetRuntime, out var trimmedRuntimeHash) &&
|
||||
trimmedRuntimeHash == _contentHashes[_dotnetRuntime] &&
|
||||
trimmedPackage.TrimmedContents.TryGetValue(_externals, out var trimmedExternalsHash) &&
|
||||
trimmedExternalsHash == _contentHashes[_externals])
|
||||
{
|
||||
Trace.Info($"Use trimmed (runtime+externals) package '{trimmedPackage.DownloadUrl}' to update runner.");
|
||||
packageDownloadUrl = trimmedPackage.DownloadUrl;
|
||||
packageHashValue = trimmedPackage.HashValue;
|
||||
runtimeTrimmed = true;
|
||||
externalsTrimmed = true;
|
||||
break;
|
||||
}
|
||||
else if (trimmedPackage.TrimmedContents.Count == 1 &&
|
||||
trimmedPackage.TrimmedContents.TryGetValue(_externals, out trimmedExternalsHash) &&
|
||||
trimmedExternalsHash == _contentHashes[_externals])
|
||||
{
|
||||
Trace.Info($"Use trimmed (externals) package '{trimmedPackage.DownloadUrl}' to update runner.");
|
||||
packageDownloadUrl = trimmedPackage.DownloadUrl;
|
||||
packageHashValue = trimmedPackage.HashValue;
|
||||
externalsTrimmed = true;
|
||||
break;
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"Can't use trimmed package from '{trimmedPackage.DownloadUrl}' since the current runner does not carry those trimmed content (Hash mismatch).");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
|
||||
_updateTrace.Enqueue($"RuntimeTrimmed: {runtimeTrimmed}");
|
||||
_updateTrace.Enqueue($"ExternalsTrimmed: {externalsTrimmed}");
|
||||
|
||||
try
|
||||
{
|
||||
@@ -242,6 +323,12 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
|
||||
}
|
||||
catch (Exception ex) when (runtimeTrimmed || externalsTrimmed)
|
||||
{
|
||||
// if anything failed while using the trimmed package (download/validate hash/extract), try again with the full runner package.
|
||||
Trace.Error($"Fail to download latest runner using trimmed package: {ex}");
|
||||
fallbackToFullPackage = true;
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
@@ -260,6 +347,74 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
var trimmedPackageRestoreTasks = new List<Task<bool>>();
|
||||
if (!fallbackToFullPackage)
|
||||
{
|
||||
// Skip restoring externals and runtime if we are going to fall back to the full package.
|
||||
if (externalsTrimmed)
|
||||
{
|
||||
trimmedPackageRestoreTasks.Add(RestoreTrimmedExternals(latestRunnerDirectory, token));
|
||||
}
|
||||
if (runtimeTrimmed)
|
||||
{
|
||||
trimmedPackageRestoreTasks.Add(RestoreTrimmedDotnetRuntime(latestRunnerDirectory, token));
|
||||
}
|
||||
}
|
||||
|
||||
if (trimmedPackageRestoreTasks.Count > 0)
|
||||
{
|
||||
var restoreResults = await Task.WhenAll(trimmedPackageRestoreTasks);
|
||||
if (restoreResults.Any(x => x == false))
|
||||
{
|
||||
// if any of the restore failed, fallback to full package.
|
||||
fallbackToFullPackage = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (fallbackToFullPackage)
|
||||
{
|
||||
Trace.Error("Something wrong with the trimmed runner package, failback to use the full package for runner updates.");
|
||||
_updateTrace.Enqueue($"FallbackToFullPackage: {fallbackToFullPackage}");
|
||||
|
||||
IOUtil.DeleteDirectory(latestRunnerDirectory, token);
|
||||
Directory.CreateDirectory(latestRunnerDirectory);
|
||||
|
||||
packageDownloadUrl = _targetPackage.DownloadUrl;
|
||||
packageHashValue = _targetPackage.HashValue;
|
||||
_updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");
|
||||
|
||||
try
|
||||
{
|
||||
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token);
|
||||
|
||||
if (string.IsNullOrEmpty(archiveFile))
|
||||
{
|
||||
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
|
||||
}
|
||||
|
||||
await ValidateRunnerHash(archiveFile, packageHashValue);
|
||||
|
||||
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
{
|
||||
// delete .zip file
|
||||
if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
|
||||
{
|
||||
Trace.Verbose("Deleting latest runner package zip: {0}", archiveFile);
|
||||
IOUtil.DeleteFile(archiveFile);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
//it is not critical if we fail to delete the .zip file
|
||||
Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await CopyLatestRunnerToRoot(latestRunnerDirectory, token);
|
||||
}
|
||||
|
||||
@@ -510,9 +665,9 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
// delete old bin.2.99.0 folder, only leave the current version and the latest download version
|
||||
var allBinDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "bin.*");
|
||||
if (allBinDirs.Length > _numberOfOldVersionsToKeep)
|
||||
if (allBinDirs.Length > 2)
|
||||
{
|
||||
// there are more than one bin.version folder.
|
||||
// there are more than 2 bin.version folder.
|
||||
// delete older bin.version folders.
|
||||
foreach (var oldBinDir in allBinDirs)
|
||||
{
|
||||
@@ -539,9 +694,9 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
// delete old externals.2.99.0 folder, only leave the current version and the latest download version
|
||||
var allExternalsDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "externals.*");
|
||||
if (allExternalsDirs.Length > _numberOfOldVersionsToKeep)
|
||||
if (allExternalsDirs.Length > 2)
|
||||
{
|
||||
// there are more than one externals.version folder.
|
||||
// there are more than 2 externals.version folder.
|
||||
// delete older externals.version folders.
|
||||
foreach (var oldExternalDir in allExternalsDirs)
|
||||
{
|
||||
@@ -640,5 +795,330 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Catch exception during report update state, ignore this error and continue auto-update.");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> RestoreTrimmedExternals(string downloadDirectory, CancellationToken token)
|
||||
{
|
||||
// Copy the current runner's externals if we are using an externals-trimmed package
|
||||
// Execute the node.js to make sure the copied externals is working.
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Copy {_externalsCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory)}.");
|
||||
IOUtil.CopyDirectory(_externalsCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory), token);
|
||||
|
||||
// try run node.js to see if current node.js works fine after copy over to new location.
|
||||
var nodeVersions = NodeUtil.BuiltInNodeVersions;
|
||||
foreach (var nodeVersion in nodeVersions)
|
||||
{
|
||||
var newNodeBinary = Path.Combine(downloadDirectory, Constants.Path.ExternalsDirectory, nodeVersion, "bin", $"node{IOUtil.ExeExtension}");
|
||||
if (File.Exists(newNodeBinary))
|
||||
{
|
||||
using (var p = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
var outputs = "";
|
||||
p.ErrorDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Error(data.Data);
|
||||
}
|
||||
};
|
||||
p.OutputDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Info(data.Data);
|
||||
outputs = data.Data;
|
||||
}
|
||||
};
|
||||
var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newNodeBinary, $"-e \"console.log('{nameof(RestoreTrimmedExternals)}')\"", null, token);
|
||||
if (exitCode != 0)
|
||||
{
|
||||
Trace.Error($"{newNodeBinary} -e \"console.log()\" failed with exit code {exitCode}");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!string.Equals(outputs, nameof(RestoreTrimmedExternals), StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
Trace.Error($"{newNodeBinary} -e \"console.log()\" did not output expected content.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to restore externals for trimmed package: {ex}");
|
||||
return false;
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(RestoreTrimmedExternals)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> RestoreTrimmedDotnetRuntime(string downloadDirectory, CancellationToken token)
|
||||
{
|
||||
// Copy the current runner's dotnet runtime if we are using a dotnet runtime trimmed package
|
||||
// Execute the runner.listener to make sure the copied runtime is working.
|
||||
var stopWatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
Trace.Info($"Copy {_dotnetRuntimeCloneDirectory} to {Path.Combine(downloadDirectory, Constants.Path.BinDirectory)}.");
|
||||
IOUtil.CopyDirectory(_dotnetRuntimeCloneDirectory, Path.Combine(downloadDirectory, Constants.Path.BinDirectory), token);
|
||||
|
||||
// try run the runner executable to see if current dotnet runtime + future runner binary works fine.
|
||||
var newRunnerBinary = Path.Combine(downloadDirectory, Constants.Path.BinDirectory, "Runner.Listener");
|
||||
using (var p = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
p.ErrorDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Error(data.Data);
|
||||
}
|
||||
};
|
||||
p.OutputDataReceived += (_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data))
|
||||
{
|
||||
Trace.Info(data.Data);
|
||||
}
|
||||
};
|
||||
var exitCode = await p.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), newRunnerBinary, "--version", null, token);
|
||||
if (exitCode != 0)
|
||||
{
|
||||
Trace.Error($"{newRunnerBinary} --version failed with exit code {exitCode}");
|
||||
return false;
|
||||
}
|
||||
else
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to restore dotnet runtime for trimmed package: {ex}");
|
||||
return false;
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopWatch.Stop();
|
||||
_updateTrace.Enqueue($"{nameof(RestoreTrimmedDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task CloneAndCalculateAssetsHash(string dotnetRuntimeCloneDirectory, string externalsCloneDirectory, CancellationToken token)
|
||||
{
|
||||
var runtimeCloneTask = CloneDotnetRuntime(dotnetRuntimeCloneDirectory, token);
|
||||
var externalsCloneTask = CloneExternals(externalsCloneDirectory, token);
|
||||
|
||||
var waitingTasks = new Dictionary<string, Task>()
|
||||
{
|
||||
{nameof(CloneDotnetRuntime), runtimeCloneTask},
|
||||
{nameof(CloneExternals),externalsCloneTask}
|
||||
};
|
||||
|
||||
while (waitingTasks.Count > 0)
|
||||
{
|
||||
Trace.Info($"Waiting for {waitingTasks.Count} tasks to complete.");
|
||||
var complatedTask = await Task.WhenAny(waitingTasks.Values);
|
||||
if (waitingTasks.ContainsKey(nameof(CloneExternals)) &&
|
||||
complatedTask == waitingTasks[nameof(CloneExternals)])
|
||||
{
|
||||
Trace.Info($"Externals clone finished.");
|
||||
waitingTasks.Remove(nameof(CloneExternals));
|
||||
try
|
||||
{
|
||||
if (await externalsCloneTask && !token.IsCancellationRequested)
|
||||
{
|
||||
var externalsHash = await HashFiles(externalsCloneDirectory, token);
|
||||
Trace.Info($"Externals content hash: {externalsHash}");
|
||||
_contentHashes[_externals] = externalsHash;
|
||||
_updateTrace.Enqueue($"ExternalsHash: {_contentHashes[_externals]}");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Error($"Skip compute hash since clone externals failed/cancelled.");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to hash externals content: {ex}");
|
||||
}
|
||||
}
|
||||
else if (waitingTasks.ContainsKey(nameof(CloneDotnetRuntime)) &&
|
||||
complatedTask == waitingTasks[nameof(CloneDotnetRuntime)])
|
||||
{
|
||||
Trace.Info($"Dotnet runtime clone finished.");
|
||||
waitingTasks.Remove(nameof(CloneDotnetRuntime));
|
||||
try
|
||||
{
|
||||
if (await runtimeCloneTask && !token.IsCancellationRequested)
|
||||
{
|
||||
var runtimeHash = await HashFiles(dotnetRuntimeCloneDirectory, token);
|
||||
Trace.Info($"Runtime content hash: {runtimeHash}");
|
||||
_contentHashes[_dotnetRuntime] = runtimeHash;
|
||||
_updateTrace.Enqueue($"DotnetRuntimeHash: {_contentHashes[_dotnetRuntime]}");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Error($"Skip compute hash since clone dotnet runtime failed/cancelled.");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Fail to hash runtime content: {ex}");
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Still waiting for {waitingTasks.Count} tasks to complete.");
|
||||
}
|
||||
}
|
||||
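CloneAndCalculateAssetsHash above keys its pending clone tasks by name and handles whichever finishes first. The same pattern in isolation (a minimal sketch; the task names and bodies are stand-ins, not the repository's code):

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

// Wait on a set of named tasks and process each one as it completes.
var pending = new Dictionary<string, Task<bool>>
{
    ["runtime"] = Task.Run(() => true),    // stand-in for CloneDotnetRuntime
    ["externals"] = Task.Run(() => true),  // stand-in for CloneExternals
};

while (pending.Count > 0)
{
    var finished = await Task.WhenAny(pending.Values);
    var name = pending.First(kv => kv.Value == finished).Key;
    pending.Remove(name);
    Console.WriteLine($"{name} finished: {await finished}");
}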
|
||||
        private async Task<bool> CloneDotnetRuntime(string runtimeDir, CancellationToken token)
        {
            var stopWatch = Stopwatch.StartNew();
            try
            {
                Trace.Info($"Cloning dotnet runtime to {runtimeDir}");
                IOUtil.DeleteDirectory(runtimeDir, CancellationToken.None);
                Directory.CreateDirectory(runtimeDir);

                var assembly = Assembly.GetExecutingAssembly();
                var assetsContent = default(string);
                using (var stream = assembly.GetManifestResourceStream("GitHub.Runner.Listener.runnercoreassets"))
                using (var streamReader = new StreamReader(stream))
                {
                    assetsContent = await streamReader.ReadToEndAsync();
                }

                if (!string.IsNullOrEmpty(assetsContent))
                {
                    var runnerCoreAssets = assetsContent.Split(new[] { "\n", "\r\n" }, StringSplitOptions.RemoveEmptyEntries);
                    if (runnerCoreAssets.Length > 0)
                    {
                        var binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
                        IOUtil.CopyDirectory(binDir, runtimeDir, token);

                        var clonedFile = 0;
                        foreach (var file in Directory.EnumerateFiles(runtimeDir, "*", SearchOption.AllDirectories))
                        {
                            token.ThrowIfCancellationRequested();
                            if (runnerCoreAssets.Any(x => file.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar).EndsWith(x.Trim())))
                            {
                                Trace.Verbose($"{file} is part of the runner core; deleting it from the cloned runtime directory.");
                                IOUtil.DeleteFile(file);
                            }
                            else
                            {
                                clonedFile++;
                            }
                        }

                        Trace.Info($"Successfully cloned dotnet runtime to {runtimeDir}. Total files: {clonedFile}");
                        return true;
                    }
                }
            }
            catch (Exception ex)
            {
                Trace.Error($"Failed to clone dotnet runtime to {runtimeDir}");
                Trace.Error(ex);
            }
            finally
            {
                stopWatch.Stop();
                _updateTrace.Enqueue($"{nameof(CloneDotnetRuntime)}Time: {stopWatch.ElapsedMilliseconds}ms");
            }

            return false;
        }

        private Task<bool> CloneExternals(string externalsDir, CancellationToken token)
        {
            var stopWatch = Stopwatch.StartNew();
            try
            {
                Trace.Info($"Cloning externals to {externalsDir}");
                IOUtil.DeleteDirectory(externalsDir, CancellationToken.None);
                Directory.CreateDirectory(externalsDir);
                IOUtil.CopyDirectory(HostContext.GetDirectory(WellKnownDirectory.Externals), externalsDir, token);
                Trace.Info($"Successfully cloned externals to {externalsDir}.");
                return Task.FromResult(true);
            }
            catch (Exception ex)
            {
                Trace.Error($"Failed to clone externals to {externalsDir}");
                Trace.Error(ex);
            }
            finally
            {
                stopWatch.Stop();
                _updateTrace.Enqueue($"{nameof(CloneExternals)}Time: {stopWatch.ElapsedMilliseconds}ms");
            }

            return Task.FromResult(false);
        }

        private async Task<string> HashFiles(string fileFolder, CancellationToken token)
        {
            Trace.Info($"Calculating hash for {fileFolder}");

            var stopWatch = Stopwatch.StartNew();
            string binDir = HostContext.GetDirectory(WellKnownDirectory.Bin);
            string node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
            string hashFilesScript = Path.Combine(binDir, "hashFiles");
            var hashResult = string.Empty;

            using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
            {
                // The hashFiles script reports its result on stderr wrapped in __OUTPUT__ markers; strip them to recover the hash.
                processInvoker.ErrorDataReceived += (_, data) =>
                {
                    if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
                    {
                        hashResult = data.Data.Substring(10, data.Data.Length - 20);
                        Trace.Info($"Hash result: '{hashResult}'");
                    }
                    else
                    {
                        Trace.Info(data.Data);
                    }
                };

                processInvoker.OutputDataReceived += (_, data) =>
                {
                    Trace.Verbose(data.Data);
                };

                var env = new Dictionary<string, string>
                {
                    ["patterns"] = "**"
                };

                int exitCode = await processInvoker.ExecuteAsync(workingDirectory: fileFolder,
                                                                 fileName: node,
                                                                 arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
                                                                 environment: env,
                                                                 requireExitCodeZero: false,
                                                                 outputEncoding: null,
                                                                 killProcessOnCancel: true,
                                                                 cancellationToken: token);

                if (exitCode != 0)
                {
                    Trace.Error($"hashFiles returned exit code '{exitCode}'. Failed to hash files under directory '{fileFolder}'");
                }

                stopWatch.Stop();
                _updateTrace.Enqueue($"{nameof(HashFiles)}{Path.GetFileName(fileFolder)}Time: {stopWatch.ElapsedMilliseconds}ms");
                return hashResult;
            }
        }
    }
}

@@ -149,6 +149,7 @@ namespace GitHub.Runner.Listener

            string archiveFile = null;

            // Only try the trimmed package if the server sends one and we have calculated hash values for the current runtime/externals.
            _updateTrace.Enqueue($"DownloadUrl: {packageDownloadUrl}");

            try

@@ -1,12 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net8.0</TargetFramework>
    <TargetFramework>net6.0</TargetFramework>
    <OutputType>Exe</OutputType>
    <RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64</RuntimeIdentifiers>
    <SelfContained>true</SelfContained>
    <TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
    <NoWarn>NU1701;NU1603;SYSLIB0050;SYSLIB0051</NoWarn>
    <NoWarn>NU1701;NU1603</NoWarn>
    <Version>$(Version)</Version>
    <PredefinedCulturesOnly>false</PredefinedCulturesOnly>
    <PublishReadyToRunComposite>true</PublishReadyToRunComposite>

@@ -1,12 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net8.0</TargetFramework>
    <TargetFramework>net6.0</TargetFramework>
    <OutputType>Library</OutputType>
    <RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64</RuntimeIdentifiers>
    <SelfContained>true</SelfContained>
    <TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
    <NoWarn>NU1701;NU1603;SYSLIB0050;SYSLIB0051</NoWarn>
    <NoWarn>NU1701;NU1603</NoWarn>
    <Version>$(Version)</Version>
  </PropertyGroup>

Some files were not shown because too many files have changed in this diff.