diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 245547cd5..5eb31abc8 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -4,10 +4,13 @@ "features": { "ghcr.io/devcontainers/features/docker-in-docker:1": {}, "ghcr.io/devcontainers/features/dotnet": { - "version": "6.0.418" + "version": "8.0.413" }, "ghcr.io/devcontainers/features/node:1": { - "version": "16" + "version": "20" + }, + "ghcr.io/devcontainers/features/sshd:1": { + "version": "latest" } }, "customizations": { diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 0822f6a10..963325228 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -7,7 +7,7 @@ contact_links: url: https://github.community/c/code-to-cloud/52 about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum. - name: ✅ Feedback and suggestions for GitHub Actions - url: https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback + url: https://github.com/github/feedback/discussions/categories/actions about: If you have feedback or suggestions about GitHub Actions, please open a discussion (or add to an existing one) in the GitHub Actions Feedback. GitHub Actions Product Managers and Engineers monitor the feedback forum. - name: ‼️ GitHub Security Bug Bounty url: https://bounty.github.com/ diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 000000000..834c16632 --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,25 @@ +## Making changes + +### Tests + +Whenever possible, changes should be accompanied by non-trivial tests that meaningfully exercise the core functionality of the new code being introduced. + +All tests are in the `Test/` directory at the repo root. Fast unit tests are in the `Test/L0` directory and by convention have the suffix `L0.cs`. For example: unit tests for a hypothetical `src/Runner.Worker/Foo.cs` would go in `src/Test/L0/Worker/FooL0.cs`. + +Run tests using this command: + +```sh +cd src && ./dev.sh test +``` + +### Formatting + +After editing .cs files, always format the code using this command: + +```sh +cd src && ./dev.sh format +``` + +### Feature Flags + +Wherever possible, all changes should be safeguarded by a feature flag; `Features` are declared in [Constants.cs](src/Runner.Common/Constants.cs). 
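For illustration, a minimal sketch of a test following the convention above (placement and `L0` suffix as described), assuming an xUnit-style harness with `Level`/`Category` traits; the namespace, the traits, and the `Foo` class are hypothetical placeholders rather than the repo's actual code:

```csharp
// Hypothetical sketch of src/Test/L0/Worker/FooL0.cs — the Foo class below is a
// stand-in for a unit under test in src/Runner.Worker/Foo.cs, not real repo code.
using Xunit;

namespace GitHub.Runner.Common.Tests.Worker
{
    // Placeholder for the class being tested.
    public sealed class Foo
    {
        public string Parse(string input) => input?.Trim();
    }

    public sealed class FooL0
    {
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public void Parse_TrimsSurroundingWhitespace()
        {
            // Exercise real behavior of the unit rather than a trivial smoke check.
            var foo = new Foo();
            Assert.Equal("value", foo.Parse("  value  "));
        }
    }
}
```

A file like this would then be picked up by the `cd src && ./dev.sh test` command described above.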
diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 6c718529c..685264a75 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,6 +5,11 @@ updates: schedule: interval: "daily" target-branch: "main" +- package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "daily" + target-branch: "main" - package-ecosystem: "nuget" directory: "/src" schedule: diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7eabfb9cf..72d64cda8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -41,7 +41,7 @@ jobs: devScript: ./dev.sh - runtime: win-x64 - os: windows-2019 + os: windows-latest devScript: ./dev - runtime: win-arm64 @@ -50,7 +50,7 @@ jobs: runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v5 # Build runner layout - name: Build & Layout Release @@ -69,13 +69,13 @@ jobs: - name: Package Release if: github.event_name != 'pull_request' run: | - ${{ matrix.devScript }} package Release + ${{ matrix.devScript }} package Release ${{ matrix.runtime }} working-directory: src # Upload runner package tar.gz/zip as artifact - name: Publish Artifact if: github.event_name != 'pull_request' - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: runner-package-${{ matrix.runtime }} path: | diff --git a/.github/workflows/close-bugs-bot.yml b/.github/workflows/close-bugs-bot.yml index 4a0e129fb..6a5a2feb0 100644 --- a/.github/workflows/close-bugs-bot.yml +++ b/.github/workflows/close-bugs-bot.yml @@ -7,7 +7,7 @@ jobs: stale: runs-on: ubuntu-latest steps: - - uses: actions/stale@v8 + - uses: actions/stale@v9 with: close-issue-message: "This issue does not seem to be a problem with the runner application, it concerns the GitHub actions platform more generally. Could you please post your feedback on the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃" exempt-issue-labels: "keep" diff --git a/.github/workflows/close-features-bot.yml b/.github/workflows/close-features-bot.yml index a710a8a84..6207aab55 100644 --- a/.github/workflows/close-features-bot.yml +++ b/.github/workflows/close-features-bot.yml @@ -7,7 +7,7 @@ jobs: stale: runs-on: ubuntu-latest steps: - - uses: actions/stale@v8 + - uses: actions/stale@v9 with: close-issue-message: "Thank you for your interest in the runner application and taking the time to provide your valuable feedback. We kindly ask you to redirect this feedback to the [GitHub Community Support Forum](https://github.com/orgs/community/discussions/categories/actions-and-packages) which our team actively monitors and would be a better place to start a discussion for new feature requests in GitHub Actions. For more information on this policy please [read our contribution guidelines](https://github.com/actions/runner#contribute). 😃" exempt-issue-labels: "keep" diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 5b6e05236..08b8ed681 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -23,11 +23,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v5 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 # Override language selection by uncommenting this and choosing your languages # with: # languages: go, javascript, csharp, python, cpp, java @@ -38,4 +38,4 @@ jobs: working-directory: src - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/docker-buildx-upgrade.yml b/.github/workflows/docker-buildx-upgrade.yml new file mode 100644 index 000000000..2a2144159 --- /dev/null +++ b/.github/workflows/docker-buildx-upgrade.yml @@ -0,0 +1,144 @@ +name: "Docker/Buildx Version Upgrade" + +on: + schedule: + - cron: '0 0 * * 1' # Run every Monday at midnight + workflow_dispatch: # Allow manual triggering + +jobs: + check-versions: + runs-on: ubuntu-latest + outputs: + DOCKER_SHOULD_UPDATE: ${{ steps.check_docker_version.outputs.SHOULD_UPDATE }} + DOCKER_LATEST_VERSION: ${{ steps.check_docker_version.outputs.LATEST_VERSION }} + DOCKER_CURRENT_VERSION: ${{ steps.check_docker_version.outputs.CURRENT_VERSION }} + BUILDX_SHOULD_UPDATE: ${{ steps.check_buildx_version.outputs.SHOULD_UPDATE }} + BUILDX_LATEST_VERSION: ${{ steps.check_buildx_version.outputs.LATEST_VERSION }} + BUILDX_CURRENT_VERSION: ${{ steps.check_buildx_version.outputs.CURRENT_VERSION }} + steps: + - name: Checkout repository + uses: actions/checkout@v5 + + - name: Check Docker version + id: check_docker_version + shell: bash + run: | + # Extract current Docker version from Dockerfile + current_version=$(grep "ARG DOCKER_VERSION=" ./images/Dockerfile | cut -d'=' -f2) + + # Fetch latest Docker Engine version from Docker's download site + # This gets the latest Linux static binary version which matches what's used in the Dockerfile + latest_version=$(curl -s https://download.docker.com/linux/static/stable/x86_64/ | grep -o 'docker-[0-9]*\.[0-9]*\.[0-9]*\.tgz' | sort -V | tail -n 1 | sed 's/docker-\(.*\)\.tgz/\1/') + + # Extra check to ensure we got a valid version + if [[ ! $latest_version =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo "Failed to retrieve a valid Docker version" + exit 1 + fi + + should_update=0 + [ "$current_version" != "$latest_version" ] && should_update=1 + + echo "CURRENT_VERSION=${current_version}" >> $GITHUB_OUTPUT + echo "LATEST_VERSION=${latest_version}" >> $GITHUB_OUTPUT + echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT + + - name: Check Buildx version + id: check_buildx_version + shell: bash + run: | + # Extract current Buildx version from Dockerfile + current_version=$(grep "ARG BUILDX_VERSION=" ./images/Dockerfile | cut -d'=' -f2) + + # Fetch latest Buildx version + latest_version=$(curl -s https://api.github.com/repos/docker/buildx/releases/latest | jq -r '.tag_name' | sed 's/^v//') + + should_update=0 + [ "$current_version" != "$latest_version" ] && should_update=1 + + echo "CURRENT_VERSION=${current_version}" >> $GITHUB_OUTPUT + echo "LATEST_VERSION=${latest_version}" >> $GITHUB_OUTPUT + echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT + + - name: Create annotations for versions + run: | + docker_should_update="${{ steps.check_docker_version.outputs.SHOULD_UPDATE }}" + buildx_should_update="${{ steps.check_buildx_version.outputs.SHOULD_UPDATE }}" + + # Show annotation if only Docker needs update + if [[ "$docker_should_update" == "1" && "$buildx_should_update" == "0" ]]; then + echo "::warning ::Docker version (${{ steps.check_docker_version.outputs.LATEST_VERSION }}) needs update but Buildx is current. 
Only updating when both need updates." + fi + + # Show annotation if only Buildx needs update + if [[ "$docker_should_update" == "0" && "$buildx_should_update" == "1" ]]; then + echo "::warning ::Buildx version (${{ steps.check_buildx_version.outputs.LATEST_VERSION }}) needs update but Docker is current. Only updating when both need updates." + fi + + # Show annotation when both are current + if [[ "$docker_should_update" == "0" && "$buildx_should_update" == "0" ]]; then + echo "::warning ::Latest Docker version is ${{ steps.check_docker_version.outputs.LATEST_VERSION }} and Buildx version is ${{ steps.check_buildx_version.outputs.LATEST_VERSION }}. No updates needed." + fi + + update-versions: + permissions: + pull-requests: write + contents: write + needs: [check-versions] + if: ${{ needs.check-versions.outputs.DOCKER_SHOULD_UPDATE == 1 && needs.check-versions.outputs.BUILDX_SHOULD_UPDATE == 1 }} + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v5 + + - name: Update Docker version + shell: bash + run: | + latest_version="${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }}" + current_version="${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }}" + + # Update version in Dockerfile + sed -i "s/ARG DOCKER_VERSION=$current_version/ARG DOCKER_VERSION=$latest_version/g" ./images/Dockerfile + + - name: Update Buildx version + shell: bash + run: | + latest_version="${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}" + current_version="${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }}" + + # Update version in Dockerfile + sed -i "s/ARG BUILDX_VERSION=$current_version/ARG BUILDX_VERSION=$latest_version/g" ./images/Dockerfile + + - name: Commit changes and create Pull Request + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + # Setup branch and commit information + branch_name="feature/docker-buildx-upgrade" + commit_message="Upgrade Docker to v${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Buildx to v${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}" + pr_title="Update Docker to v${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Buildx to v${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}" + + # Configure git + git config --global user.name "github-actions[bot]" + git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>" + + # Create branch or switch to it if it exists + if git show-ref --quiet refs/remotes/origin/$branch_name; then + git fetch origin + git checkout -B "$branch_name" origin/$branch_name + else + git checkout -b "$branch_name" + fi + + # Commit and push changes + git commit -a -m "$commit_message" + git push --force origin "$branch_name" + + # Create PR + pr_body="Upgrades Docker version from ${{ needs.check-versions.outputs.DOCKER_CURRENT_VERSION }} to ${{ needs.check-versions.outputs.DOCKER_LATEST_VERSION }} and Docker Buildx version from ${{ needs.check-versions.outputs.BUILDX_CURRENT_VERSION }} to ${{ needs.check-versions.outputs.BUILDX_LATEST_VERSION }}.\n\n" + pr_body+="Release notes: https://docs.docker.com/engine/release-notes/\n\n" + pr_body+="---\n\nAutogenerated by [Docker/Buildx Version Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/docker-buildx-upgrade.yml)" + + gh pr create -B main -H "$branch_name" \ + --title "$pr_title" \ + --body "$pr_body" diff --git a/.github/workflows/dotnet-upgrade.yml b/.github/workflows/dotnet-upgrade.yml index eb15e762e..80049e643 100644 --- 
a/.github/workflows/dotnet-upgrade.yml +++ b/.github/workflows/dotnet-upgrade.yml @@ -15,7 +15,7 @@ jobs: DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }} steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v5 - name: Get current major minor version id: fetch_current_version shell: bash @@ -51,7 +51,7 @@ jobs: run: echo "::error links::feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} https://github.com/actions/runner/tree/feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}::Branch feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} already exists. Please take a look and delete that branch if you wish to recreate" - name: Create a warning annotation if no need to update if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 0 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }} - run: echo "::warning ::Latest DotNet SDK patch is ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}, and we are on ${{ steps.fetch_latest_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}. No need to update" + run: echo "::warning ::Latest DotNet SDK patch is ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}, and we are on ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}. No need to update" - name: Update patch version if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }} shell: bash @@ -89,7 +89,7 @@ jobs: if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v5 with: ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} - name: Create Pull Request diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml deleted file mode 100644 index b35bee3c7..000000000 --- a/.github/workflows/lint.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: Lint - -on: - pull_request: - branches: [ main ] - -jobs: - build: - name: Lint - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v3 - with: - # Ensure full list of changed files within `super-linter` - fetch-depth: 0 - - name: Run linters - uses: github/super-linter@v4 - env: - DEFAULT_BRANCH: ${{ github.base_ref }} - EDITORCONFIG_FILE_NAME: .editorconfig - LINTER_RULES_PATH: /src/ - VALIDATE_ALL_CODEBASE: false - VALIDATE_CSHARP: true diff --git a/.github/workflows/publish-image.yml b/.github/workflows/publish-image.yml deleted file mode 100644 index fa906b294..000000000 --- a/.github/workflows/publish-image.yml +++ /dev/null @@ -1,68 +0,0 @@ -name: Publish Runner Image - -on: - workflow_dispatch: - inputs: - runnerVersion: - type: string - description: Version of the runner being installed - -env: - REGISTRY: ghcr.io - IMAGE_NAME: ${{ github.repository_owner }}/actions-runner - -jobs: - build: - runs-on: ubuntu-latest - permissions: - contents: read - packages: write - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - - name: Compute image version - id: image - uses: actions/github-script@v6 - with: - script: | - const fs = require('fs'); - const inputRunnerVersion = "${{ 
github.event.inputs.runnerVersion }}" - if (inputRunnerVersion) { - console.log(`Using input runner version ${inputRunnerVersion}`) - core.setOutput('version', inputRunnerVersion); - return - } - const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '') - console.log(`Using runner version ${runnerVersion}`) - core.setOutput('version', runnerVersion); - - - name: Setup Docker buildx - uses: docker/setup-buildx-action@v2 - - - name: Log into registry ${{ env.REGISTRY }} - uses: docker/login-action@v2 - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Build and push Docker image - id: build-and-push - uses: docker/build-push-action@v3 - with: - context: ./images - platforms: | - linux/amd64 - linux/arm64 - tags: | - ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }} - ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest - build-args: | - RUNNER_VERSION=${{ steps.image.outputs.version }} - push: true - labels: | - org.opencontainers.image.source=${{github.server_url}}/${{github.repository}} - org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }} - org.opencontainers.image.licenses=MIT diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index de94bcc4b..702eea07d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,16 +11,15 @@ jobs: if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v5 # Make sure ./releaseVersion match ./src/runnerversion # Query GitHub release ensure version is not used - name: Check version - uses: actions/github-script@0.3.0 + uses: actions/github-script@v7.0.1 with: github-token: ${{secrets.GITHUB_TOKEN}} script: | - const core = require('@actions/core') const fs = require('fs'); const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '') const releaseVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '') @@ -30,7 +29,7 @@ jobs: return } try { - const release = await github.repos.getReleaseByTag({ + const release = await github.rest.repos.getReleaseByTag({ owner: '${{ github.event.repository.owner.name }}', repo: '${{ github.event.repository.name }}', tag: 'v' + runnerVersion @@ -78,7 +77,7 @@ jobs: devScript: ./dev.sh - runtime: win-x64 - os: windows-2019 + os: windows-latest devScript: ./dev - runtime: win-arm64 @@ -87,7 +86,7 @@ jobs: runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v5 # Build runner layout - name: Build & Layout Release @@ -117,12 +116,11 @@ jobs: working-directory: _package # Upload runner package tar.gz/zip as artifact. 
- # Since each package name is unique, so we don't need to put ${{matrix}} info into artifact name - name: Publish Artifact if: github.event_name != 'pull_request' - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: - name: runner-packages + name: runner-packages-${{ matrix.runtime }} path: | _package @@ -131,23 +129,52 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v5 # Download runner package tar.gz/zip produced by 'build' job - - name: Download Artifact - uses: actions/download-artifact@v1 + - name: Download Artifact (win-x64) + uses: actions/download-artifact@v5 with: - name: runner-packages + name: runner-packages-win-x64 + path: ./ + - name: Download Artifact (win-arm64) + uses: actions/download-artifact@v5 + with: + name: runner-packages-win-arm64 + path: ./ + - name: Download Artifact (osx-x64) + uses: actions/download-artifact@v5 + with: + name: runner-packages-osx-x64 + path: ./ + - name: Download Artifact (osx-arm64) + uses: actions/download-artifact@v5 + with: + name: runner-packages-osx-arm64 + path: ./ + - name: Download Artifact (linux-x64) + uses: actions/download-artifact@v5 + with: + name: runner-packages-linux-x64 + path: ./ + - name: Download Artifact (linux-arm) + uses: actions/download-artifact@v5 + with: + name: runner-packages-linux-arm + path: ./ + - name: Download Artifact (linux-arm64) + uses: actions/download-artifact@v5 + with: + name: runner-packages-linux-arm64 path: ./ # Create ReleaseNote file - name: Create ReleaseNote id: releaseNote - uses: actions/github-script@0.3.0 + uses: actions/github-script@v7.0.1 with: github-token: ${{secrets.GITHUB_TOKEN}} script: | - const core = require('@actions/core') const fs = require('fs'); const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '') var releaseNote = fs.readFileSync('${{ github.workspace }}/releaseNote.md', 'utf8').replace(//g, runnerVersion) @@ -187,7 +214,7 @@ jobs: # Upload release assets (full runner packages) - name: Upload Release Asset (win-x64) - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -197,7 +224,7 @@ jobs: asset_content_type: application/octet-stream - name: Upload Release Asset (win-arm64) - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -207,7 +234,7 @@ jobs: asset_content_type: application/octet-stream - name: Upload Release Asset (linux-x64) - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -217,7 +244,7 @@ jobs: asset_content_type: application/octet-stream - name: Upload Release Asset (osx-x64) - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -227,7 +254,7 @@ jobs: asset_content_type: application/octet-stream - name: Upload Release Asset (osx-arm64) - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -237,7 +264,7 @@ jobs: asset_content_type: application/octet-stream - name: Upload Release Asset (linux-arm) - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -247,7 +274,7 @@ jobs: asset_content_type: 
application/octet-stream - name: Upload Release Asset (linux-arm64) - uses: actions/upload-release-asset@v1.0.1 + uses: actions/upload-release-asset@v1.0.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: @@ -262,16 +289,18 @@ jobs: permissions: contents: read packages: write + id-token: write + attestations: write env: REGISTRY: ghcr.io IMAGE_NAME: ${{ github.repository_owner }}/actions-runner steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v5 - name: Compute image version id: image - uses: actions/github-script@v6 + uses: actions/github-script@v7.0.1 with: script: | const fs = require('fs'); @@ -280,10 +309,10 @@ jobs: core.setOutput('version', runnerVersion); - name: Setup Docker buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Log into registry ${{ env.REGISTRY }} - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ${{ env.REGISTRY }} username: ${{ github.actor }} @@ -291,7 +320,7 @@ jobs: - name: Build and push Docker image id: build-and-push - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v6 with: context: ./images platforms: | @@ -307,3 +336,10 @@ jobs: org.opencontainers.image.source=${{github.server_url}}/${{github.repository}} org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }} org.opencontainers.image.licenses=MIT + + - name: Generate attestation + uses: actions/attest-build-provenance@v3 + with: + subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + subject-digest: ${{ steps.build-and-push.outputs.digest }} + push-to-registry: true diff --git a/.github/workflows/stale-bot.yml b/.github/workflows/stale-bot.yml index bec1321bb..d0d7e115a 100644 --- a/.github/workflows/stale-bot.yml +++ b/.github/workflows/stale-bot.yml @@ -7,7 +7,7 @@ jobs: stale: runs-on: ubuntu-latest steps: - - uses: actions/stale@v8 + - uses: actions/stale@v9 with: stale-issue-message: "This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days." close-issue-message: "This issue was closed because it has been stalled for 15 days with no activity." diff --git a/.gitignore b/.gitignore index 34d18c4ce..411fe4011 100644 --- a/.gitignore +++ b/.gitignore @@ -26,4 +26,5 @@ _dotnetsdk TestResults TestLogs .DS_Store +.mono **/*.DotSettings.user \ No newline at end of file diff --git a/README.md b/README.md index 7b7946426..a2e3bcc97 100644 --- a/README.md +++ b/README.md @@ -20,6 +20,20 @@ Runner releases: ![linux](docs/res/linux_sm.png) [Pre-reqs](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases) -## Contribute +### Note -We accept contributions in the form of issues and pull requests. The runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page. [Read more about our guidelines here](docs/contribute.md) before contributing. +Thank you for your interest in this GitHub repo, however, right now we are not taking contributions. + +We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. 
While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we’re working on and what stage they’re in. + +We are taking the following steps to better direct requests related to GitHub Actions, including: + +1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions) + +2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report. + +3. Security Issues should be handled as per our [security.md](security.md) + +We will still provide security updates for this project and fix major breaking changes during this time. + +You are welcome to still raise bugs in this repo. diff --git a/docs/adrs/0276-problem-matchers.md b/docs/adrs/0276-problem-matchers.md index bed1f75ff..5d7a034d1 100644 --- a/docs/adrs/0276-problem-matchers.md +++ b/docs/adrs/0276-problem-matchers.md @@ -250,6 +250,42 @@ Two problem matchers can be used: } ``` +#### Default from path + +The problem matcher can specify a `fromPath` property at the top level, which applies when a specific pattern doesn't provide a value for `fromPath`. This is useful for tools that don't include project file information in their output. + +For example, given the following compiler output that doesn't include project file information: + +``` +ClassLibrary.cs(16,24): warning CS0612: 'ClassLibrary.Helpers.MyHelper.Name' is obsolete +``` + +A problem matcher with a default from path can be used: + +```json +{ + "problemMatcher": [ + { + "owner": "csc-minimal", + "fromPath": "ClassLibrary/ClassLibrary.csproj", + "pattern": [ + { + "regexp": "^(.+)\\((\\d+),(\\d+)\\): (error|warning) (.+): (.*)$", + "file": 1, + "line": 2, + "column": 3, + "severity": 4, + "code": 5, + "message": 6 + } + ] + } + ] +} +``` + +This ensures that the file is rooted to the correct path when there's not enough information in the error messages to extract a `fromPath`. + #### Mitigate regular expression denial of service (ReDos) If a matcher exceeds a 1 second timeout when processing a line, retry up to two three times total. diff --git a/docs/adrs/1751-runner-job-hooks.md b/docs/adrs/1751-runner-job-hooks.md index e13d42e34..8e81f50e7 100644 --- a/docs/adrs/1751-runner-job-hooks.md +++ b/docs/adrs/1751-runner-job-hooks.md @@ -23,7 +23,7 @@ This feature is mainly intended for self hosted runner administrators. - `ACTIONS_RUNNER_HOOK_JOB_STARTED` - `ACTIONS_RUNNER_HOOK_JOB_COMPLETED` -You can set these variables to the **absolute** path of a a `.sh` or `.ps1` file. +You can set these variables to the **absolute** path of a `.sh` or `.ps1` file. We will execute `pwsh` (fallback to `powershell`) or `bash` (fallback to `sh`) as appropriate. - `.sh` files will execute with the args `-e {pathtofile}` diff --git a/docs/checks/nodejs.md b/docs/checks/nodejs.md index cbdd8659b..62b2958cd 100644 --- a/docs/checks/nodejs.md +++ b/docs/checks/nodejs.md @@ -4,9 +4,9 @@ Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server. -The runner carries its own copy of node.js executable under `/externals/node16/`. +The runner carries its own copies of node.js executables under `/externals/node20/` and `/externals/node24/`. 
-All javascript base Actions will get executed by the built-in `node` at `/externals/node16/`. +All JavaScript-based Actions will get executed by the built-in `node` at either `/externals/node20/` or `/externals/node24/` depending on the version specified in the action's metadata. > Not the `node` from `$PATH` diff --git a/docs/contribute.md b/docs/contribute.md index 56301b1d2..ff98eef14 100644 --- a/docs/contribute.md +++ b/docs/contribute.md @@ -1,6 +1,6 @@ # Contributions -We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors.Please note the runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page. +We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for GitHub and external contributors. Please note the runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page. > IMPORTANT: Building your own runner is critical for the dev inner loop process when contributing changes. However, only runners built and distributed by GitHub (releases) are supported in production. Be aware that workflows and orchestrations run service side with the runner being a remote process to run steps. For that reason, the service can pull the runner forward so customizations can be lost. diff --git a/docs/start/envlinux.md b/docs/start/envlinux.md index 11eff1876..b4b8c8c76 100644 --- a/docs/start/envlinux.md +++ b/docs/start/envlinux.md @@ -4,16 +4,7 @@ ## Supported Distributions and Versions -x64 - - Red Hat Enterprise Linux 7+ - - CentOS 7+ - - Oracle Linux 7+ - - Fedora 29+ - - Debian 9+ - - Ubuntu 16.04+ - - Linux Mint 18+ - - openSUSE 15+ - - SUSE Enterprise Linux (SLES) 12 SP2+ +Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#linux)." ## Install .Net Core 3.x Linux Dependencies diff --git a/docs/start/envosx.md b/docs/start/envosx.md index 7a2e95fc8..ec9f7081d 100644 --- a/docs/start/envosx.md +++ b/docs/start/envosx.md @@ -4,7 +4,6 @@ ## Supported Versions - - macOS High Sierra (10.13) and later versions - - x64 and arm64 (Apple Silicon) +Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#macos)."
## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore30) diff --git a/docs/start/envwin.md b/docs/start/envwin.md index 76d392b86..13e95fb0e 100644 --- a/docs/start/envwin.md +++ b/docs/start/envwin.md @@ -2,11 +2,6 @@ ## Supported Versions - - Windows 7 64-bit - - Windows 8.1 64-bit - - Windows 10 64-bit - - Windows Server 2012 R2 64-bit - - Windows Server 2016 64-bit - - Windows Server 2019 64-bit +Please see "[Supported architectures and operating systems for self-hosted runners](https://docs.github.com/en/actions/reference/runners/self-hosted-runners#windows)." ## [More .NET Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30) diff --git a/images/Dockerfile b/images/Dockerfile index 424817225..3b0769231 100644 --- a/images/Dockerfile +++ b/images/Dockerfile @@ -1,12 +1,12 @@ # Source: https://github.com/dotnet/dotnet-docker -FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy as build +FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-jammy AS build ARG TARGETOS ARG TARGETARCH ARG RUNNER_VERSION -ARG RUNNER_CONTAINER_HOOKS_VERSION=0.4.0 -ARG DOCKER_VERSION=24.0.9 -ARG BUILDX_VERSION=0.12.1 +ARG RUNNER_CONTAINER_HOOKS_VERSION=0.7.0 +ARG DOCKER_VERSION=28.3.3 +ARG BUILDX_VERSION=0.27.0 RUN apt update -y && apt install curl unzip -y @@ -32,17 +32,22 @@ RUN export RUNNER_ARCH=${TARGETARCH} \ "https://github.com/docker/buildx/releases/download/v${BUILDX_VERSION}/buildx-v${BUILDX_VERSION}.linux-${TARGETARCH}" \ && chmod +x /usr/local/lib/docker/cli-plugins/docker-buildx -FROM mcr.microsoft.com/dotnet/runtime-deps:6.0-jammy +FROM mcr.microsoft.com/dotnet/runtime-deps:8.0-jammy ENV DEBIAN_FRONTEND=noninteractive ENV RUNNER_MANUALLY_TRAP_SIG=1 ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1 ENV ImageOS=ubuntu22 -RUN apt-get update -y \ - && apt-get install -y --no-install-recommends \ - sudo \ - lsb-release \ +# 'gpg-agent' and 'software-properties-common' are needed for the 'add-apt-repository' command that follows +RUN apt update -y \ + && apt install -y --no-install-recommends sudo lsb-release gpg-agent software-properties-common curl jq unzip \ + && rm -rf /var/lib/apt/lists/* + +# Configure git-core/ppa based on guidance here: https://git-scm.com/download/linux +RUN add-apt-repository ppa:git-core/ppa \ + && apt update -y \ + && apt install -y git \ && rm -rf /var/lib/apt/lists/* RUN adduser --disabled-password --gecos "" --uid 1001 runner \ diff --git a/releaseNote.md b/releaseNote.md index f3e576c8d..91dfdd1ae 100644 --- a/releaseNote.md +++ b/releaseNote.md @@ -1,36 +1,32 @@ ## What's Changed -* Fix `buildx` installation by @ajschmidt8 in https://github.com/actions/runner/pull/2952 -* Create close-features and close-bugs bot for runner issues by @ruvceskistefan in https://github.com/actions/runner/pull/2909 -* Send disableUpdate as query parameter by @luketomlinson in https://github.com/actions/runner/pull/2970 -* Handle SelfUpdate Flow when Package is provided in Message by @luketomlinson in https://github.com/actions/runner/pull/2926 -* Bump container hook version to 0.5.0 in runner image by @nikola-jokic in https://github.com/actions/runner/pull/3003 -* Set `ImageOS` environment variable in runner images by @int128 in https://github.com/actions/runner/pull/2878 -* Mark job as failed on worker crash. by @TingluoHuang in https://github.com/actions/runner/pull/3006 -* Include whether http proxy configured as part of UserAgent. 
by @TingluoHuang in https://github.com/actions/runner/pull/3009 -* Add codeload to the list of service we check during '--check'. by @TingluoHuang in https://github.com/actions/runner/pull/3011 -* close reason update by @ruvceskistefan in https://github.com/actions/runner/pull/3027 -* Update envlinux.md by @adjn in https://github.com/actions/runner/pull/3040 -* Extend `--check` to check Results-Receiver service. by @TingluoHuang in https://github.com/actions/runner/pull/3078 -* Use Azure SDK to upload files to Azure Blob by @yacaovsnc in https://github.com/actions/runner/pull/3033 -* Remove code in runner for handling trimmed packages. by @TingluoHuang in https://github.com/actions/runner/pull/3074 -* Update dotnet sdk to latest version @6.0.418 by @github-actions in https://github.com/actions/runner/pull/3085 -* Patch Curl to no longer use -k by @thboop in https://github.com/actions/runner/pull/3091 +* Update Docker to v28.3.2 and Buildx to v0.26.1 by @github-actions[bot] in https://github.com/actions/runner/pull/3953 +* Fix if statement structure in update script and variable reference by @salmanmkc in https://github.com/actions/runner/pull/3956 +* Add V2 flow for runner deletion by @Samirat in https://github.com/actions/runner/pull/3954 +* Node 20 -> Node 24 migration feature flagging, opt-in and opt-out environment variables by @salmanmkc in https://github.com/actions/runner/pull/3948 +* Update Node20 and Node24 to latest by @djs-intel in https://github.com/actions/runner/pull/3972 +* Redirect supported OS doc section to current public Docs location by @corycalahan in https://github.com/actions/runner/pull/3979 +* Bump Microsoft.NET.Test.Sdk from 17.13.0 to 17.14.1 by @dependabot[bot] in https://github.com/actions/runner/pull/3975 +* Bump Azure.Storage.Blobs from 12.24.0 to 12.25.0 by @dependabot[bot] in https://github.com/actions/runner/pull/3974 +* Bump actions/download-artifact from 4 to 5 by @dependabot[bot] in https://github.com/actions/runner/pull/3973 +* Bump actions/checkout from 4 to 5 by @dependabot[bot] in https://github.com/actions/runner/pull/3982 ## New Contributors -* @int128 made their first contribution in https://github.com/actions/runner/pull/2878 -* @adjn made their first contribution in https://github.com/actions/runner/pull/3040 +* @Samirat made their first contribution in https://github.com/actions/runner/pull/3954 +* @djs-intel made their first contribution in https://github.com/actions/runner/pull/3972 -**Full Changelog**: https://github.com/actions/runner/compare/v2.311.0...v2.312.0 +**Full Changelog**: https://github.com/actions/runner/compare/v2.327.1...v2.328.0 -_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet. -To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository. +_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet. +To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository. See https://docs.github.com/en/enterprise-cloud@latest/actions/hosting-your-own-runners/adding-self-hosted-runners_ ## Windows x64 + We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). 
This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows. The following snipped needs to be run on `powershell`: -``` powershell + +```powershell # Create a folder under the drive root mkdir \actions-runner ; cd \actions-runner # Download the latest runner package @@ -40,13 +36,13 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ; [System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-.zip", "$PWD") ``` -## [Pre-release] Windows arm64 -**Warning:** Windows arm64 runners are currently in preview status and use [unofficial versions of nodejs](https://unofficial-builds.nodejs.org/). They are not intended for production workflows. +## Windows arm64 We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows. The following snipped needs to be run on `powershell`: -``` powershell + +```powershell # Create a folder under the drive root mkdir \actions-runner ; cd \actions-runner # Download the latest runner package @@ -58,7 +54,7 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ; ## OSX x64 -``` bash +```bash # Create a folder mkdir actions-runner && cd actions-runner # Download the latest runner package @@ -69,7 +65,7 @@ tar xzf ./actions-runner-osx-x64-.tar.gz ## OSX arm64 (Apple silicon) -``` bash +```bash # Create a folder mkdir actions-runner && cd actions-runner # Download the latest runner package @@ -80,7 +76,7 @@ tar xzf ./actions-runner-osx-arm64-.tar.gz ## Linux x64 -``` bash +```bash # Create a folder mkdir actions-runner && cd actions-runner # Download the latest runner package @@ -91,7 +87,7 @@ tar xzf ./actions-runner-linux-x64-.tar.gz ## Linux arm64 -``` bash +```bash # Create a folder mkdir actions-runner && cd actions-runner # Download the latest runner package @@ -102,7 +98,7 @@ tar xzf ./actions-runner-linux-arm64-.tar.gz ## Linux arm -``` bash +```bash # Create a folder mkdir actions-runner && cd actions-runner # Download the latest runner package @@ -112,6 +108,7 @@ tar xzf ./actions-runner-linux-arm-.tar.gz ``` ## Using your self hosted runner + For additional details about configuring, running, or shutting down the runner please check out our [product docs.](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/adding-self-hosted-runners) ## SHA-256 Checksums diff --git a/src/Directory.Build.props b/src/Directory.Build.props index 9db5faca3..9c069b12f 100644 --- a/src/Directory.Build.props +++ b/src/Directory.Build.props @@ -57,4 +57,13 @@ true + + + + true + + all + + moderate + diff --git a/src/Misc/contentHash/dotnetRuntime/linux-arm b/src/Misc/contentHash/dotnetRuntime/linux-arm deleted file mode 100644 index 9f55d62ef..000000000 --- a/src/Misc/contentHash/dotnetRuntime/linux-arm +++ /dev/null @@ -1 +0,0 @@ -54d95a44d118dba852395991224a6b9c1abe916858c87138656f80c619e85331 \ No newline at end of file diff --git a/src/Misc/contentHash/dotnetRuntime/linux-arm64 b/src/Misc/contentHash/dotnetRuntime/linux-arm64 deleted file mode 100644 index c03c98ade..000000000 --- a/src/Misc/contentHash/dotnetRuntime/linux-arm64 +++ /dev/null @@ -1 +0,0 @@ -68015af17f06a824fa478e62ae7393766ce627fd5599ab916432a14656a19a52 \ No newline at end of file diff --git a/src/Misc/contentHash/dotnetRuntime/linux-x64 b/src/Misc/contentHash/dotnetRuntime/linux-x64 deleted file mode 100644 index 95a7155f7..000000000 
--- a/src/Misc/contentHash/dotnetRuntime/linux-x64 +++ /dev/null @@ -1 +0,0 @@ -a2628119ca419cb54e279103ffae7986cdbd0814d57c73ff0dc74c38be08b9ae \ No newline at end of file diff --git a/src/Misc/contentHash/dotnetRuntime/osx-arm64 b/src/Misc/contentHash/dotnetRuntime/osx-arm64 deleted file mode 100644 index d99ff5942..000000000 --- a/src/Misc/contentHash/dotnetRuntime/osx-arm64 +++ /dev/null @@ -1 +0,0 @@ -de71ca09ead807e1a2ce9df0a5b23eb7690cb71fff51169a77e4c3992be53dda \ No newline at end of file diff --git a/src/Misc/contentHash/dotnetRuntime/osx-x64 b/src/Misc/contentHash/dotnetRuntime/osx-x64 deleted file mode 100644 index 085b329b2..000000000 --- a/src/Misc/contentHash/dotnetRuntime/osx-x64 +++ /dev/null @@ -1 +0,0 @@ -d009e05e6b26d614d65be736a15d1bd151932121c16a9ff1b986deadecc982b9 \ No newline at end of file diff --git a/src/Misc/contentHash/dotnetRuntime/win-arm64 b/src/Misc/contentHash/dotnetRuntime/win-arm64 deleted file mode 100644 index 5c84f556e..000000000 --- a/src/Misc/contentHash/dotnetRuntime/win-arm64 +++ /dev/null @@ -1 +0,0 @@ -f730db39c2305800b4653795360ba9c10c68f384a46b85d808f1f9f0ed3c42e4 \ No newline at end of file diff --git a/src/Misc/contentHash/dotnetRuntime/win-x64 b/src/Misc/contentHash/dotnetRuntime/win-x64 deleted file mode 100644 index 6be8253b1..000000000 --- a/src/Misc/contentHash/dotnetRuntime/win-x64 +++ /dev/null @@ -1 +0,0 @@ -a35b5722375490e9473cdcccb5e18b41eba3dbf4344fe31abc9821e21f18ea5a \ No newline at end of file diff --git a/src/Misc/contentHash/externals/linux-arm b/src/Misc/contentHash/externals/linux-arm deleted file mode 100644 index 62be8089e..000000000 --- a/src/Misc/contentHash/externals/linux-arm +++ /dev/null @@ -1 +0,0 @@ -4bf3e1af0d482af1b2eaf9f08250248a8c1aea8ec20a3c5be116d58cdd930009 \ No newline at end of file diff --git a/src/Misc/contentHash/externals/linux-arm64 b/src/Misc/contentHash/externals/linux-arm64 deleted file mode 100644 index bde540d4f..000000000 --- a/src/Misc/contentHash/externals/linux-arm64 +++ /dev/null @@ -1 +0,0 @@ -ec1719a8cb4d8687328aa64f4aa7c4e3498a715d8939117874782e3e6e63a14b \ No newline at end of file diff --git a/src/Misc/contentHash/externals/linux-x64 b/src/Misc/contentHash/externals/linux-x64 deleted file mode 100644 index d23948a68..000000000 --- a/src/Misc/contentHash/externals/linux-x64 +++ /dev/null @@ -1 +0,0 @@ -50538de29f173bb73f708c4ed2c8328a62b8795829b97b2a6cb57197e2305287 \ No newline at end of file diff --git a/src/Misc/contentHash/externals/osx-arm64 b/src/Misc/contentHash/externals/osx-arm64 deleted file mode 100644 index bea235cd7..000000000 --- a/src/Misc/contentHash/externals/osx-arm64 +++ /dev/null @@ -1 +0,0 @@ -a0a96cbb7593643b69e669bf14d7b29b7f27800b3a00bb3305aebe041456c701 \ No newline at end of file diff --git a/src/Misc/contentHash/externals/osx-x64 b/src/Misc/contentHash/externals/osx-x64 deleted file mode 100644 index d61ff6fd5..000000000 --- a/src/Misc/contentHash/externals/osx-x64 +++ /dev/null @@ -1 +0,0 @@ -6255b22692779467047ecebd60ad46984866d75cdfe10421d593a7b51d620b09 \ No newline at end of file diff --git a/src/Misc/contentHash/externals/win-arm64 b/src/Misc/contentHash/externals/win-arm64 deleted file mode 100644 index d0bd205e5..000000000 --- a/src/Misc/contentHash/externals/win-arm64 +++ /dev/null @@ -1 +0,0 @@ -6ff1abd055dc35bfbf06f75c2f08908f660346f66ad1d8f81c910068e9ba029d \ No newline at end of file diff --git a/src/Misc/contentHash/externals/win-x64 b/src/Misc/contentHash/externals/win-x64 deleted file mode 100644 index 1c8dd6223..000000000 --- 
a/src/Misc/contentHash/externals/win-x64 +++ /dev/null @@ -1 +0,0 @@ -433a6d748742d12abd20dc2a79b62ac3d9718ae47ef26f8e84dc8c180eea3659 \ No newline at end of file diff --git a/src/Misc/dotnet-install.ps1 b/src/Misc/dotnet-install.ps1 index 44828c9df..39504d375 100644 --- a/src/Misc/dotnet-install.ps1 +++ b/src/Misc/dotnet-install.ps1 @@ -9,22 +9,41 @@ .DESCRIPTION Installs dotnet cli. If dotnet installation already exists in the given directory it will update it only if the requested version differs from the one already installed. + + Note that the intended use of this script is for Continuous Integration (CI) scenarios, where: + - The SDK needs to be installed without user interaction and without admin rights. + - The SDK installation doesn't need to persist across multiple CI runs. + To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer. + .PARAMETER Channel Default: LTS Download from the Channel specified. Possible values: - - Current - most current release - - LTS - most current supported release + - STS - the most recent Standard Term Support release + - LTS - the most recent Long Term Support release - 2-part version in a format A.B - represents a specific release examples: 2.0, 1.0 - - Branch name - examples: release/2.0.0, Master - Note: The version parameter overrides the channel parameter. + - 3-part version in a format A.B.Cxx - represents a specific SDK release + examples: 5.0.1xx, 5.0.2xx + Supported since 5.0 release + Warning: Value "Current" is deprecated for the Channel parameter. Use "STS" instead. + Note: The version parameter overrides the channel parameter when any version other than 'latest' is used. +.PARAMETER Quality + Download the latest build of specified quality in the channel. The possible values are: daily, signed, validated, preview, GA. + Works only in combination with channel. Not applicable for STS and LTS channels and will be ignored if those channels are used. + For SDK use channel in A.B.Cxx format: using quality together with channel in A.B format is not supported. + Supported since 5.0 release. + Note: The version parameter overrides the channel parameter when any version other than 'latest' is used, and therefore overrides the quality. .PARAMETER Version Default: latest Represents a build version on specific channel. Possible values: - - latest - most latest build on specific channel + - latest - the latest build on specific channel - 3-part version in a format A.B.C - represents specific version of build examples: 2.0.0-preview2-006120, 1.1.0 +.PARAMETER Internal + Download internal builds. Requires providing credentials via -FeedCredential parameter. +.PARAMETER FeedCredential + Token to access Azure feed. Used as a query string to append to the Azure feed. + This parameter typically is not specified. .PARAMETER InstallDir Default: %LocalAppData%\Microsoft\dotnet Path to where to install dotnet. Note that binaries will be placed directly in a given directory. @@ -53,15 +72,12 @@ .PARAMETER Verbose Displays diagnostics information. .PARAMETER AzureFeed - Default: https://dotnetcli.azureedge.net/dotnet - This parameter typically is not changed by the user. - It allows changing the URL for the Azure feed used by this installer. + Default: https://builds.dotnet.microsoft.com/dotnet + For internal use only. + Allows using a different storage to download SDK archives from. .PARAMETER UncachedFeed - This parameter typically is not changed by the user. 
- It allows changing the URL for the Uncached feed used by this installer. -.PARAMETER FeedCredential - Used as a query string to append to the Azure feed. - It allows changing the URL to use non-public blob storage accounts. + For internal use only. + Allows using a different storage to download SDK archives from. .PARAMETER ProxyAddress If set, the installer will use the proxy when making web requests .PARAMETER ProxyUseDefaultCredentials @@ -72,52 +88,53 @@ .PARAMETER SkipNonVersionedFiles Default: false Skips installing non-versioned files if they already exist, such as dotnet.exe. -.PARAMETER NoCdn - Disable downloading from the Azure CDN, and use the uncached feed directly. .PARAMETER JSonFile Determines the SDK version from a user specified global.json file Note: global.json must have a value for 'SDK:Version' +.PARAMETER DownloadTimeout + Determines timeout duration in seconds for dowloading of the SDK file + Default: 1200 seconds (20 minutes) +.PARAMETER KeepZip + If set, downloaded file is kept +.PARAMETER ZipPath + Use that path to store installer, generated by default +.EXAMPLE + dotnet-install.ps1 -Version 7.0.401 + Installs the .NET SDK version 7.0.401 +.EXAMPLE + dotnet-install.ps1 -Channel 8.0 -Quality GA + Installs the latest GA (general availability) version of the .NET 8.0 SDK #> [cmdletbinding()] param( - [string]$Channel="LTS", - [string]$Version="Latest", - [string]$JSonFile, - [string]$InstallDir="", - [string]$Architecture="", - [ValidateSet("dotnet", "aspnetcore", "windowsdesktop", IgnoreCase = $false)] - [string]$Runtime, - [Obsolete("This parameter may be removed in a future version of this script. The recommended alternative is '-Runtime dotnet'.")] - [switch]$SharedRuntime, - [switch]$DryRun, - [switch]$NoPath, - [string]$AzureFeed="https://dotnetcli.azureedge.net/dotnet", - [string]$UncachedFeed="https://dotnetcli.blob.core.windows.net/dotnet", - [string]$FeedCredential, - [string]$ProxyAddress, - [switch]$ProxyUseDefaultCredentials, - [string[]]$ProxyBypassList=@(), - [switch]$SkipNonVersionedFiles, - [switch]$NoCdn + [string]$Channel = "LTS", + [string]$Quality, + [string]$Version = "Latest", + [switch]$Internal, + [string]$JSonFile, + [Alias('i')][string]$InstallDir = "", + [string]$Architecture = "", + [string]$Runtime, + [Obsolete("This parameter may be removed in a future version of this script. 
The recommended alternative is '-Runtime dotnet'.")] + [switch]$SharedRuntime, + [switch]$DryRun, + [switch]$NoPath, + [string]$AzureFeed, + [string]$UncachedFeed, + [string]$FeedCredential, + [string]$ProxyAddress, + [switch]$ProxyUseDefaultCredentials, + [string[]]$ProxyBypassList = @(), + [switch]$SkipNonVersionedFiles, + [int]$DownloadTimeout = 1200, + [switch]$KeepZip, + [string]$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName()), + [switch]$Help ) Set-StrictMode -Version Latest -$ErrorActionPreference="Stop" -$ProgressPreference="SilentlyContinue" - -if ($NoCdn) { - $AzureFeed = $UncachedFeed -} - -$BinFolderRelativePath="" - -if ($SharedRuntime -and (-not $Runtime)) { - $Runtime = "dotnet" -} - -# example path with regex: shared/1.0.0-beta-12345/somepath -$VersionRegEx="/\d+\.\d+[^/]+/" -$OverrideNonVersionedFiles = !$SkipNonVersionedFiles +$ErrorActionPreference = "Stop" +$ProgressPreference = "SilentlyContinue" function Say($str) { try { @@ -161,14 +178,38 @@ function Say-Verbose($str) { } } +function Measure-Action($name, $block) { + $time = Measure-Command $block + $totalSeconds = $time.TotalSeconds + Say-Verbose "Action '$name' took $totalSeconds seconds" +} + +function Get-Remote-File-Size($zipUri) { + try { + $response = Invoke-WebRequest -Uri $zipUri -Method Head + $fileSize = $response.Headers["Content-Length"] + if ((![string]::IsNullOrEmpty($fileSize))) { + Say "Remote file $zipUri size is $fileSize bytes." + + return $fileSize + } + } + catch { + Say-Verbose "Content-Length header was not extracted for $zipUri." + } + + return $null +} + function Say-Invocation($Invocation) { $command = $Invocation.MyCommand; $args = (($Invocation.BoundParameters.Keys | foreach { "-$_ `"$($Invocation.BoundParameters[$_])`"" }) -join " ") Say-Verbose "$command $args" } -function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [int]$SecondsBetweenAttempts = 1) { +function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [System.Threading.CancellationToken]$cancellationToken = [System.Threading.CancellationToken]::None, [int]$MaxAttempts = 3, [int]$SecondsBetweenAttempts = 1) { $Attempts = 0 + $local:startTime = $(get-date) while ($true) { try { @@ -176,11 +217,15 @@ function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [in } catch { $Attempts++ - if ($Attempts -lt $MaxAttempts) { + if (($Attempts -lt $MaxAttempts) -and -not $cancellationToken.IsCancellationRequested) { Start-Sleep $SecondsBetweenAttempts } else { - throw + $local:elapsedTime = $(get-date) - $local:startTime + if (($local:elapsedTime.TotalSeconds - $DownloadTimeout) -gt 0 -and -not $cancellationToken.IsCancellationRequested) { + throw New-Object System.TimeoutException("Failed to reach the server: connection timeout: default timeout is $DownloadTimeout second(s)"); + } + throw; } } } @@ -193,20 +238,33 @@ function Get-Machine-Architecture() { # To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432. # PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE. 
# Possible values: amd64, x64, x86, arm64, arm - - if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null ) - { + if ( $ENV:PROCESSOR_ARCHITEW6432 -ne $null ) { return $ENV:PROCESSOR_ARCHITEW6432 } + try { + if ( ((Get-CimInstance -ClassName CIM_OperatingSystem).OSArchitecture) -like "ARM*") { + if ( [Environment]::Is64BitOperatingSystem ) { + return "arm64" + } + return "arm" + } + } + catch { + # Machine doesn't support Get-CimInstance + } + return $ENV:PROCESSOR_ARCHITECTURE } function Get-CLIArchitecture-From-Architecture([string]$Architecture) { Say-Invocation $MyInvocation - switch ($Architecture.ToLower()) { - { $_ -eq "" } { return Get-CLIArchitecture-From-Architecture $(Get-Machine-Architecture) } + if ($Architecture -eq "") { + $Architecture = Get-Machine-Architecture + } + + switch ($Architecture.ToLowerInvariant()) { { ($_ -eq "amd64") -or ($_ -eq "x64") } { return "x64" } { $_ -eq "x86" } { return "x86" } { $_ -eq "arm" } { return "arm" } @@ -215,20 +273,90 @@ function Get-CLIArchitecture-From-Architecture([string]$Architecture) { } } +function ValidateFeedCredential([string] $FeedCredential) { + if ($Internal -and [string]::IsNullOrWhitespace($FeedCredential)) { + $message = "Provide credentials via -FeedCredential parameter." + if ($DryRun) { + Say-Warning "$message" + } + else { + throw "$message" + } + } + + #FeedCredential should start with "?", for it to be added to the end of the link. + #adding "?" at the beginning of the FeedCredential if needed. + if ((![string]::IsNullOrWhitespace($FeedCredential)) -and ($FeedCredential[0] -ne '?')) { + $FeedCredential = "?" + $FeedCredential + } + + return $FeedCredential +} +function Get-NormalizedQuality([string]$Quality) { + Say-Invocation $MyInvocation + + if ([string]::IsNullOrEmpty($Quality)) { + return "" + } + + switch ($Quality) { + { @("daily", "signed", "validated", "preview") -contains $_ } { return $Quality.ToLowerInvariant() } + #ga quality is available without specifying quality, so normalizing it to empty + { $_ -eq "ga" } { return "" } + default { throw "'$Quality' is not a supported value for -Quality option. Supported values are: daily, signed, validated, preview, ga. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues." } + } +} + +function Get-NormalizedChannel([string]$Channel) { + Say-Invocation $MyInvocation + + if ([string]::IsNullOrEmpty($Channel)) { + return "" + } + + if ($Channel.Contains("Current")) { + Say-Warning 'Value "Current" is deprecated for -Channel option. Use "STS" instead.' + } + + if ($Channel.StartsWith('release/')) { + Say-Warning 'Using branch name with -Channel option is no longer supported with newer releases. Use -Quality option with a channel in X.Y format instead, such as "-Channel 5.0 -Quality Daily."' + } + + switch ($Channel) { + { $_ -eq "lts" } { return "LTS" } + { $_ -eq "sts" } { return "STS" } + { $_ -eq "current" } { return "STS" } + default { return $Channel.ToLowerInvariant() } + } +} + +function Get-NormalizedProduct([string]$Runtime) { + Say-Invocation $MyInvocation + + switch ($Runtime) { + { $_ -eq "dotnet" } { return "dotnet-runtime" } + { $_ -eq "aspnetcore" } { return "aspnetcore-runtime" } + { $_ -eq "windowsdesktop" } { return "windowsdesktop-runtime" } + { [string]::IsNullOrEmpty($_) } { return "dotnet-sdk" } + default { throw "'$Runtime' is not a supported value for -Runtime option, supported values are: dotnet, aspnetcore, windowsdesktop. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues." 
} + } +} + + # The version text returned from the feeds is a 1-line or 2-line string: # For the SDK and the dotnet runtime (2 lines): # Line 1: # commit_hash # Line 2: # 4-part version # For the aspnetcore runtime (1 line): # Line 1: # 4-part version -function Get-Version-Info-From-Version-Text([string]$VersionText) { +function Get-Version-From-LatestVersion-File-Content([string]$VersionText) { Say-Invocation $MyInvocation $Data = -split $VersionText $VersionInfo = @{ CommitHash = $(if ($Data.Count -gt 1) { $Data[0] }) - Version = $Data[-1] # last line is always the version number. + Version = $Data[-1] # last line is always the version number. } return $VersionInfo } @@ -243,10 +371,10 @@ function Load-Assembly([string] $Assembly) { } } -function GetHTTPResponse([Uri] $Uri) -{ - Invoke-With-Retry( - { +function GetHTTPResponse([Uri] $Uri, [bool]$HeaderOnly, [bool]$DisableRedirect, [bool]$DisableFeedCredential) { + $cts = New-Object System.Threading.CancellationTokenSource + + $downloadScript = { $HttpClient = $null @@ -254,15 +382,21 @@ function GetHTTPResponse([Uri] $Uri) # HttpClient is used vs Invoke-WebRequest in order to support Nano Server which doesn't support the Invoke-WebRequest cmdlet. Load-Assembly -Assembly System.Net.Http - if(-not $ProxyAddress) { + if (-not $ProxyAddress) { try { # Despite no proxy being explicitly specified, we may still be behind a default proxy $DefaultProxy = [System.Net.WebRequest]::DefaultWebProxy; - if($DefaultProxy -and (-not $DefaultProxy.IsBypassed($Uri))) { - $ProxyAddress = $DefaultProxy.GetProxy($Uri).OriginalString + if ($DefaultProxy -and (-not $DefaultProxy.IsBypassed($Uri))) { + if ($null -ne $DefaultProxy.GetProxy($Uri)) { + $ProxyAddress = $DefaultProxy.GetProxy($Uri).OriginalString + } + else { + $ProxyAddress = $null + } $ProxyUseDefaultCredentials = $true } - } catch { + } + catch { # Eat the exception and move forward as the above code is an attempt # at resolving the DefaultProxy that may not have been a problem. $ProxyAddress = $null @@ -270,32 +404,51 @@ function GetHTTPResponse([Uri] $Uri) } } - if($ProxyAddress) { - $HttpClientHandler = New-Object System.Net.Http.HttpClientHandler - $HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{ - Address=$ProxyAddress; - UseDefaultCredentials=$ProxyUseDefaultCredentials; - BypassList = $ProxyBypassList; + $HttpClientHandler = New-Object System.Net.Http.HttpClientHandler + if ($ProxyAddress) { + $HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{ + Address = $ProxyAddress; + UseDefaultCredentials = $ProxyUseDefaultCredentials; + BypassList = $ProxyBypassList; } - $HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler + } + if ($DisableRedirect) { + $HttpClientHandler.AllowAutoRedirect = $false + } + $HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler + + # Default timeout for HttpClient is 100s. For a 50 MB download this assumes 500 KB/s average, any less will time out + # Defaulting to 20 minutes allows it to work over much slower connections. + $HttpClient.Timeout = New-TimeSpan -Seconds $DownloadTimeout + + if ($HeaderOnly) { + $completionOption = [System.Net.Http.HttpCompletionOption]::ResponseHeadersRead } else { - - $HttpClient = New-Object System.Net.Http.HttpClient + $completionOption = [System.Net.Http.HttpCompletionOption]::ResponseContentRead } - # Default timeout for HttpClient is 100s. 
For a 50 MB download this assumes 500 KB/s average, any less will time out - # 20 minutes allows it to work over much slower connections. - $HttpClient.Timeout = New-TimeSpan -Minutes 20 - $Task = $HttpClient.GetAsync("${Uri}${FeedCredential}").ConfigureAwait("false"); + + if ($DisableFeedCredential) { + $UriWithCredential = $Uri + } + else { + $UriWithCredential = "${Uri}${FeedCredential}" + } + + $Task = $HttpClient.GetAsync("$UriWithCredential", $completionOption).ConfigureAwait("false"); $Response = $Task.GetAwaiter().GetResult(); - if (($null -eq $Response) -or (-not ($Response.IsSuccessStatusCode))) { + if (($null -eq $Response) -or ((-not $HeaderOnly) -and (-not ($Response.IsSuccessStatusCode)))) { # The feed credential is potentially sensitive info. Do not log FeedCredential to console output. $DownloadException = [System.Exception] "Unable to download $Uri." if ($null -ne $Response) { $DownloadException.Data["StatusCode"] = [int] $Response.StatusCode $DownloadException.Data["ErrorMessage"] = "Unable to download $Uri. Returned HTTP status code: " + $DownloadException.Data["StatusCode"] + + if (404 -eq [int] $Response.StatusCode) { + $cts.Cancel() + } } throw $DownloadException @@ -310,8 +463,8 @@ function GetHTTPResponse([Uri] $Uri) $CurrentException = $PSItem.Exception $ErrorMsg = $CurrentException.Message + "`r`n" while ($CurrentException.InnerException) { - $CurrentException = $CurrentException.InnerException - $ErrorMsg += $CurrentException.Message + "`r`n" + $CurrentException = $CurrentException.InnerException + $ErrorMsg += $CurrentException.Message + "`r`n" } # Check if there is an issue concerning TLS. @@ -323,37 +476,49 @@ function GetHTTPResponse([Uri] $Uri) throw $DownloadException } finally { - if ($HttpClient -ne $null) { + if ($null -ne $HttpClient) { $HttpClient.Dispose() } } - }) + } + + try { + return Invoke-With-Retry $downloadScript $cts.Token + } + finally { + if ($null -ne $cts) { + $cts.Dispose() + } + } } -function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel) { +function Get-Version-From-LatestVersion-File([string]$AzureFeed, [string]$Channel) { Say-Invocation $MyInvocation $VersionFileUrl = $null if ($Runtime -eq "dotnet") { - $VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version" + $VersionFileUrl = "$AzureFeed/Runtime/$Channel/latest.version" } elseif ($Runtime -eq "aspnetcore") { - $VersionFileUrl = "$UncachedFeed/aspnetcore/Runtime/$Channel/latest.version" + $VersionFileUrl = "$AzureFeed/aspnetcore/Runtime/$Channel/latest.version" } elseif ($Runtime -eq "windowsdesktop") { - $VersionFileUrl = "$UncachedFeed/WindowsDesktop/$Channel/latest.version" + $VersionFileUrl = "$AzureFeed/WindowsDesktop/$Channel/latest.version" } elseif (-not $Runtime) { - $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version" + $VersionFileUrl = "$AzureFeed/Sdk/$Channel/latest.version" } else { throw "Invalid value for `$Runtime" } + + Say-Verbose "Constructed latest.version URL: $VersionFileUrl" + try { $Response = GetHTTPResponse -Uri $VersionFileUrl } catch { - Say-Error "Could not resolve version information." + Say-Verbose "Failed to download latest.version file." throw } $StringContent = $Response.Content.ReadAsStringAsync().Result @@ -365,7 +530,7 @@ function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel) { default { throw "``$Response.Content.Headers.ContentType`` is an unknown .version file content type." 
} } - $VersionInfo = Get-Version-Info-From-Version-Text $VersionText + $VersionInfo = Get-Version-From-LatestVersion-File-Content $VersionText return $VersionInfo } @@ -411,8 +576,8 @@ function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel, Say-Invocation $MyInvocation if (-not $JSonFile) { - if ($Version.ToLower() -eq "latest") { - $LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel + if ($Version.ToLowerInvariant() -eq "latest") { + $LatestVersionInfo = Get-Version-From-LatestVersion-File -AzureFeed $AzureFeed -Channel $Channel return $LatestVersionInfo.Version } else { @@ -439,11 +604,9 @@ function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string elseif ($Runtime -eq "windowsdesktop") { # The windows desktop runtime is part of the core runtime layout prior to 5.0 $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip" - if ($SpecificVersion -match '^(\d+)\.(.*)$') - { + if ($SpecificVersion -match '^(\d+)\.(.*)$') { $majorVersion = [int]$Matches[1] - if ($majorVersion -ge 5) - { + if ($majorVersion -ge 5) { $PayloadURL = "$AzureFeed/WindowsDesktop/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip" } } @@ -478,58 +641,112 @@ function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [ return $PayloadURL } -function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion) { +function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion, [string]$PackageDownloadLink) { Say-Invocation $MyInvocation - if ($Runtime -eq "dotnet") { - $ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt" - } - elseif ($Runtime -eq "aspnetcore") { - $ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/productVersion.txt" - } - elseif ($Runtime -eq "windowsdesktop") { - # The windows desktop runtime is part of the core runtime layout prior to 5.0 - $ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt" - if ($SpecificVersion -match '^(\d+)\.(.*)') - { - $majorVersion = [int]$Matches[1] - if ($majorVersion -ge 5) - { - $ProductVersionTxtURL = "$AzureFeed/WindowsDesktop/$SpecificVersion/productVersion.txt" + # Try to get the version number, using the productVersion.txt file located next to the installer file. + $ProductVersionTxtURLs = (Get-Product-Version-Url $AzureFeed $SpecificVersion $PackageDownloadLink -Flattened $true), + (Get-Product-Version-Url $AzureFeed $SpecificVersion $PackageDownloadLink -Flattened $false) + + Foreach ($ProductVersionTxtURL in $ProductVersionTxtURLs) { + Say-Verbose "Checking for the existence of $ProductVersionTxtURL" + + try { + $productVersionResponse = GetHTTPResponse($productVersionTxtUrl) + + if ($productVersionResponse.StatusCode -eq 200) { + $productVersion = $productVersionResponse.Content.ReadAsStringAsync().Result.Trim() + if ($productVersion -ne $SpecificVersion) { + Say "Using alternate version $productVersion found in $ProductVersionTxtURL" + } + return $productVersion } + else { + Say-Verbose "Got StatusCode $($productVersionResponse.StatusCode) when trying to get productVersion.txt at $productVersionTxtUrl." + } + } + catch { + Say-Verbose "Could not read productVersion.txt at $productVersionTxtUrl (Exception: '$($_.Exception.Message)'. 
)" } } - elseif (-not $Runtime) { - $ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/productVersion.txt" - } - else { - throw "Invalid value '$Runtime' specified for `$Runtime" + + # Getting the version number with productVersion.txt has failed. Try parsing the download link for a version number. + if ([string]::IsNullOrEmpty($PackageDownloadLink)) { + Say-Verbose "Using the default value '$SpecificVersion' as the product version." + return $SpecificVersion } - Say-Verbose "Checking for existence of $ProductVersionTxtURL" + $productVersion = Get-ProductVersionFromDownloadLink $PackageDownloadLink $SpecificVersion + return $productVersion +} - try { - $productVersionResponse = GetHTTPResponse($productVersionTxtUrl) +function Get-Product-Version-Url([string]$AzureFeed, [string]$SpecificVersion, [string]$PackageDownloadLink, [bool]$Flattened) { + Say-Invocation $MyInvocation - if ($productVersionResponse.StatusCode -eq 200) { - $productVersion = $productVersionResponse.Content.ReadAsStringAsync().Result.Trim() - if ($productVersion -ne $SpecificVersion) - { - Say "Using alternate version $productVersion found in $ProductVersionTxtURL" - } + $majorVersion = $null + if ($SpecificVersion -match '^(\d+)\.(.*)') { + $majorVersion = $Matches[1] -as [int] + } - return $productVersion + $pvFileName = 'productVersion.txt' + if ($Flattened) { + if (-not $Runtime) { + $pvFileName = 'sdk-productVersion.txt' + } + elseif ($Runtime -eq "dotnet") { + $pvFileName = 'runtime-productVersion.txt' } else { - Say-Verbose "Got StatusCode $($productVersionResponse.StatusCode) trying to get productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion" - $productVersion = $SpecificVersion + $pvFileName = "$Runtime-productVersion.txt" } - } catch { - Say-Verbose "Could not read productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion (Exception: '$($_.Exception.Message)' )" - $productVersion = $SpecificVersion } - return $productVersion + if ([string]::IsNullOrEmpty($PackageDownloadLink)) { + if ($Runtime -eq "dotnet") { + $ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/$pvFileName" + } + elseif ($Runtime -eq "aspnetcore") { + $ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/$pvFileName" + } + elseif ($Runtime -eq "windowsdesktop") { + # The windows desktop runtime is part of the core runtime layout prior to 5.0 + $ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/$pvFileName" + if ($majorVersion -ne $null -and $majorVersion -ge 5) { + $ProductVersionTxtURL = "$AzureFeed/WindowsDesktop/$SpecificVersion/$pvFileName" + } + } + elseif (-not $Runtime) { + $ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/$pvFileName" + } + else { + throw "Invalid value '$Runtime' specified for `$Runtime" + } + } + else { + $ProductVersionTxtURL = $PackageDownloadLink.Substring(0, $PackageDownloadLink.LastIndexOf("/")) + "/$pvFileName" + } + + Say-Verbose "Constructed productVersion link: $ProductVersionTxtURL" + + return $ProductVersionTxtURL +} + +function Get-ProductVersionFromDownloadLink([string]$PackageDownloadLink, [string]$SpecificVersion) { + Say-Invocation $MyInvocation + + #product specific version follows the product name + #for filename 'dotnet-sdk-3.1.404-win-x64.zip': the product version is 3.1.400 + $filename = $PackageDownloadLink.Substring($PackageDownloadLink.LastIndexOf("/") + 1) + $filenameParts = $filename.Split('-') + if ($filenameParts.Length -gt 2) { + $productVersion = $filenameParts[2] + 
Say-Verbose "Extracted product version '$productVersion' from download link '$PackageDownloadLink'." + } + else { + Say-Verbose "Using the default value '$SpecificVersion' as the product version." + $productVersion = $SpecificVersion + } + return $productVersion } function Get-User-Share-Path() { @@ -539,6 +756,9 @@ function Get-User-Share-Path() { if (!$InstallRoot) { $InstallRoot = "$env:LocalAppData\Microsoft\dotnet" } + elseif ($InstallRoot -like "$env:ProgramFiles\dotnet\?*") { + Say-Warning "The install root specified by the environment variable DOTNET_INSTALL_DIR points to the sub folder of $env:ProgramFiles\dotnet which is the default dotnet install root using .NET SDK installer. It is better to keep aligned with .NET SDK installer." + } return $InstallRoot } @@ -551,6 +771,19 @@ function Resolve-Installation-Path([string]$InstallDir) { return $InstallDir } +function Test-User-Write-Access([string]$InstallDir) { + try { + $tempFileName = [guid]::NewGuid().ToString() + $tempFilePath = Join-Path -Path $InstallDir -ChildPath $tempFileName + New-Item -Path $tempFilePath -ItemType File -Force + Remove-Item $tempFilePath -Force + return $true + } + catch { + return $false + } +} + function Is-Dotnet-Package-Installed([string]$InstallRoot, [string]$RelativePathToPackage, [string]$SpecificVersion) { Say-Invocation $MyInvocation @@ -567,7 +800,8 @@ function Get-Absolute-Path([string]$RelativeOrAbsolutePath) { } function Get-Path-Prefix-With-Version($path) { - $match = [regex]::match($path, $VersionRegEx) + # example path with regex: shared/1.0.0-beta-12345/somepath + $match = [regex]::match($path, "/\d+\.\d+[^/]+/") if ($match.Success) { return $entry.FullName.Substring(0, $match.Index + $match.Length) } @@ -581,7 +815,7 @@ function Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package([Sys $ret = @() foreach ($entry in $Zip.Entries) { $dir = Get-Path-Prefix-With-Version $entry.FullName - if ($dir -ne $null) { + if ($null -ne $dir) { $path = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $dir) if (-Not (Test-Path $path -PathType Container)) { $ret += $dir @@ -622,10 +856,10 @@ function Extract-Dotnet-Package([string]$ZipPath, [string]$OutPath) { foreach ($entry in $Zip.Entries) { $PathWithVersion = Get-Path-Prefix-With-Version $entry.FullName - if (($PathWithVersion -eq $null) -Or ($DirectoriesToUnpack -contains $PathWithVersion)) { + if (($null -eq $PathWithVersion) -Or ($DirectoriesToUnpack -contains $PathWithVersion)) { $DestinationPath = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $entry.FullName) $DestinationDir = Split-Path -Parent $DestinationPath - $OverrideFiles=$OverrideNonVersionedFiles -Or (-Not (Test-Path $DestinationPath)) + $OverrideFiles = $OverrideNonVersionedFiles -Or (-Not (Test-Path $DestinationPath)) if ((-Not $DestinationPath.EndsWith("\")) -And $OverrideFiles) { New-Item -ItemType Directory -Force -Path $DestinationDir | Out-Null [System.IO.Compression.ZipFileExtensions]::ExtractToFile($entry, $DestinationPath, $OverrideNonVersionedFiles) @@ -633,8 +867,12 @@ function Extract-Dotnet-Package([string]$ZipPath, [string]$OutPath) { } } } + catch { + Say-Error "Failed to extract package. 
Exception: $_" + throw; + } finally { - if ($Zip -ne $null) { + if ($null -ne $Zip) { $Zip.Dispose() } } @@ -654,46 +892,70 @@ function DownloadFile($Source, [string]$OutPath) { } $Stream = $null - + try { $Response = GetHTTPResponse -Uri $Source $Stream = $Response.Content.ReadAsStreamAsync().Result $File = [System.IO.File]::Create($OutPath) $Stream.CopyTo($File) $File.Close() + + ValidateRemoteLocalFileSizes -LocalFileOutPath $OutPath -SourceUri $Source } finally { - if ($Stream -ne $null) { + if ($null -ne $Stream) { $Stream.Dispose() } } } +function ValidateRemoteLocalFileSizes([string]$LocalFileOutPath, $SourceUri) { + try { + $remoteFileSize = Get-Remote-File-Size -zipUri $SourceUri + $fileSize = [long](Get-Item $LocalFileOutPath).Length + Say "Downloaded file $SourceUri size is $fileSize bytes." + + if ((![string]::IsNullOrEmpty($remoteFileSize)) -and !([string]::IsNullOrEmpty($fileSize)) ) { + if ($remoteFileSize -ne $fileSize) { + Say "The remote and local file sizes are not equal. Remote file size is $remoteFileSize bytes and local size is $fileSize bytes. The local package may be corrupted." + } + else { + Say "The remote and local file sizes are equal." + } + } + else { + Say "Either downloaded or local package size can not be measured. One of them may be corrupted." + } + } + catch { + Say "Either downloaded or local package size can not be measured. One of them may be corrupted." + } +} + function SafeRemoveFile($Path) { try { if (Test-Path $Path) { Remove-Item $Path Say-Verbose "The temporary file `"$Path`" was removed." } - else - { + else { Say-Verbose "The temporary file `"$Path`" does not exist, therefore is not removed." } } - catch - { + catch { Say-Warning "Failed to remove the temporary file: `"$Path`", remove it manually." } } -function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolderRelativePath) { - $BinPath = Get-Absolute-Path $(Join-Path -Path $InstallRoot -ChildPath $BinFolderRelativePath) +function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot) { + $BinPath = Get-Absolute-Path $(Join-Path -Path $InstallRoot -ChildPath "") if (-Not $NoPath) { $SuffixedBinPath = "$BinPath;" if (-Not $env:path.Contains($SuffixedBinPath)) { Say "Adding to current process PATH: `"$BinPath`". Note: This change will not be visible if PowerShell was run as a child process." $env:path = $SuffixedBinPath + $env:path - } else { + } + else { Say-Verbose "Current process PATH already contains `"$BinPath`"" } } @@ -702,394 +964,642 @@ function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolde } } -Say "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:" -Say "- The SDK needs to be installed without user interaction and without admin rights." -Say "- The SDK installation doesn't need to persist across multiple CI runs." -Say "To set up a development environment or to run apps, use installers rather than this script. 
Visit https://dotnet.microsoft.com/download to get the installer.`r`n" - -$CLIArchitecture = Get-CLIArchitecture-From-Architecture $Architecture -$SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $AzureFeed -Channel $Channel -Version $Version -JSonFile $JSonFile -$DownloadLink, $EffectiveVersion = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture -$LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture - -$InstallRoot = Resolve-Installation-Path $InstallDir -Say-Verbose "InstallRoot: $InstallRoot" -$ScriptName = $MyInvocation.MyCommand.Name - -if ($DryRun) { +function PrintDryRunOutput($Invocation, $DownloadLinks) { Say "Payload URLs:" - Say "Primary named payload URL: $DownloadLink" - if ($LegacyDownloadLink) { - Say "Legacy named payload URL: $LegacyDownloadLink" + + for ($linkIndex = 0; $linkIndex -lt $DownloadLinks.count; $linkIndex++) { + Say "URL #$linkIndex - $($DownloadLinks[$linkIndex].type): $($DownloadLinks[$linkIndex].downloadLink)" } $RepeatableCommand = ".\$ScriptName -Version `"$SpecificVersion`" -InstallDir `"$InstallRoot`" -Architecture `"$CLIArchitecture`"" if ($Runtime -eq "dotnet") { - $RepeatableCommand+=" -Runtime `"dotnet`"" + $RepeatableCommand += " -Runtime `"dotnet`"" } elseif ($Runtime -eq "aspnetcore") { - $RepeatableCommand+=" -Runtime `"aspnetcore`"" + $RepeatableCommand += " -Runtime `"aspnetcore`"" } - foreach ($key in $MyInvocation.BoundParameters.Keys) { - if (-not (@("Architecture","Channel","DryRun","InstallDir","Runtime","SharedRuntime","Version") -contains $key)) { - $RepeatableCommand+=" -$key `"$($MyInvocation.BoundParameters[$key])`"" + + foreach ($key in $Invocation.BoundParameters.Keys) { + if (-not (@("Architecture", "Channel", "DryRun", "InstallDir", "Runtime", "SharedRuntime", "Version", "Quality", "FeedCredential") -contains $key)) { + $RepeatableCommand += " -$key `"$($Invocation.BoundParameters[$key])`"" } } + if ($Invocation.BoundParameters.Keys -contains "FeedCredential") { + $RepeatableCommand += " -FeedCredential `"`"" + } Say "Repeatable invocation: $RepeatableCommand" - if ($SpecificVersion -ne $EffectiveVersion) - { + if ($SpecificVersion -ne $EffectiveVersion) { Say "NOTE: Due to finding a version manifest with this runtime, it would actually install with version '$EffectiveVersion'" } - - return } -if ($Runtime -eq "dotnet") { - $assetName = ".NET Core Runtime" - $dotnetPackageRelativePath = "shared\Microsoft.NETCore.App" -} -elseif ($Runtime -eq "aspnetcore") { - $assetName = "ASP.NET Core Runtime" - $dotnetPackageRelativePath = "shared\Microsoft.AspNetCore.App" -} -elseif ($Runtime -eq "windowsdesktop") { - $assetName = ".NET Core Windows Desktop Runtime" - $dotnetPackageRelativePath = "shared\Microsoft.WindowsDesktop.App" -} -elseif (-not $Runtime) { - $assetName = ".NET Core SDK" - $dotnetPackageRelativePath = "sdk" -} -else { - throw "Invalid value for `$Runtime" -} - -if ($SpecificVersion -ne $EffectiveVersion) -{ - Say "Performing installation checks for effective version: $EffectiveVersion" - $SpecificVersion = $EffectiveVersion -} - -# Check if the SDK version is already installed. -$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion -if ($isAssetInstalled) { - Say "$assetName version $SpecificVersion is already installed." 
- Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath - return -} - -New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null - -$installDrive = $((Get-Item $InstallRoot).PSDrive.Name); -$diskInfo = Get-PSDrive -Name $installDrive -if ($diskInfo.Free / 1MB -le 100) { - throw "There is not enough disk space on drive ${installDrive}:" -} - -$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName()) -Say-Verbose "Zip path: $ZipPath" - -$DownloadFailed = $false - -$PrimaryDownloadStatusCode = 0 -$LegacyDownloadStatusCode = 0 - -$PrimaryDownloadFailedMsg = "" -$LegacyDownloadFailedMsg = "" - -Say "Downloading primary link $DownloadLink" -try { - DownloadFile -Source $DownloadLink -OutPath $ZipPath -} -catch { - if ($PSItem.Exception.Data.Contains("StatusCode")) { - $PrimaryDownloadStatusCode = $PSItem.Exception.Data["StatusCode"] - } - - if ($PSItem.Exception.Data.Contains("ErrorMessage")) { - $PrimaryDownloadFailedMsg = $PSItem.Exception.Data["ErrorMessage"] - } else { - $PrimaryDownloadFailedMsg = $PSItem.Exception.Message - } - - if ($PrimaryDownloadStatusCode -eq 404) { - Say "The resource at $DownloadLink is not available." - } else { - Say $PSItem.Exception.Message - } - - SafeRemoveFile -Path $ZipPath - - if ($LegacyDownloadLink) { - $DownloadLink = $LegacyDownloadLink - $ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName()) - Say-Verbose "Legacy zip path: $ZipPath" - Say "Downloading legacy link $DownloadLink" +# grab the 'stem' of the redirect and check it against all of our configured feeds, +# if it matches, we can be sure that the redirect is valid and we should use it for +# subsequent processing +function Sanitize-RedirectUrl([string]$url) { + $urlSegments = ([System.Uri]$url).Segments; + $urlStem = $urlSegments[2..($urlSegments.Length - 1)] -join ""; + Write-Verbose "Checking configured feeds for the asset at $urlStem" + foreach ($prospectiveFeed in $feeds) { + $trialUrl = "$prospectiveFeed/$urlStem"; + Write-Verbose "Checking $trialUrl" try { - DownloadFile -Source $DownloadLink -OutPath $ZipPath + $trialResponse = Invoke-WebRequest -Uri $trialUrl -Method HEAD + if ($trialResponse.StatusCode -eq 200) { + Write-Verbose "Found a match at $trialUrl" + return $trialUrl; + } + else { + Write-Verbose "No match at $trialUrl" + } } catch { - if ($PSItem.Exception.Data.Contains("StatusCode")) { - $LegacyDownloadStatusCode = $PSItem.Exception.Data["StatusCode"] - } - - if ($PSItem.Exception.Data.Contains("ErrorMessage")) { - $LegacyDownloadFailedMsg = $PSItem.Exception.Data["ErrorMessage"] - } else { - $LegacyDownloadFailedMsg = $PSItem.Exception.Message - } - - if ($LegacyDownloadStatusCode -eq 404) { - Say "The resource at $DownloadLink is not available." - } else { - Say $PSItem.Exception.Message - } - - SafeRemoveFile -Path $ZipPath - $DownloadFailed = $true + Write-Verbose "Failed to check $trialUrl" } } +} + +function Get-AkaMSDownloadLink([string]$Channel, [string]$Quality, [bool]$Internal, [string]$Product, [string]$Architecture) { + Say-Invocation $MyInvocation + + #quality is not supported for LTS or STS channel + if (![string]::IsNullOrEmpty($Quality) -and (@("LTS", "STS") -contains $Channel)) { + $Quality = "" + Say-Warning "Specifying quality for STS or LTS channel is not supported, the quality will be ignored." 
+ } + Say-Verbose "Retrieving primary payload URL from aka.ms link for channel: '$Channel', quality: '$Quality' product: '$Product', os: 'win', architecture: '$Architecture'." + + #construct aka.ms link + $akaMsLink = "https://aka.ms/dotnet" + if ($Internal) { + $akaMsLink += "/internal" + } + $akaMsLink += "/$Channel" + if (-not [string]::IsNullOrEmpty($Quality)) { + $akaMsLink += "/$Quality" + } + $akaMsLink += "/$Product-win-$Architecture.zip" + Say-Verbose "Constructed aka.ms link: '$akaMsLink'." + $akaMsDownloadLink = $null + + for ($maxRedirections = 9; $maxRedirections -ge 0; $maxRedirections--) { + #get HTTP response + #do not pass credentials as a part of the $akaMsLink and do not apply credentials in the GetHTTPResponse function + #otherwise the redirect link would have credentials as well + #it would result in applying credentials twice to the resulting link and thus breaking it, and in echoing credentials to the output as a part of redirect link + $Response = GetHTTPResponse -Uri $akaMsLink -HeaderOnly $true -DisableRedirect $true -DisableFeedCredential $true + Say-Verbose "Received response:`n$Response" + + if ([string]::IsNullOrEmpty($Response)) { + Say-Verbose "The link '$akaMsLink' is not valid: failed to get redirect location. The resource is not available." + return $null + } + + #if HTTP code is 301 (Moved Permanently), the redirect link exists + if ($Response.StatusCode -eq 301) { + try { + $akaMsDownloadLink = $Response.Headers.GetValues("Location")[0] + + if ([string]::IsNullOrEmpty($akaMsDownloadLink)) { + Say-Verbose "The link '$akaMsLink' is not valid: server returned 301 (Moved Permanently), but the headers do not contain the redirect location." + return $null + } + + Say-Verbose "The redirect location retrieved: '$akaMsDownloadLink'." + # This may yet be a link to another redirection. Attempt to retrieve the page again. + $akaMsLink = $akaMsDownloadLink + continue + } + catch { + Say-Verbose "The link '$akaMsLink' is not valid: failed to get redirect location." + return $null + } + } + elseif ((($Response.StatusCode -lt 300) -or ($Response.StatusCode -ge 400)) -and (-not [string]::IsNullOrEmpty($akaMsDownloadLink))) { + # Redirections have ended. + $actualRedirectUrl = Sanitize-RedirectUrl $akaMsDownloadLink + if ($null -ne $actualRedirectUrl) { + $akaMsDownloadLink = $actualRedirectUrl + } + + return $akaMsDownloadLink + } + + Say-Verbose "The link '$akaMsLink' is not valid: failed to retrieve the redirection location." + return $null + } + + Say-Verbose "Aka.ms links have redirected more than the maximum allowed redirections. This may be caused by a cyclic redirection of aka.ms links." + return $null + +} + +function Get-AkaMsLink-And-Version([string] $NormalizedChannel, [string] $NormalizedQuality, [bool] $Internal, [string] $ProductName, [string] $Architecture) { + $AkaMsDownloadLink = Get-AkaMSDownloadLink -Channel $NormalizedChannel -Quality $NormalizedQuality -Internal $Internal -Product $ProductName -Architecture $Architecture + + if ([string]::IsNullOrEmpty($AkaMsDownloadLink)) { + if (-not [string]::IsNullOrEmpty($NormalizedQuality)) { + # if quality is specified - exit with error - there is no fallback approach + Say-Error "Failed to locate the latest version in the channel '$NormalizedChannel' with '$NormalizedQuality' quality for '$ProductName', os: 'win', architecture: '$Architecture'." + Say-Error "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support." 
+ throw "aka.ms link resolution failure" + } + Say-Verbose "Falling back to latest.version file approach." + return ($null, $null, $null) + } else { - $DownloadFailed = $true + Say-Verbose "Retrieved primary named payload URL from aka.ms link: '$AkaMsDownloadLink'." + Say-Verbose "Downloading using legacy url will not be attempted." + + #get version from the path + $pathParts = $AkaMsDownloadLink.Split('/') + if ($pathParts.Length -ge 2) { + $SpecificVersion = $pathParts[$pathParts.Length - 2] + Say-Verbose "Version: '$SpecificVersion'." + } + else { + Say-Error "Failed to extract the version from download link '$AkaMsDownloadLink'." + return ($null, $null, $null) + } + + #retrieve effective (product) version + $EffectiveVersion = Get-Product-Version -SpecificVersion $SpecificVersion -PackageDownloadLink $AkaMsDownloadLink + Say-Verbose "Product version: '$EffectiveVersion'." + + return ($AkaMsDownloadLink, $SpecificVersion, $EffectiveVersion); } } -if ($DownloadFailed) { - if (($PrimaryDownloadStatusCode -eq 404) -and ((-not $LegacyDownloadLink) -or ($LegacyDownloadStatusCode -eq 404))) { - throw "Could not find `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support" - } else { - # 404-NotFound is an expected response if it goes from only one of the links, do not show that error. - # If primary path is available (not 404-NotFound) then show the primary error else show the legacy error. - if ($PrimaryDownloadStatusCode -ne 404) { - throw "Could not download `"$assetName`" with version = $SpecificVersion`r`n$PrimaryDownloadFailedMsg" - } - if (($LegacyDownloadLink) -and ($LegacyDownloadStatusCode -ne 404)) { - throw "Could not download `"$assetName`" with version = $SpecificVersion`r`n$LegacyDownloadFailedMsg" - } - throw "Could not download `"$assetName`" with version = $SpecificVersion" +function Get-Feeds-To-Use() { + $feeds = @( + "https://builds.dotnet.microsoft.com/dotnet" + "https://ci.dot.net/public" + ) + + if (-not [string]::IsNullOrEmpty($AzureFeed)) { + $feeds = @($AzureFeed) + } + + if (-not [string]::IsNullOrEmpty($UncachedFeed)) { + $feeds = @($UncachedFeed) + } + + Write-Verbose "Initialized feeds: $feeds" + + return $feeds +} + +function Resolve-AssetName-And-RelativePath([string] $Runtime) { + + if ($Runtime -eq "dotnet") { + $assetName = ".NET Core Runtime" + $dotnetPackageRelativePath = "shared\Microsoft.NETCore.App" + } + elseif ($Runtime -eq "aspnetcore") { + $assetName = "ASP.NET Core Runtime" + $dotnetPackageRelativePath = "shared\Microsoft.AspNetCore.App" + } + elseif ($Runtime -eq "windowsdesktop") { + $assetName = ".NET Core Windows Desktop Runtime" + $dotnetPackageRelativePath = "shared\Microsoft.WindowsDesktop.App" + } + elseif (-not $Runtime) { + $assetName = ".NET Core SDK" + $dotnetPackageRelativePath = "sdk" + } + else { + throw "Invalid value for `$Runtime" + } + + return ($assetName, $dotnetPackageRelativePath) +} + +function Prepare-Install-Directory { + $diskSpaceWarning = "Failed to check the disk space. 
Installation will continue, but it may fail if you do not have enough disk space."; + + if ($PSVersionTable.PSVersion.Major -lt 7) { + Say-Verbose $diskSpaceWarning + return + } + + New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null + + $installDrive = $((Get-Item $InstallRoot -Force).PSDrive.Name); + $diskInfo = $null + try { + $diskInfo = Get-PSDrive -Name $installDrive + } + catch { + Say-Warning $diskSpaceWarning + } + + # The check is relevant for PS version >= 7, the result can be irrelevant for older versions. See https://github.com/PowerShell/PowerShell/issues/12442. + if ( ($null -ne $diskInfo) -and ($diskInfo.Free / 1MB -le 100)) { + throw "There is not enough disk space on drive ${installDrive}:" } } -Say "Extracting zip from $DownloadLink" -Extract-Dotnet-Package -ZipPath $ZipPath -OutPath $InstallRoot +if ($Help) { + Get-Help $PSCommandPath -Examples + exit +} + +Say-Verbose "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:" +Say-Verbose "- The SDK needs to be installed without user interaction and without admin rights." +Say-Verbose "- The SDK installation doesn't need to persist across multiple CI runs." +Say-Verbose "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.`r`n" + +if ($SharedRuntime -and (-not $Runtime)) { + $Runtime = "dotnet" +} + +$OverrideNonVersionedFiles = !$SkipNonVersionedFiles + +Measure-Action "Product discovery" { + $script:CLIArchitecture = Get-CLIArchitecture-From-Architecture $Architecture + $script:NormalizedQuality = Get-NormalizedQuality $Quality + Say-Verbose "Normalized quality: '$NormalizedQuality'" + $script:NormalizedChannel = Get-NormalizedChannel $Channel + Say-Verbose "Normalized channel: '$NormalizedChannel'" + $script:NormalizedProduct = Get-NormalizedProduct $Runtime + Say-Verbose "Normalized product: '$NormalizedProduct'" + $script:FeedCredential = ValidateFeedCredential $FeedCredential +} + +$InstallRoot = Resolve-Installation-Path $InstallDir +if (-not (Test-User-Write-Access $InstallRoot)) { + Say-Error "The current user doesn't have write access to the installation root '$InstallRoot' to install .NET. Please try specifying a different installation directory using the -InstallDir parameter, or ensure the selected directory has the appropriate permissions." + throw +} +Say-Verbose "InstallRoot: $InstallRoot" +$ScriptName = $MyInvocation.MyCommand.Name +($assetName, $dotnetPackageRelativePath) = Resolve-AssetName-And-RelativePath -Runtime $Runtime + +$feeds = Get-Feeds-To-Use +$DownloadLinks = @() + +if ($Version.ToLowerInvariant() -ne "latest" -and -not [string]::IsNullOrEmpty($Quality)) { + throw "Quality and Version options are not allowed to be specified simultaneously. See https:// learn.microsoft.com/dotnet/core/tools/dotnet-install-script#options for details." +} + +# aka.ms links can only be used if the user did not request a specific version via the command line or a global.json file. 
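# Illustrative aside: a minimal sketch of how the aka.ms primary link is composed from the
# normalized channel/quality/product/architecture values resolved above. The concrete values
# ('8.0', 'daily', 'dotnet-sdk', 'x64') and the $exampleAkaMsLink name are assumptions made
# purely for illustration and are not part of this change.
$exampleAkaMsLink = "https://aka.ms/dotnet"        # '/internal' is appended first when -Internal is set
$exampleAkaMsLink += "/8.0"                        # normalized channel
$exampleAkaMsLink += "/daily"                      # normalized quality; omitted when empty (e.g. 'ga' normalizes to '')
$exampleAkaMsLink += "/dotnet-sdk-win-x64.zip"     # "$Product-win-$Architecture.zip"
# Yields https://aka.ms/dotnet/8.0/daily/dotnet-sdk-win-x64.zip, which Get-AkaMSDownloadLink then
# resolves by following 301 redirects (without appending the feed credential) to the payload URL.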
+if ([string]::IsNullOrEmpty($JSonFile) -and ($Version -eq "latest")) { + ($DownloadLink, $SpecificVersion, $EffectiveVersion) = Get-AkaMsLink-And-Version $NormalizedChannel $NormalizedQuality $Internal $NormalizedProduct $CLIArchitecture + + if ($null -ne $DownloadLink) { + $DownloadLinks += New-Object PSObject -Property @{downloadLink = "$DownloadLink"; specificVersion = "$SpecificVersion"; effectiveVersion = "$EffectiveVersion"; type = 'aka.ms' } + Say-Verbose "Generated aka.ms link $DownloadLink with version $EffectiveVersion" + + if (-Not $DryRun) { + Say-Verbose "Checking if the version $EffectiveVersion is already installed" + if (Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $EffectiveVersion) { + Say "$assetName with version '$EffectiveVersion' is already installed." + Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot + return + } + } + } +} + +# Primary and legacy links cannot be used if a quality was specified. +# If we already have an aka.ms link, no need to search the blob feeds. +if ([string]::IsNullOrEmpty($NormalizedQuality) -and 0 -eq $DownloadLinks.count) { + foreach ($feed in $feeds) { + try { + $SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $feed -Channel $Channel -Version $Version -JSonFile $JSonFile + $DownloadLink, $EffectiveVersion = Get-Download-Link -AzureFeed $feed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture + $LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $feed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture + + $DownloadLinks += New-Object PSObject -Property @{downloadLink = "$DownloadLink"; specificVersion = "$SpecificVersion"; effectiveVersion = "$EffectiveVersion"; type = 'primary' } + Say-Verbose "Generated primary link $DownloadLink with version $EffectiveVersion" + + if (-not [string]::IsNullOrEmpty($LegacyDownloadLink)) { + $DownloadLinks += New-Object PSObject -Property @{downloadLink = "$LegacyDownloadLink"; specificVersion = "$SpecificVersion"; effectiveVersion = "$EffectiveVersion"; type = 'legacy' } + Say-Verbose "Generated legacy link $LegacyDownloadLink with version $EffectiveVersion" + } + + if (-Not $DryRun) { + Say-Verbose "Checking if the version $EffectiveVersion is already installed" + if (Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $EffectiveVersion) { + Say "$assetName with version '$EffectiveVersion' is already installed." + Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot + return + } + } + } + catch { + Say-Verbose "Failed to acquire download links from feed $feed. Exception: $_" + } + } +} + +if ($DownloadLinks.count -eq 0) { + throw "Failed to resolve the exact version number." +} + +if ($DryRun) { + PrintDryRunOutput $MyInvocation $DownloadLinks + return +} + +Measure-Action "Installation directory preparation" { Prepare-Install-Directory } + +Say-Verbose "Zip path: $ZipPath" + +$DownloadSucceeded = $false +$DownloadedLink = $null +$ErrorMessages = @() + +foreach ($link in $DownloadLinks) { + Say-Verbose "Downloading `"$($link.type)`" link $($link.downloadLink)" + + try { + Measure-Action "Package download" { DownloadFile -Source $link.downloadLink -OutPath $ZipPath } + Say-Verbose "Download succeeded." 
+ $DownloadSucceeded = $true + $DownloadedLink = $link + break + } + catch { + $StatusCode = $null + $ErrorMessage = $null + + if ($PSItem.Exception.Data.Contains("StatusCode")) { + $StatusCode = $PSItem.Exception.Data["StatusCode"] + } + + if ($PSItem.Exception.Data.Contains("ErrorMessage")) { + $ErrorMessage = $PSItem.Exception.Data["ErrorMessage"] + } + else { + $ErrorMessage = $PSItem.Exception.Message + } + + Say-Verbose "Download failed with status code $StatusCode. Error message: $ErrorMessage" + $ErrorMessages += "Downloading from `"$($link.type)`" link has failed with error:`nUri: $($link.downloadLink)`nStatusCode: $StatusCode`nError: $ErrorMessage" + } + + # This link failed. Clean up before trying the next one. + SafeRemoveFile -Path $ZipPath +} + +if (-not $DownloadSucceeded) { + foreach ($ErrorMessage in $ErrorMessages) { + Say-Error $ErrorMessages + } + + throw "Could not find `"$assetName`" with version = $($DownloadLinks[0].effectiveVersion)`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET support" +} + +Say "Extracting the archive." +Measure-Action "Package extraction" { Extract-Dotnet-Package -ZipPath $ZipPath -OutPath $InstallRoot } # Check if the SDK version is installed; if not, fail the installation. $isAssetInstalled = $false # if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed. -if ($SpecificVersion -Match "rtm" -or $SpecificVersion -Match "servicing") { - $ReleaseVersion = $SpecificVersion.Split("-")[0] +if ($DownloadedLink.effectiveVersion -Match "rtm" -or $DownloadedLink.effectiveVersion -Match "servicing") { + $ReleaseVersion = $DownloadedLink.effectiveVersion.Split("-")[0] Say-Verbose "Checking installation: version = $ReleaseVersion" $isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $ReleaseVersion } # Check if the SDK version is installed. if (!$isAssetInstalled) { - Say-Verbose "Checking installation: version = $SpecificVersion" - $isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion + Say-Verbose "Checking installation: version = $($DownloadedLink.effectiveVersion)" + $isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $DownloadedLink.effectiveVersion } # Version verification failed. More likely something is wrong either with the downloaded content or with the verification algorithm. if (!$isAssetInstalled) { - Say-Error "Failed to verify the version of installed `"$assetName`".`nInstallation source: $DownloadLink.`nInstallation location: $InstallRoot.`nReport the bug at https://github.com/dotnet/install-scripts/issues." - throw "`"$assetName`" with version = $SpecificVersion failed to install with an unknown error." + Say-Error "Failed to verify the version of installed `"$assetName`".`nInstallation source: $($DownloadedLink.downloadLink).`nInstallation location: $InstallRoot.`nReport the bug at https://github.com/dotnet/install-scripts/issues." + throw "`"$assetName`" with version = $($DownloadedLink.effectiveVersion) failed to install with an unknown error." 
} -SafeRemoveFile -Path $ZipPath +if (-not $KeepZip) { + SafeRemoveFile -Path $ZipPath +} -Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath +Measure-Action "Setting up shell environment" { Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot } -Say "Note that the script does not resolve dependencies during installation." -Say "To check the list of dependencies, go to https://docs.microsoft.com/dotnet/core/install/windows#dependencies" +Say "Note that the script does not ensure your Windows version is supported during the installation." +Say "To check the list of supported versions, go to https://learn.microsoft.com/dotnet/core/install/windows#supported-versions" +Say "Installed version is $($DownloadedLink.effectiveVersion)" Say "Installation finished" + # SIG # Begin signature block -# MIIjjwYJKoZIhvcNAQcCoIIjgDCCI3wCAQExDzANBglghkgBZQMEAgEFADB5Bgor +# MIIoRgYJKoZIhvcNAQcCoIIoNzCCKDMCAQExDzANBglghkgBZQMEAgEFADB5Bgor # BgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG -# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCCNsnhcJvx/hXmM -# w8KjuvvIMDBFonhg9XJFc1QwfTyH4aCCDYEwggX/MIID56ADAgECAhMzAAABh3IX -# chVZQMcJAAAAAAGHMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNVBAYTAlVTMRMwEQYD +# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCAA6hOL3sfG/4jH +# iO4VqZoOTVqC+yp2rOhb1M2cc+ic7KCCDXYwggX0MIID3KADAgECAhMzAAAEBGx0 +# Bv9XKydyAAAAAAQEMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNVBAYTAlVTMRMwEQYD # VQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNy # b3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01pY3Jvc29mdCBDb2RlIFNpZ25p -# bmcgUENBIDIwMTEwHhcNMjAwMzA0MTgzOTQ3WhcNMjEwMzAzMTgzOTQ3WjB0MQsw +# bmcgUENBIDIwMTEwHhcNMjQwOTEyMjAxMTE0WhcNMjUwOTExMjAxMTE0WjB0MQsw # CQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9u # ZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMR4wHAYDVQQDExVNaWNy # b3NvZnQgQ29ycG9yYXRpb24wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -# AQDOt8kLc7P3T7MKIhouYHewMFmnq8Ayu7FOhZCQabVwBp2VS4WyB2Qe4TQBT8aB -# znANDEPjHKNdPT8Xz5cNali6XHefS8i/WXtF0vSsP8NEv6mBHuA2p1fw2wB/F0dH -# sJ3GfZ5c0sPJjklsiYqPw59xJ54kM91IOgiO2OUzjNAljPibjCWfH7UzQ1TPHc4d -# weils8GEIrbBRb7IWwiObL12jWT4Yh71NQgvJ9Fn6+UhD9x2uk3dLj84vwt1NuFQ -# itKJxIV0fVsRNR3abQVOLqpDugbr0SzNL6o8xzOHL5OXiGGwg6ekiXA1/2XXY7yV -# Fc39tledDtZjSjNbex1zzwSXAgMBAAGjggF+MIIBejAfBgNVHSUEGDAWBgorBgEE -# AYI3TAgBBggrBgEFBQcDAzAdBgNVHQ4EFgQUhov4ZyO96axkJdMjpzu2zVXOJcsw -# UAYDVR0RBEkwR6RFMEMxKTAnBgNVBAsTIE1pY3Jvc29mdCBPcGVyYXRpb25zIFB1 -# ZXJ0byBSaWNvMRYwFAYDVQQFEw0yMzAwMTIrNDU4Mzg1MB8GA1UdIwQYMBaAFEhu -# ZOVQBdOCqhc3NyK1bajKdQKVMFQGA1UdHwRNMEswSaBHoEWGQ2h0dHA6Ly93d3cu -# bWljcm9zb2Z0LmNvbS9wa2lvcHMvY3JsL01pY0NvZFNpZ1BDQTIwMTFfMjAxMS0w -# Ny0wOC5jcmwwYQYIKwYBBQUHAQEEVTBTMFEGCCsGAQUFBzAChkVodHRwOi8vd3d3 -# Lm1pY3Jvc29mdC5jb20vcGtpb3BzL2NlcnRzL01pY0NvZFNpZ1BDQTIwMTFfMjAx -# MS0wNy0wOC5jcnQwDAYDVR0TAQH/BAIwADANBgkqhkiG9w0BAQsFAAOCAgEAixmy -# S6E6vprWD9KFNIB9G5zyMuIjZAOuUJ1EK/Vlg6Fb3ZHXjjUwATKIcXbFuFC6Wr4K -# NrU4DY/sBVqmab5AC/je3bpUpjtxpEyqUqtPc30wEg/rO9vmKmqKoLPT37svc2NV -# BmGNl+85qO4fV/w7Cx7J0Bbqk19KcRNdjt6eKoTnTPHBHlVHQIHZpMxacbFOAkJr -# qAVkYZdz7ikNXTxV+GRb36tC4ByMNxE2DF7vFdvaiZP0CVZ5ByJ2gAhXMdK9+usx -# zVk913qKde1OAuWdv+rndqkAIm8fUlRnr4saSCg7cIbUwCCf116wUJ7EuJDg0vHe -# yhnCeHnBbyH3RZkHEi2ofmfgnFISJZDdMAeVZGVOh20Jp50XBzqokpPzeZ6zc1/g -# yILNyiVgE+RPkjnUQshd1f1PMgn3tns2Cz7bJiVUaqEO3n9qRFgy5JuLae6UweGf -# AeOo3dgLZxikKzYs3hDMaEtJq8IP71cX7QXe6lnMmXU/Hdfz2p897Zd+kU+vZvKI -# 3cwLfuVQgK2RZ2z+Kc3K3dRPz2rXycK5XCuRZmvGab/WbrZiC7wJQapgBodltMI5 -# 
GMdFrBg9IeF7/rP4EqVQXeKtevTlZXjpuNhhjuR+2DMt/dWufjXpiW91bo3aH6Ea -# jOALXmoxgltCp1K7hrS6gmsvj94cLRf50QQ4U8Qwggd6MIIFYqADAgECAgphDpDS -# AAAAAAADMA0GCSqGSIb3DQEBCwUAMIGIMQswCQYDVQQGEwJVUzETMBEGA1UECBMK -# V2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0 -# IENvcnBvcmF0aW9uMTIwMAYDVQQDEylNaWNyb3NvZnQgUm9vdCBDZXJ0aWZpY2F0 -# ZSBBdXRob3JpdHkgMjAxMTAeFw0xMTA3MDgyMDU5MDlaFw0yNjA3MDgyMTA5MDla -# MH4xCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdS -# ZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMT -# H01pY3Jvc29mdCBDb2RlIFNpZ25pbmcgUENBIDIwMTEwggIiMA0GCSqGSIb3DQEB -# AQUAA4ICDwAwggIKAoICAQCr8PpyEBwurdhuqoIQTTS68rZYIZ9CGypr6VpQqrgG -# OBoESbp/wwwe3TdrxhLYC/A4wpkGsMg51QEUMULTiQ15ZId+lGAkbK+eSZzpaF7S -# 35tTsgosw6/ZqSuuegmv15ZZymAaBelmdugyUiYSL+erCFDPs0S3XdjELgN1q2jz -# y23zOlyhFvRGuuA4ZKxuZDV4pqBjDy3TQJP4494HDdVceaVJKecNvqATd76UPe/7 -# 4ytaEB9NViiienLgEjq3SV7Y7e1DkYPZe7J7hhvZPrGMXeiJT4Qa8qEvWeSQOy2u -# M1jFtz7+MtOzAz2xsq+SOH7SnYAs9U5WkSE1JcM5bmR/U7qcD60ZI4TL9LoDho33 -# X/DQUr+MlIe8wCF0JV8YKLbMJyg4JZg5SjbPfLGSrhwjp6lm7GEfauEoSZ1fiOIl -# XdMhSz5SxLVXPyQD8NF6Wy/VI+NwXQ9RRnez+ADhvKwCgl/bwBWzvRvUVUvnOaEP -# 6SNJvBi4RHxF5MHDcnrgcuck379GmcXvwhxX24ON7E1JMKerjt/sW5+v/N2wZuLB -# l4F77dbtS+dJKacTKKanfWeA5opieF+yL4TXV5xcv3coKPHtbcMojyyPQDdPweGF -# RInECUzF1KVDL3SV9274eCBYLBNdYJWaPk8zhNqwiBfenk70lrC8RqBsmNLg1oiM -# CwIDAQABo4IB7TCCAekwEAYJKwYBBAGCNxUBBAMCAQAwHQYDVR0OBBYEFEhuZOVQ -# BdOCqhc3NyK1bajKdQKVMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIAQwBBMAsGA1Ud -# DwQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFHItOgIxkEO5FAVO -# 4eqnxzHRI4k0MFoGA1UdHwRTMFEwT6BNoEuGSWh0dHA6Ly9jcmwubWljcm9zb2Z0 -# LmNvbS9wa2kvY3JsL3Byb2R1Y3RzL01pY1Jvb0NlckF1dDIwMTFfMjAxMV8wM18y -# Mi5jcmwwXgYIKwYBBQUHAQEEUjBQME4GCCsGAQUFBzAChkJodHRwOi8vd3d3Lm1p -# Y3Jvc29mdC5jb20vcGtpL2NlcnRzL01pY1Jvb0NlckF1dDIwMTFfMjAxMV8wM18y -# Mi5jcnQwgZ8GA1UdIASBlzCBlDCBkQYJKwYBBAGCNy4DMIGDMD8GCCsGAQUFBwIB -# FjNodHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20vcGtpb3BzL2RvY3MvcHJpbWFyeWNw -# cy5odG0wQAYIKwYBBQUHAgIwNB4yIB0ATABlAGcAYQBsAF8AcABvAGwAaQBjAHkA -# XwBzAHQAYQB0AGUAbQBlAG4AdAAuIB0wDQYJKoZIhvcNAQELBQADggIBAGfyhqWY -# 4FR5Gi7T2HRnIpsLlhHhY5KZQpZ90nkMkMFlXy4sPvjDctFtg/6+P+gKyju/R6mj -# 82nbY78iNaWXXWWEkH2LRlBV2AySfNIaSxzzPEKLUtCw/WvjPgcuKZvmPRul1LUd -# d5Q54ulkyUQ9eHoj8xN9ppB0g430yyYCRirCihC7pKkFDJvtaPpoLpWgKj8qa1hJ -# Yx8JaW5amJbkg/TAj/NGK978O9C9Ne9uJa7lryft0N3zDq+ZKJeYTQ49C/IIidYf -# wzIY4vDFLc5bnrRJOQrGCsLGra7lstnbFYhRRVg4MnEnGn+x9Cf43iw6IGmYslmJ -# aG5vp7d0w0AFBqYBKig+gj8TTWYLwLNN9eGPfxxvFX1Fp3blQCplo8NdUmKGwx1j -# NpeG39rz+PIWoZon4c2ll9DuXWNB41sHnIc+BncG0QaxdR8UvmFhtfDcxhsEvt9B -# xw4o7t5lL+yX9qFcltgA1qFGvVnzl6UJS0gQmYAf0AApxbGbpT9Fdx41xtKiop96 -# eiL6SJUfq/tHI4D1nvi/a7dLl+LrdXga7Oo3mXkYS//WsyNodeav+vyL6wuA6mk7 -# r/ww7QRMjt/fdW1jkT3RnVZOT7+AVyKheBEyIXrvQQqxP/uozKRdwaGIm1dxVk5I -# RcBCyZt2WwqASGv9eZ/BvW1taslScxMNelDNMYIVZDCCFWACAQEwgZUwfjELMAkG -# A1UEBhMCVVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQx -# HjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEoMCYGA1UEAxMfTWljcm9z -# b2Z0IENvZGUgU2lnbmluZyBQQ0EgMjAxMQITMwAAAYdyF3IVWUDHCQAAAAABhzAN -# BglghkgBZQMEAgEFAKCBrjAZBgkqhkiG9w0BCQMxDAYKKwYBBAGCNwIBBDAcBgor -# BgEEAYI3AgELMQ4wDAYKKwYBBAGCNwIBFTAvBgkqhkiG9w0BCQQxIgQgpT/bxWwe -# aW0EinKMWCAzDXUjwXkIHldYzR6lw4/1Pc0wQgYKKwYBBAGCNwIBDDE0MDKgFIAS -# AE0AaQBjAHIAbwBzAG8AZgB0oRqAGGh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbTAN -# BgkqhkiG9w0BAQEFAASCAQCHd7sSQVq0YDg8QDx6/kLWn3s6jtvvIDCCgsO9spHM -# quPd4FPbG67DCsKDClekQs52qrtRO3Zo+JMnCw4j3bS+gZHzeJr2shbftOrpsFoD -# l7OPcUmtrqul9dkQCOp8t0MP3ls0n96/YyNy6lz4BAlTdkdDx957uAxalKaCIBzb -# 
R9QyppOKIfNFvwD4EI5KI6tpmSy/uH8SrRg7ZExAYZl6J6R18WkL7KHn649lPoAQ -# ujwrIXH10xOJops45ILGzKWQcHmCzLJGYapL4VHUuK+73nT+9ZROGHdk/PyvIcdw -# iERa+C06v305t3DA+CuHFy1tvyw7IFF6RVbLZPwxrJjToYIS7jCCEuoGCisGAQQB -# gjcDAwExghLaMIIS1gYJKoZIhvcNAQcCoIISxzCCEsMCAQMxDzANBglghkgBZQME -# AgEFADCCAVUGCyqGSIb3DQEJEAEEoIIBRASCAUAwggE8AgEBBgorBgEEAYRZCgMB -# MDEwDQYJYIZIAWUDBAIBBQAEIOCaTmvM1AP0WaEVqzKaaCu/R+bTlR4kCrM/ZXsb -# /eNOAgZgGeLsMwsYEzIwMjEwMjAzMjExNzQ5LjU5MVowBIACAfSggdSkgdEwgc4x -# CzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRt -# b25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKTAnBgNVBAsTIE1p -# Y3Jvc29mdCBPcGVyYXRpb25zIFB1ZXJ0byBSaWNvMSYwJAYDVQQLEx1UaGFsZXMg -# VFNTIEVTTjo4OTdBLUUzNTYtMTcwMTElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUt -# U3RhbXAgU2VydmljZaCCDkEwggT1MIID3aADAgECAhMzAAABLCKvRZd1+RvuAAAA -# AAEsMA0GCSqGSIb3DQEBCwUAMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNo -# aW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29y -# cG9yYXRpb24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEw -# MB4XDTE5MTIxOTAxMTUwM1oXDTIxMDMxNzAxMTUwM1owgc4xCzAJBgNVBAYTAlVT -# MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQK -# ExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKTAnBgNVBAsTIE1pY3Jvc29mdCBPcGVy -# YXRpb25zIFB1ZXJ0byBSaWNvMSYwJAYDVQQLEx1UaGFsZXMgVFNTIEVTTjo4OTdB -# LUUzNTYtMTcwMTElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUtU3RhbXAgU2Vydmlj -# ZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPK1zgSSq+MxAYo3qpCt -# QDxSMPPJy6mm/wfEJNjNUnYtLFBwl1BUS5trEk/t41ldxITKehs+ABxYqo4Qxsg3 -# Gy1ugKiwHAnYiiekfC+ZhptNFgtnDZIn45zC0AlVr/6UfLtsLcHCh1XElLUHfEC0 -# nBuQcM/SpYo9e3l1qY5NdMgDGxCsmCKdiZfYXIu+U0UYIBhdzmSHnB3fxZOBVcr5 -# htFHEBBNt/rFJlm/A4yb8oBsp+Uf0p5QwmO/bCcdqB15JpylOhZmWs0sUfJKlK9E -# rAhBwGki2eIRFKsQBdkXS9PWpF1w2gIJRvSkDEaCf+lbGTPdSzHSbfREWOF9wY3i -# Yj8CAwEAAaOCARswggEXMB0GA1UdDgQWBBRRahZSGfrCQhCyIyGH9DkiaW7L0zAf -# BgNVHSMEGDAWgBTVYzpcijGQ80N7fEYbxTNoWoVtVTBWBgNVHR8ETzBNMEugSaBH -# hkVodHRwOi8vY3JsLm1pY3Jvc29mdC5jb20vcGtpL2NybC9wcm9kdWN0cy9NaWNU -# aW1TdGFQQ0FfMjAxMC0wNy0wMS5jcmwwWgYIKwYBBQUHAQEETjBMMEoGCCsGAQUF -# BzAChj5odHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20vcGtpL2NlcnRzL01pY1RpbVN0 -# YVBDQV8yMDEwLTA3LTAxLmNydDAMBgNVHRMBAf8EAjAAMBMGA1UdJQQMMAoGCCsG -# AQUFBwMIMA0GCSqGSIb3DQEBCwUAA4IBAQBPFxHIwi4vAH49w9Svmz6K3tM55RlW -# 5pPeULXdut2Rqy6Ys0+VpZsbuaEoxs6Z1C3hMbkiqZFxxyltxJpuHTyGTg61zfNI -# F5n6RsYF3s7IElDXNfZznF1/2iWc6uRPZK8rxxUJ/7emYXZCYwuUY0XjsCpP9pbR -# RKeJi6r5arSyI+NfKxvgoM21JNt1BcdlXuAecdd/k8UjxCscffanoK2n6LFw1PcZ -# lEO7NId7o+soM2C0QY5BYdghpn7uqopB6ixyFIIkDXFub+1E7GmAEwfU6VwEHL7y -# 9rNE8bd+JrQs+yAtkkHy9FmXg/PsGq1daVzX1So7CJ6nyphpuHSN3VfTMIIGcTCC -# BFmgAwIBAgIKYQmBKgAAAAAAAjANBgkqhkiG9w0BAQsFADCBiDELMAkGA1UEBhMC -# VVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNV -# BAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEyMDAGA1UEAxMpTWljcm9zb2Z0IFJv -# b3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTAwHhcNMTAwNzAxMjEzNjU1WhcN -# MjUwNzAxMjE0NjU1WjB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3Rv -# bjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0 -# aW9uMSYwJAYDVQQDEx1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMDCCASIw -# DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKkdDbx3EYo6IOz8E5f1+n9plGt0 -# VBDVpQoAgoX77XxoSyxfxcPlYcJ2tz5mK1vwFVMnBDEfQRsalR3OCROOfGEwWbEw -# RA/xYIiEVEMM1024OAizQt2TrNZzMFcmgqNFDdDq9UeBzb8kYDJYYEbyWEeGMoQe -# dGFnkV+BVLHPk0ySwcSmXdFhE24oxhr5hoC732H8RsEnHSRnEnIaIYqvS2SJUGKx -# Xf13Hz3wV3WsvYpCTUBR0Q+cBj5nf/VmwAOWRH7v0Ev9buWayrGo8noqCjHw2k4G -# kbaICDXoeByw6ZnNPOcvRLqn9NxkvaQBwSAJk3jN/LzAyURdXhacAQVPIk0CAwEA -# AaOCAeYwggHiMBAGCSsGAQQBgjcVAQQDAgEAMB0GA1UdDgQWBBTVYzpcijGQ80N7 -# 
fEYbxTNoWoVtVTAZBgkrBgEEAYI3FAIEDB4KAFMAdQBiAEMAQTALBgNVHQ8EBAMC -# AYYwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBTV9lbLj+iiXGJo0T2UkFvX -# zpoYxDBWBgNVHR8ETzBNMEugSaBHhkVodHRwOi8vY3JsLm1pY3Jvc29mdC5jb20v -# cGtpL2NybC9wcm9kdWN0cy9NaWNSb29DZXJBdXRfMjAxMC0wNi0yMy5jcmwwWgYI -# KwYBBQUHAQEETjBMMEoGCCsGAQUFBzAChj5odHRwOi8vd3d3Lm1pY3Jvc29mdC5j -# b20vcGtpL2NlcnRzL01pY1Jvb0NlckF1dF8yMDEwLTA2LTIzLmNydDCBoAYDVR0g -# AQH/BIGVMIGSMIGPBgkrBgEEAYI3LgMwgYEwPQYIKwYBBQUHAgEWMWh0dHA6Ly93 -# d3cubWljcm9zb2Z0LmNvbS9QS0kvZG9jcy9DUFMvZGVmYXVsdC5odG0wQAYIKwYB -# BQUHAgIwNB4yIB0ATABlAGcAYQBsAF8AUABvAGwAaQBjAHkAXwBTAHQAYQB0AGUA -# bQBlAG4AdAAuIB0wDQYJKoZIhvcNAQELBQADggIBAAfmiFEN4sbgmD+BcQM9naOh -# IW+z66bM9TG+zwXiqf76V20ZMLPCxWbJat/15/B4vceoniXj+bzta1RXCCtRgkQS -# +7lTjMz0YBKKdsxAQEGb3FwX/1z5Xhc1mCRWS3TvQhDIr79/xn/yN31aPxzymXlK -# kVIArzgPF/UveYFl2am1a+THzvbKegBvSzBEJCI8z+0DpZaPWSm8tv0E4XCfMkon -# /VWvL/625Y4zu2JfmttXQOnxzplmkIz/amJ/3cVKC5Em4jnsGUpxY517IW3DnKOi -# PPp/fZZqkHimbdLhnPkd/DjYlPTGpQqWhqS9nhquBEKDuLWAmyI4ILUl5WTs9/S/ -# fmNZJQ96LjlXdqJxqgaKD4kWumGnEcua2A5HmoDF0M2n0O99g/DhO3EJ3110mCII -# YdqwUB5vvfHhAN/nMQekkzr3ZUd46PioSKv33nJ+YWtvd6mBy6cJrDm77MbL2IK0 -# cs0d9LiFAR6A+xuJKlQ5slvayA1VmXqHczsI5pgt6o3gMy4SKfXAL1QnIffIrE7a -# KLixqduWsqdCosnPGUFN4Ib5KpqjEWYw07t0MkvfY3v1mYovG8chr1m1rtxEPJdQ -# cdeh0sVV42neV8HR3jDA/czmTfsNv11P6Z0eGTgvvM9YBS7vDaBQNdrvCScc1bN+ -# NR4Iuto229Nfj950iEkSoYICzzCCAjgCAQEwgfyhgdSkgdEwgc4xCzAJBgNVBAYT -# AlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYD -# VQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKTAnBgNVBAsTIE1pY3Jvc29mdCBP -# cGVyYXRpb25zIFB1ZXJ0byBSaWNvMSYwJAYDVQQLEx1UaGFsZXMgVFNTIEVTTjo4 -# OTdBLUUzNTYtMTcwMTElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUtU3RhbXAgU2Vy -# dmljZaIjCgEBMAcGBSsOAwIaAxUADE5OKSMoNx/mYxYWap1RTOohbJ2ggYMwgYCk -# fjB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMH -# UmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSYwJAYDVQQD -# Ex1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMDANBgkqhkiG9w0BAQUFAAIF -# AOPFChkwIhgPMjAyMTAyMDMxNTQwMDlaGA8yMDIxMDIwNDE1NDAwOVowdDA6Bgor -# BgEEAYRZCgQBMSwwKjAKAgUA48UKGQIBADAHAgEAAgIXmDAHAgEAAgIRyTAKAgUA -# 48ZbmQIBADA2BgorBgEEAYRZCgQCMSgwJjAMBgorBgEEAYRZCgMCoAowCAIBAAID -# B6EgoQowCAIBAAIDAYagMA0GCSqGSIb3DQEBBQUAA4GBAHeeznL2n6HWCjHH94Fl -# hcdW6TEXzq4XNgp1Gx1W9F8gJ4x+SwoV7elJZkwgGffcpHomLvIY/VSuzsl1NgtJ -# TWM2UxoqSv58BBOrl4eGhH6kkg8Ucy2tdeK5T8cHa8pMkq2j9pFd2mRG/6VMk0dl -# Xz7Uy3Z6bZqkcABMyAfuAaGbMYIDDTCCAwkCAQEwgZMwfDELMAkGA1UEBhMCVVMx -# EzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNVBAoT -# FU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEmMCQGA1UEAxMdTWljcm9zb2Z0IFRpbWUt -# U3RhbXAgUENBIDIwMTACEzMAAAEsIq9Fl3X5G+4AAAAAASwwDQYJYIZIAWUDBAIB -# BQCgggFKMBoGCSqGSIb3DQEJAzENBgsqhkiG9w0BCRABBDAvBgkqhkiG9w0BCQQx -# IgQg/QYv7yp+354WTjWUIsXWndTEzXjaYjqwYjcBxCJKjdUwgfoGCyqGSIb3DQEJ -# EAIvMYHqMIHnMIHkMIG9BCBbn/0uFFh42hTM5XOoKdXevBaiSxmYK9Ilcn9nu5ZH -# 4TCBmDCBgKR+MHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAw -# DgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24x -# JjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEwAhMzAAABLCKv -# RZd1+RvuAAAAAAEsMCIEIIfIM3YbzHswb/Kj/qq1l1cHA6QBl+gEXYanUNJomrpT -# MA0GCSqGSIb3DQEBCwUABIIBAAwdcXssUZGO7ho5+NHLjIxLtQk543aKGo+lrRMY -# Q9abE1h/AaaNJl0iGxX4IihNWyfovSfYL3L4eODUBAu68tWSxeceRfWNsb/ZZfUi -# v89hpLssI/Gf1BEgNMA4zCuIGQiC8okusVumEpAhhvCEbSiTTTtBdolTnU/CAKui -# oxaU3R9XkKh1F4oAM26+dJ1J2BLQXPs5afNvvedDsZWNQUPK1sFF3JRfzxiTrwBW -# EJRyflev9gyDoqCHzippgb+6+eti1WTkcA9Q49GIT11S6LOAVqkSC9N7Nqf8ksh8 -# ARdwT8jigpsm+mj7lrVU9upDkhVYhKeO8oiZq95Q53Zkteo= -# SIG # End signature block +# 
AQC0KDfaY50MDqsEGdlIzDHBd6CqIMRQWW9Af1LHDDTuFjfDsvna0nEuDSYJmNyz +# NB10jpbg0lhvkT1AzfX2TLITSXwS8D+mBzGCWMM/wTpciWBV/pbjSazbzoKvRrNo +# DV/u9omOM2Eawyo5JJJdNkM2d8qzkQ0bRuRd4HarmGunSouyb9NY7egWN5E5lUc3 +# a2AROzAdHdYpObpCOdeAY2P5XqtJkk79aROpzw16wCjdSn8qMzCBzR7rvH2WVkvF +# HLIxZQET1yhPb6lRmpgBQNnzidHV2Ocxjc8wNiIDzgbDkmlx54QPfw7RwQi8p1fy +# 4byhBrTjv568x8NGv3gwb0RbAgMBAAGjggFzMIIBbzAfBgNVHSUEGDAWBgorBgEE +# AYI3TAgBBggrBgEFBQcDAzAdBgNVHQ4EFgQU8huhNbETDU+ZWllL4DNMPCijEU4w +# RQYDVR0RBD4wPKQ6MDgxHjAcBgNVBAsTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEW +# MBQGA1UEBRMNMjMwMDEyKzUwMjkyMzAfBgNVHSMEGDAWgBRIbmTlUAXTgqoXNzci +# tW2oynUClTBUBgNVHR8ETTBLMEmgR6BFhkNodHRwOi8vd3d3Lm1pY3Jvc29mdC5j +# b20vcGtpb3BzL2NybC9NaWNDb2RTaWdQQ0EyMDExXzIwMTEtMDctMDguY3JsMGEG +# CCsGAQUFBwEBBFUwUzBRBggrBgEFBQcwAoZFaHR0cDovL3d3dy5taWNyb3NvZnQu +# Y29tL3BraW9wcy9jZXJ0cy9NaWNDb2RTaWdQQ0EyMDExXzIwMTEtMDctMDguY3J0 +# MAwGA1UdEwEB/wQCMAAwDQYJKoZIhvcNAQELBQADggIBAIjmD9IpQVvfB1QehvpC +# Ge7QeTQkKQ7j3bmDMjwSqFL4ri6ae9IFTdpywn5smmtSIyKYDn3/nHtaEn0X1NBj +# L5oP0BjAy1sqxD+uy35B+V8wv5GrxhMDJP8l2QjLtH/UglSTIhLqyt8bUAqVfyfp +# h4COMRvwwjTvChtCnUXXACuCXYHWalOoc0OU2oGN+mPJIJJxaNQc1sjBsMbGIWv3 +# cmgSHkCEmrMv7yaidpePt6V+yPMik+eXw3IfZ5eNOiNgL1rZzgSJfTnvUqiaEQ0X +# dG1HbkDv9fv6CTq6m4Ty3IzLiwGSXYxRIXTxT4TYs5VxHy2uFjFXWVSL0J2ARTYL +# E4Oyl1wXDF1PX4bxg1yDMfKPHcE1Ijic5lx1KdK1SkaEJdto4hd++05J9Bf9TAmi +# u6EK6C9Oe5vRadroJCK26uCUI4zIjL/qG7mswW+qT0CW0gnR9JHkXCWNbo8ccMk1 +# sJatmRoSAifbgzaYbUz8+lv+IXy5GFuAmLnNbGjacB3IMGpa+lbFgih57/fIhamq +# 5VhxgaEmn/UjWyr+cPiAFWuTVIpfsOjbEAww75wURNM1Imp9NJKye1O24EspEHmb +# DmqCUcq7NqkOKIG4PVm3hDDED/WQpzJDkvu4FrIbvyTGVU01vKsg4UfcdiZ0fQ+/ +# V0hf8yrtq9CkB8iIuk5bBxuPMIIHejCCBWKgAwIBAgIKYQ6Q0gAAAAAAAzANBgkq +# hkiG9w0BAQsFADCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCldhc2hpbmd0b24x +# EDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlv +# bjEyMDAGA1UEAxMpTWljcm9zb2Z0IFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +# IDIwMTEwHhcNMTEwNzA4MjA1OTA5WhcNMjYwNzA4MjEwOTA5WjB+MQswCQYDVQQG +# EwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwG +# A1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSgwJgYDVQQDEx9NaWNyb3NvZnQg +# Q29kZSBTaWduaW5nIFBDQSAyMDExMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +# CgKCAgEAq/D6chAcLq3YbqqCEE00uvK2WCGfQhsqa+laUKq4BjgaBEm6f8MMHt03 +# a8YS2AvwOMKZBrDIOdUBFDFC04kNeWSHfpRgJGyvnkmc6Whe0t+bU7IKLMOv2akr +# rnoJr9eWWcpgGgXpZnboMlImEi/nqwhQz7NEt13YxC4Ddato88tt8zpcoRb0Rrrg +# OGSsbmQ1eKagYw8t00CT+OPeBw3VXHmlSSnnDb6gE3e+lD3v++MrWhAfTVYoonpy +# 4BI6t0le2O3tQ5GD2Xuye4Yb2T6xjF3oiU+EGvKhL1nkkDstrjNYxbc+/jLTswM9 +# sbKvkjh+0p2ALPVOVpEhNSXDOW5kf1O6nA+tGSOEy/S6A4aN91/w0FK/jJSHvMAh +# dCVfGCi2zCcoOCWYOUo2z3yxkq4cI6epZuxhH2rhKEmdX4jiJV3TIUs+UsS1Vz8k +# A/DRelsv1SPjcF0PUUZ3s/gA4bysAoJf28AVs70b1FVL5zmhD+kjSbwYuER8ReTB +# w3J64HLnJN+/RpnF78IcV9uDjexNSTCnq47f7Fufr/zdsGbiwZeBe+3W7UvnSSmn +# Eyimp31ngOaKYnhfsi+E11ecXL93KCjx7W3DKI8sj0A3T8HhhUSJxAlMxdSlQy90 +# lfdu+HggWCwTXWCVmj5PM4TasIgX3p5O9JawvEagbJjS4NaIjAsCAwEAAaOCAe0w +# ggHpMBAGCSsGAQQBgjcVAQQDAgEAMB0GA1UdDgQWBBRIbmTlUAXTgqoXNzcitW2o +# ynUClTAZBgkrBgEEAYI3FAIEDB4KAFMAdQBiAEMAQTALBgNVHQ8EBAMCAYYwDwYD +# VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBRyLToCMZBDuRQFTuHqp8cx0SOJNDBa +# BgNVHR8EUzBRME+gTaBLhklodHRwOi8vY3JsLm1pY3Jvc29mdC5jb20vcGtpL2Ny +# bC9wcm9kdWN0cy9NaWNSb29DZXJBdXQyMDExXzIwMTFfMDNfMjIuY3JsMF4GCCsG +# AQUFBwEBBFIwUDBOBggrBgEFBQcwAoZCaHR0cDovL3d3dy5taWNyb3NvZnQuY29t +# L3BraS9jZXJ0cy9NaWNSb29DZXJBdXQyMDExXzIwMTFfMDNfMjIuY3J0MIGfBgNV +# HSAEgZcwgZQwgZEGCSsGAQQBgjcuAzCBgzA/BggrBgEFBQcCARYzaHR0cDovL3d3 +# dy5taWNyb3NvZnQuY29tL3BraW9wcy9kb2NzL3ByaW1hcnljcHMuaHRtMEAGCCsG +# 
AQUFBwICMDQeMiAdAEwAZQBnAGEAbABfAHAAbwBsAGkAYwB5AF8AcwB0AGEAdABl +# AG0AZQBuAHQALiAdMA0GCSqGSIb3DQEBCwUAA4ICAQBn8oalmOBUeRou09h0ZyKb +# C5YR4WOSmUKWfdJ5DJDBZV8uLD74w3LRbYP+vj/oCso7v0epo/Np22O/IjWll11l +# hJB9i0ZQVdgMknzSGksc8zxCi1LQsP1r4z4HLimb5j0bpdS1HXeUOeLpZMlEPXh6 +# I/MTfaaQdION9MsmAkYqwooQu6SpBQyb7Wj6aC6VoCo/KmtYSWMfCWluWpiW5IP0 +# wI/zRive/DvQvTXvbiWu5a8n7dDd8w6vmSiXmE0OPQvyCInWH8MyGOLwxS3OW560 +# STkKxgrCxq2u5bLZ2xWIUUVYODJxJxp/sfQn+N4sOiBpmLJZiWhub6e3dMNABQam +# ASooPoI/E01mC8CzTfXhj38cbxV9Rad25UAqZaPDXVJihsMdYzaXht/a8/jyFqGa +# J+HNpZfQ7l1jQeNbB5yHPgZ3BtEGsXUfFL5hYbXw3MYbBL7fQccOKO7eZS/sl/ah +# XJbYANahRr1Z85elCUtIEJmAH9AAKcWxm6U/RXceNcbSoqKfenoi+kiVH6v7RyOA +# 9Z74v2u3S5fi63V4GuzqN5l5GEv/1rMjaHXmr/r8i+sLgOppO6/8MO0ETI7f33Vt +# Y5E90Z1WTk+/gFcioXgRMiF670EKsT/7qMykXcGhiJtXcVZOSEXAQsmbdlsKgEhr +# /Xmfwb1tbWrJUnMTDXpQzTGCGiYwghoiAgEBMIGVMH4xCzAJBgNVBAYTAlVTMRMw +# EQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVN +# aWNyb3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01pY3Jvc29mdCBDb2RlIFNp +# Z25pbmcgUENBIDIwMTECEzMAAAQEbHQG/1crJ3IAAAAABAQwDQYJYIZIAWUDBAIB +# BQCgga4wGQYJKoZIhvcNAQkDMQwGCisGAQQBgjcCAQQwHAYKKwYBBAGCNwIBCzEO +# MAwGCisGAQQBgjcCARUwLwYJKoZIhvcNAQkEMSIEIL7Zm9jjqasUipeS7XNbT5Gz +# uhEwSf09z2Ab+694mR/3MEIGCisGAQQBgjcCAQwxNDAyoBSAEgBNAGkAYwByAG8A +# cwBvAGYAdKEagBhodHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20wDQYJKoZIhvcNAQEB +# BQAEggEAfTNcpMwgkFxkb0hBch2MCvTb1mGCFv8rZWTkR/aRZTyzuAIEb2GfL4qB +# rPycLC2+q4gaksj1Cv+mRTEq+ysl0aWbXgPiRNiijlnuWKRPZ4nlcGkeXu5zxJ1W +# uUOCIe03s6eJCUZseRZkNHB1/CqIlk/YB5yqB38cfq6ct+lWKoSCbSwRVh3Du6am +# jxnQRa4njduu1xywcKZYp9NGGeAgRDpMNbvFKF4Qf3krbTAn3vIVDBay6oeiHo2I +# x1RLrRC/CEYZ7oJ8tyc3SUE2/Jd00M4EKax+z3xTIkOmyMBZjEe1el92WVcUWukT +# ACoQjF5jPyXnfYGH7rjevjpI5u2T66GCF7AwghesBgorBgEEAYI3AwMBMYIXnDCC +# F5gGCSqGSIb3DQEHAqCCF4kwgheFAgEDMQ8wDQYJYIZIAWUDBAIBBQAwggFaBgsq +# hkiG9w0BCRABBKCCAUkEggFFMIIBQQIBAQYKKwYBBAGEWQoDATAxMA0GCWCGSAFl +# AwQCAQUABCBjHcYL0Rw5C6IE3Lyb3B0i9qsTzN6j8bzChm+bMp97RgIGZ2Ld17Jt +# GBMyMDI1MDExMjAwNDMxNy4yNTZaMASAAgH0oIHZpIHWMIHTMQswCQYDVQQGEwJV +# UzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UE +# ChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMS0wKwYDVQQLEyRNaWNyb3NvZnQgSXJl +# bGFuZCBPcGVyYXRpb25zIExpbWl0ZWQxJzAlBgNVBAsTHm5TaGllbGQgVFNTIEVT +# Tjo0MzFBLTA1RTAtRDk0NzElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUtU3RhbXAg +# U2VydmljZaCCEf4wggcoMIIFEKADAgECAhMzAAAB+vs7RNN3M8bTAAEAAAH6MA0G +# CSqGSIb3DQEBCwUAMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9u +# MRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRp +# b24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEwMB4XDTI0 +# MDcyNTE4MzExMVoXDTI1MTAyMjE4MzExMVowgdMxCzAJBgNVBAYTAlVTMRMwEQYD +# VQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNy +# b3NvZnQgQ29ycG9yYXRpb24xLTArBgNVBAsTJE1pY3Jvc29mdCBJcmVsYW5kIE9w +# ZXJhdGlvbnMgTGltaXRlZDEnMCUGA1UECxMeblNoaWVsZCBUU1MgRVNOOjQzMUEt +# MDVFMC1EOTQ3MSUwIwYDVQQDExxNaWNyb3NvZnQgVGltZS1TdGFtcCBTZXJ2aWNl +# MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyhZVBM3PZcBfEpAf7fII +# hygwYVVP64USeZbSlRR3pvJebva0LQCDW45yOrtpwIpGyDGX+EbCbHhS5Td4J0Yl +# c83ztLEbbQD7M6kqR0Xj+n82cGse/QnMH0WRZLnwggJdenpQ6UciM4nMYZvdQjyb +# A4qejOe9Y073JlXv3VIbdkQH2JGyT8oB/LsvPL/kAnJ45oQIp7Sx57RPQ/0O6qay +# J2SJrwcjA8auMdAnZKOixFlzoooh7SyycI7BENHTpkVKrRV5YelRvWNTg1pH4EC2 +# KO2bxsBN23btMeTvZFieGIr+D8mf1lQQs0Ht/tMOVdah14t7Yk+xl5P4Tw3xfAGg +# Hsvsa6ugrxwmKTTX1kqXH5XCdw3TVeKCax6JV+ygM5i1NroJKwBCW11Pwi0z/ki9 +# 0ZeO6XfEE9mCnJm76Qcxi3tnW/Y/3ZumKQ6X/iVIJo7Lk0Z/pATRwAINqwdvzpdt +# X2hOJib4GR8is2bpKks04GurfweWPn9z6jY7GBC+js8pSwGewrffwgAbNKm82ZDF +# 
vqBGQQVJwIHSXpjkS+G39eyYOG2rcILBIDlzUzMFFJbNh5tDv3GeJ3EKvC4vNSAx +# tGfaG/mQhK43YjevsB72LouU78rxtNhuMXSzaHq5fFiG3zcsYHaa4+w+YmMrhTEz +# D4SAish35BjoXP1P1Ct4Va0CAwEAAaOCAUkwggFFMB0GA1UdDgQWBBRjjHKbL5WV +# 6kd06KocQHphK9U/vzAfBgNVHSMEGDAWgBSfpxVdAF5iXYP05dJlpxtTNRnpcjBf +# BgNVHR8EWDBWMFSgUqBQhk5odHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20vcGtpb3Bz +# L2NybC9NaWNyb3NvZnQlMjBUaW1lLVN0YW1wJTIwUENBJTIwMjAxMCgxKS5jcmww +# bAYIKwYBBQUHAQEEYDBeMFwGCCsGAQUFBzAChlBodHRwOi8vd3d3Lm1pY3Jvc29m +# dC5jb20vcGtpb3BzL2NlcnRzL01pY3Jvc29mdCUyMFRpbWUtU3RhbXAlMjBQQ0El +# MjAyMDEwKDEpLmNydDAMBgNVHRMBAf8EAjAAMBYGA1UdJQEB/wQMMAoGCCsGAQUF +# BwMIMA4GA1UdDwEB/wQEAwIHgDANBgkqhkiG9w0BAQsFAAOCAgEAuFbCorFrvodG +# +ZNJH3Y+Nz5QpUytQVObOyYFrgcGrxq6MUa4yLmxN4xWdL1kygaW5BOZ3xBlPY7V +# puf5b5eaXP7qRq61xeOrX3f64kGiSWoRi9EJawJWCzJfUQRThDL4zxI2pYc1wnPp +# 7Q695bHqwZ02eaOBudh/IfEkGe0Ofj6IS3oyZsJP1yatcm4kBqIH6db1+weM4q46 +# NhAfAf070zF6F+IpUHyhtMbQg5+QHfOuyBzrt67CiMJSKcJ3nMVyfNlnv6yvttYz +# LK3wS+0QwJUibLYJMI6FGcSuRxKlq6RjOhK9L3QOjh0VCM11rHM11ZmN0euJbbBC +# VfQEufOLNkG88MFCUNE10SSbM/Og/CbTko0M5wbVvQJ6CqLKjtHSoeoAGPeeX24f +# 5cPYyTcKlbM6LoUdO2P5JSdI5s1JF/On6LiUT50adpRstZajbYEeX/N7RvSbkn0d +# jD3BvT2Of3Wf9gIeaQIHbv1J2O/P5QOPQiVo8+0AKm6M0TKOduihhKxAt/6Yyk17 +# Fv3RIdjT6wiL2qRIEsgOJp3fILw4mQRPu3spRfakSoQe5N0e4HWFf8WW2ZL0+c83 +# Qzh3VtEPI6Y2e2BO/eWhTYbIbHpqYDfAtAYtaYIde87ZymXG3MO2wUjhL9HvSQzj +# oquq+OoUmvfBUcB2e5L6QCHO6qTO7WowggdxMIIFWaADAgECAhMzAAAAFcXna54C +# m0mZAAAAAAAVMA0GCSqGSIb3DQEBCwUAMIGIMQswCQYDVQQGEwJVUzETMBEGA1UE +# CBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9z +# b2Z0IENvcnBvcmF0aW9uMTIwMAYDVQQDEylNaWNyb3NvZnQgUm9vdCBDZXJ0aWZp +# Y2F0ZSBBdXRob3JpdHkgMjAxMDAeFw0yMTA5MzAxODIyMjVaFw0zMDA5MzAxODMy +# MjVaMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQH +# EwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xJjAkBgNV +# BAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEwMIICIjANBgkqhkiG9w0B +# AQEFAAOCAg8AMIICCgKCAgEA5OGmTOe0ciELeaLL1yR5vQ7VgtP97pwHB9KpbE51 +# yMo1V/YBf2xK4OK9uT4XYDP/XE/HZveVU3Fa4n5KWv64NmeFRiMMtY0Tz3cywBAY +# 6GB9alKDRLemjkZrBxTzxXb1hlDcwUTIcVxRMTegCjhuje3XD9gmU3w5YQJ6xKr9 +# cmmvHaus9ja+NSZk2pg7uhp7M62AW36MEBydUv626GIl3GoPz130/o5Tz9bshVZN +# 7928jaTjkY+yOSxRnOlwaQ3KNi1wjjHINSi947SHJMPgyY9+tVSP3PoFVZhtaDua +# Rr3tpK56KTesy+uDRedGbsoy1cCGMFxPLOJiss254o2I5JasAUq7vnGpF1tnYN74 +# kpEeHT39IM9zfUGaRnXNxF803RKJ1v2lIH1+/NmeRd+2ci/bfV+AutuqfjbsNkz2 +# K26oElHovwUDo9Fzpk03dJQcNIIP8BDyt0cY7afomXw/TNuvXsLz1dhzPUNOwTM5 +# TI4CvEJoLhDqhFFG4tG9ahhaYQFzymeiXtcodgLiMxhy16cg8ML6EgrXY28MyTZk +# i1ugpoMhXV8wdJGUlNi5UPkLiWHzNgY1GIRH29wb0f2y1BzFa/ZcUlFdEtsluq9Q +# BXpsxREdcu+N+VLEhReTwDwV2xo3xwgVGD94q0W29R6HXtqPnhZyacaue7e3Pmri +# Lq0CAwEAAaOCAd0wggHZMBIGCSsGAQQBgjcVAQQFAgMBAAEwIwYJKwYBBAGCNxUC +# BBYEFCqnUv5kxJq+gpE8RjUpzxD/LwTuMB0GA1UdDgQWBBSfpxVdAF5iXYP05dJl +# pxtTNRnpcjBcBgNVHSAEVTBTMFEGDCsGAQQBgjdMg30BATBBMD8GCCsGAQUFBwIB +# FjNodHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20vcGtpb3BzL0RvY3MvUmVwb3NpdG9y +# eS5odG0wEwYDVR0lBAwwCgYIKwYBBQUHAwgwGQYJKwYBBAGCNxQCBAweCgBTAHUA +# YgBDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAU +# 1fZWy4/oolxiaNE9lJBb186aGMQwVgYDVR0fBE8wTTBLoEmgR4ZFaHR0cDovL2Ny +# bC5taWNyb3NvZnQuY29tL3BraS9jcmwvcHJvZHVjdHMvTWljUm9vQ2VyQXV0XzIw +# MTAtMDYtMjMuY3JsMFoGCCsGAQUFBwEBBE4wTDBKBggrBgEFBQcwAoY+aHR0cDov +# L3d3dy5taWNyb3NvZnQuY29tL3BraS9jZXJ0cy9NaWNSb29DZXJBdXRfMjAxMC0w +# Ni0yMy5jcnQwDQYJKoZIhvcNAQELBQADggIBAJ1VffwqreEsH2cBMSRb4Z5yS/yp +# b+pcFLY+TkdkeLEGk5c9MTO1OdfCcTY/2mRsfNB1OW27DzHkwo/7bNGhlBgi7ulm +# ZzpTTd2YurYeeNg2LpypglYAA7AFvonoaeC6Ce5732pvvinLbtg/SHUB2RjebYIM +# 
9W0jVOR4U3UkV7ndn/OOPcbzaN9l9qRWqveVtihVJ9AkvUCgvxm2EhIRXT0n4ECW +# OKz3+SmJw7wXsFSFQrP8DJ6LGYnn8AtqgcKBGUIZUnWKNsIdw2FzLixre24/LAl4 +# FOmRsqlb30mjdAy87JGA0j3mSj5mO0+7hvoyGtmW9I/2kQH2zsZ0/fZMcm8Qq3Uw +# xTSwethQ/gpY3UA8x1RtnWN0SCyxTkctwRQEcb9k+SS+c23Kjgm9swFXSVRk2XPX +# fx5bRAGOWhmRaw2fpCjcZxkoJLo4S5pu+yFUa2pFEUep8beuyOiJXk+d0tBMdrVX +# VAmxaQFEfnyhYWxz/gq77EFmPWn9y8FBSX5+k77L+DvktxW/tM4+pTFRhLy/AsGC +# onsXHRWJjXD+57XQKBqJC4822rpM+Zv/Cuk0+CQ1ZyvgDbjmjJnW4SLq8CdCPSWU +# 5nR0W2rRnj7tfqAxM328y+l7vzhwRNGQ8cirOoo6CGJ/2XBjU02N7oJtpQUQwXEG +# ahC0HVUzWLOhcGbyoYIDWTCCAkECAQEwggEBoYHZpIHWMIHTMQswCQYDVQQGEwJV +# UzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UE +# ChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMS0wKwYDVQQLEyRNaWNyb3NvZnQgSXJl +# bGFuZCBPcGVyYXRpb25zIExpbWl0ZWQxJzAlBgNVBAsTHm5TaGllbGQgVFNTIEVT +# Tjo0MzFBLTA1RTAtRDk0NzElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUtU3RhbXAg +# U2VydmljZaIjCgEBMAcGBSsOAwIaAxUA94Z+bUJn+nKwBvII6sg0Ny7aPDaggYMw +# gYCkfjB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UE +# BxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSYwJAYD +# VQQDEx1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMDANBgkqhkiG9w0BAQsF +# AAIFAOss/ykwIhgPMjAyNTAxMTExNDMxMDVaGA8yMDI1MDExMjE0MzEwNVowdzA9 +# BgorBgEEAYRZCgQBMS8wLTAKAgUA6yz/KQIBADAKAgEAAgIpggIB/zAHAgEAAgIT +# XjAKAgUA6y5QqQIBADA2BgorBgEEAYRZCgQCMSgwJjAMBgorBgEEAYRZCgMCoAow +# CAIBAAIDB6EgoQowCAIBAAIDAYagMA0GCSqGSIb3DQEBCwUAA4IBAQCHE6DSGdY4 +# KF25iAsxQP9F9Lz6ye/vrWGv+j0aSzSbjHVM3kMcEmX9278XgAKgAYII/f16uDtE +# 7VlEwnKGXujGF249I864U50QFt9hIxqCeuvrshDq8a4Q4KVmuDTosYjS114IJeBK +# LMOBRgLQCIC+wmvdP4EeYH1tnMIEASFvptE+XBro44/A5pmx5UiDJRL1AG4+aO3x +# 13psQu7H3thmbGy7Sf0Azjx0PZ+1QUVI7jWNk9DWjGd18G4SQD8Uxeh0v73/dQx1 +# XsFhsyvnrw6uUrxkoAdurif9kyKS+ppo4j9ZkPXzzuc95s1bPcPAyjXCu07Tlunj +# sXttGVEPQIeXMYIEDTCCBAkCAQEwgZMwfDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +# Cldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNVBAoTFU1pY3Jvc29m +# dCBDb3Jwb3JhdGlvbjEmMCQGA1UEAxMdTWljcm9zb2Z0IFRpbWUtU3RhbXAgUENB +# IDIwMTACEzMAAAH6+ztE03czxtMAAQAAAfowDQYJYIZIAWUDBAIBBQCgggFKMBoG +# CSqGSIb3DQEJAzENBgsqhkiG9w0BCRABBDAvBgkqhkiG9w0BCQQxIgQgxenDb/df +# q8XJS+q7Oxyca1ryDMmDRA0I3mtr+xYHGZQwgfoGCyqGSIb3DQEJEAIvMYHqMIHn +# MIHkMIG9BCB98n8tya8+B2jjU/dpJRIwHwHHpco5ogNStYocbkOeVjCBmDCBgKR+ +# MHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdS +# ZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xJjAkBgNVBAMT +# HU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEwAhMzAAAB+vs7RNN3M8bTAAEA +# AAH6MCIEIC8gtQ6HRW7jzwlpg15qoYopXwF01KaO1EM5tYzqJwx/MA0GCSqGSIb3 +# DQEBCwUABIICAIsSn8x3zVS870Zf4pa+jfZjdOq++5dHpeLg46sujQ3w+xj3RyhB +# nRa3kjWyU9nNF6hrt0Q+ILOxUt3jCd3hbB1ZuspwbXdoRtRLfuLPvGiSmINdgFR4 +# LD/jXLrq9USAHYXHzhuYhaVLIpn7M87TbFuGFVaByjmohZRcPCE8y8b7/RIlGm7B +# wgx0thZA4lHWFyj8j7CwjmueOJSSZ9an4P9VHFKJ63kYub4J1VxbeApGAeeS32SD +# oI3zDdC+iI+IetR9BUHGcR3Vg7j7c0T+NcrIoPPNb4Ff90Ue24h5RDJMQWrM56ak +# VEWgVlzhf8CeyeO7/ButBUZu8VLkH0DQraK9UKptZFKOXMELoi/oZL6IJftHp5vU +# +sPpF3NuuXw8Z5eL9jZ7A1y+H7nMhdXP2pojHDN213VZqeoUoOZlbFl6spDF1hFP +# 44Fu7TPGEwUNS213Pwln2SJ8SayeVUxsreo4pTvhDl/xZ+B7WNuLL7hatWFGrcf3 +# w/HiVCoTfsY49SaN6zAK6akS3KI6KZHfzjaxDw+4LHo8gL68Ik1HZe4W1jaLYaED +# LWvKIinaH2vwU0J4a+oX+64eSh0tI9Ef3aM6jn9LgqubY36TzptUTWcsM3vv3YGB +# Dnf7LPxSt4/s5bUgAHvkWTjESdtIbt6Pxqz4BRha+ckPYBj968t3mSh6 +# SIG # End signature block \ No newline at end of file diff --git a/src/Misc/dotnet-install.sh b/src/Misc/dotnet-install.sh index 8ffe16955..924bdc2a8 100755 --- a/src/Misc/dotnet-install.sh +++ b/src/Misc/dotnet-install.sh @@ -11,7 +11,7 @@ set -u # This is causing it to fail set -o pipefail -# 
Use in the the functions: eval $invocation +# Use in the functions: eval $invocation invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"' # standard output may be used as a return value in the functions @@ -24,7 +24,7 @@ exec 3>&1 # See if stdout is a terminal if [ -t 1 ] && command -v tput > /dev/null; then # see if it supports colors - ncolors=$(tput colors) + ncolors=$(tput colors || echo 0) if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then bold="$(tput bold || echo)" normal="$(tput sgr0 || echo)" @@ -135,6 +135,31 @@ get_legacy_os_name_from_platform() { return 1 } +get_legacy_os_name() { + eval $invocation + + local uname=$(uname) + if [ "$uname" = "Darwin" ]; then + echo "osx" + return 0 + elif [ -n "$runtime_id" ]; then + echo $(get_legacy_os_name_from_platform "${runtime_id%-*}" || echo "${runtime_id%-*}") + return 0 + else + if [ -e /etc/os-release ]; then + . /etc/os-release + os=$(get_legacy_os_name_from_platform "$ID${VERSION_ID:+.${VERSION_ID}}" || echo "") + if [ -n "$os" ]; then + echo "$os" + return 0 + fi + fi + fi + + say_verbose "Distribution specific OS name and version could not be detected: UName = $uname" + return 1 +} + get_linux_platform_name() { eval $invocation @@ -174,8 +199,8 @@ get_current_os_name() { echo "freebsd" return 0 elif [ "$uname" = "Linux" ]; then - local linux_platform_name - linux_platform_name="$(get_linux_platform_name)" || { echo "linux" && return 0 ; } + local linux_platform_name="" + linux_platform_name="$(get_linux_platform_name)" || true if [ "$linux_platform_name" = "rhel.6" ]; then echo $linux_platform_name @@ -196,39 +221,13 @@ get_current_os_name() { return 1 } -get_legacy_os_name() { - eval $invocation - - local uname=$(uname) - if [ "$uname" = "Darwin" ]; then - echo "osx" - return 0 - elif [ -n "$runtime_id" ]; then - echo $(get_legacy_os_name_from_platform "${runtime_id%-*}" || echo "${runtime_id%-*}") - return 0 - else - if [ -e /etc/os-release ]; then - . /etc/os-release - os=$(get_legacy_os_name_from_platform "$ID${VERSION_ID:+.${VERSION_ID}}" || echo "") - if [ -n "$os" ]; then - echo "$os" - return 0 - fi - fi - fi - - say_verbose "Distribution specific OS name and version could not be detected: UName = $uname" - return 1 -} - machine_has() { eval $invocation - hash "$1" > /dev/null 2>&1 + command -v "$1" > /dev/null 2>&1 return $? } - check_min_reqs() { local hasMinimum=false if machine_has "curl"; then @@ -299,14 +298,43 @@ get_machine_architecture() { if command -v uname > /dev/null; then CPUName=$(uname -m) case $CPUName in + armv1*|armv2*|armv3*|armv4*|armv5*|armv6*) + echo "armv6-or-below" + return 0 + ;; armv*l) echo "arm" return 0 ;; aarch64|arm64) + if [ "$(getconf LONG_BIT)" -lt 64 ]; then + # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS) + echo "arm" + return 0 + fi echo "arm64" return 0 ;; + s390x) + echo "s390x" + return 0 + ;; + ppc64le) + echo "ppc64le" + return 0 + ;; + loongarch64) + echo "loongarch64" + return 0 + ;; + riscv64) + echo "riscv64" + return 0 + ;; + powerpc|ppc) + echo "ppc" + return 0 + ;; esac fi @@ -321,11 +349,19 @@ get_normalized_architecture_from_architecture() { eval $invocation local architecture="$(to_lowercase "$1")" + + if [[ $architecture == \<auto\> ]]; then + machine_architecture="$(get_machine_architecture)" + if [[ "$machine_architecture" == "armv6-or-below" ]]; then + say_err "Architecture \`$machine_architecture\` not supported.
If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues" + return 1 + fi + + echo $machine_architecture + return 0 + fi + case "$architecture" in - \<auto\>) - echo "$(get_normalized_architecture_from_architecture "$(get_machine_architecture)")" - return 0 - ;; amd64|x64) echo "x64" return 0 @@ -338,12 +374,72 @@ get_normalized_architecture_from_architecture() { echo "arm64" return 0 ;; + s390x) + echo "s390x" + return 0 + ;; + ppc64le) + echo "ppc64le" + return 0 + ;; + loongarch64) + echo "loongarch64" + return 0 + ;; esac say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues" return 1 } +# args: +# version - $1 +# channel - $2 +# architecture - $3 +get_normalized_architecture_for_specific_sdk_version() { + eval $invocation + + local is_version_support_arm64="$(is_arm64_supported "$1")" + local is_channel_support_arm64="$(is_arm64_supported "$2")" + local architecture="$3"; + local osname="$(get_current_os_name)" + + if [ "$osname" == "osx" ] && [ "$architecture" == "arm64" ] && { [ "$is_version_support_arm64" = false ] || [ "$is_channel_support_arm64" = false ]; }; then + #check if rosetta is installed + if [ "$(/usr/bin/pgrep oahd >/dev/null 2>&1;echo $?)" -eq 0 ]; then + say_verbose "Changing user architecture from '$architecture' to 'x64' because .NET SDKs prior to version 6.0 do not support arm64." + echo "x64" + return 0; + else + say_err "Architecture \`$architecture\` is not supported for .NET SDK version \`$version\`. Please install Rosetta to allow emulation of the \`$architecture\` .NET SDK on this platform" + return 1 + fi + fi + + echo "$architecture" + return 0 +} + +# args: +# version or channel - $1 +is_arm64_supported() { + # Extract the major version by splitting on the dot + major_version="${1%%.*}" + + # Check if the major version is a valid number and less than 6 + case "$major_version" in + [0-9]*) + if [ "$major_version" -lt 6 ]; then + echo false + return 0 + fi + ;; + esac + + echo true + return 0 +} + # args: # user_defined_os - $1 get_normalized_os() { @@ -356,8 +452,13 @@ get_normalized_os() { echo "$osname" return 0 ;; + macos) + osname='osx' + echo "$osname" + return 0 + ;; *) - say_err "'$user_defined_os' is not a supported value for --os option, supported values are: osx, linux, linux-musl, freebsd, rhel.6. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues." + say_err "'$user_defined_os' is not a supported value for --os option, supported values are: osx, macos, linux, linux-musl, freebsd, rhel.6. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues." return 1 ;; esac @@ -368,6 +469,88 @@ get_normalized_os() { return 0 } +# args: +# quality - $1 +get_normalized_quality() { + eval $invocation + + local quality="$(to_lowercase "$1")" + if [ ! -z "$quality" ]; then + case "$quality" in + daily | signed | validated | preview) + echo "$quality" + return 0 + ;; + ga) + #ga quality is available without specifying quality, so normalizing it to empty + return 0 + ;; + *) + say_err "'$quality' is not a supported value for --quality option. Supported values are: daily, signed, validated, preview, ga. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues."
+ return 1 + ;; + esac + fi + return 0 +} + +# args: +# channel - $1 +get_normalized_channel() { + eval $invocation + + local channel="$(to_lowercase "$1")" + + if [[ $channel == current ]]; then + say_warning 'Value "Current" is deprecated for -Channel option. Use "STS" instead.' + fi + + if [[ $channel == release/* ]]; then + say_warning 'Using branch name with -Channel option is no longer supported with newer releases. Use -Quality option with a channel in X.Y format instead.'; + fi + + if [ ! -z "$channel" ]; then + case "$channel" in + lts) + echo "LTS" + return 0 + ;; + sts) + echo "STS" + return 0 + ;; + current) + echo "STS" + return 0 + ;; + *) + echo "$channel" + return 0 + ;; + esac + fi + + return 0 +} + +# args: +# runtime - $1 +get_normalized_product() { + eval $invocation + + local product="" + local runtime="$(to_lowercase "$1")" + if [[ "$runtime" == "dotnet" ]]; then + product="dotnet-runtime" + elif [[ "$runtime" == "aspnetcore" ]]; then + product="aspnetcore-runtime" + elif [ -z "$runtime" ]; then + product="dotnet-sdk" + fi + echo "$product" + return 0 +} + # The version text returned from the feeds is a 1-line or 2-line string: # For the SDK and the dotnet runtime (2 lines): # Line 1: # commit_hash @@ -377,7 +560,7 @@ get_normalized_os() { # args: # version_text - stdin -get_version_from_version_info() { +get_version_from_latestversion_file_content() { eval $invocation cat | tail -n 1 | sed 's/\r$//' @@ -405,11 +588,45 @@ is_dotnet_package_installed() { fi } +# args: +# downloaded file - $1 +# remote_file_size - $2 +validate_remote_local_file_sizes() +{ + eval $invocation + + local downloaded_file="$1" + local remote_file_size="$2" + local file_size='' + + if [[ "$OSTYPE" == "linux-gnu"* ]]; then + file_size="$(stat -c '%s' "$downloaded_file")" + elif [[ "$OSTYPE" == "darwin"* ]]; then + # hardcode in order to avoid conflicts with GNU stat + file_size="$(/usr/bin/stat -f '%z' "$downloaded_file")" + fi + + if [ -n "$file_size" ]; then + say "Downloaded file size is $file_size bytes." + + if [ -n "$remote_file_size" ] && [ -n "$file_size" ]; then + if [ "$remote_file_size" -ne "$file_size" ]; then + say "The remote and local file sizes are not equal. The remote file size is $remote_file_size bytes and the local size is $file_size bytes. The local package may be corrupted." + else + say "The remote and local file sizes are equal." + fi + fi + + else + say "Either downloaded or local package size can not be measured. One of them may be corrupted." + fi +} + # args: # azure_feed - $1 # channel - $2 # normalized_architecture - $3 -get_latest_version_info() { +get_version_from_latestversion_file() { eval $invocation local azure_feed="$1" @@ -418,24 +635,24 @@ get_latest_version_info() { local version_file_url=null if [[ "$runtime" == "dotnet" ]]; then - version_file_url="$uncached_feed/Runtime/$channel/latest.version" + version_file_url="$azure_feed/Runtime/$channel/latest.version" elif [[ "$runtime" == "aspnetcore" ]]; then - version_file_url="$uncached_feed/aspnetcore/Runtime/$channel/latest.version" + version_file_url="$azure_feed/aspnetcore/Runtime/$channel/latest.version" elif [ -z "$runtime" ]; then - version_file_url="$uncached_feed/Sdk/$channel/latest.version" + version_file_url="$azure_feed/Sdk/$channel/latest.version" else say_err "Invalid value for \$runtime" return 1 fi - say_verbose "get_latest_version_info: latest url: $version_file_url" + say_verbose "get_version_from_latestversion_file: latest url: $version_file_url" - download "$version_file_url" - return $? 
+ download "$version_file_url" || return $? + return 0 } # args: # json_file - $1 -parse_jsonfile_for_version() { +parse_globaljson_file_for_version() { eval $invocation local json_file="$1" @@ -444,7 +661,7 @@ parse_jsonfile_for_version() { return 1 fi - sdk_section=$(cat $json_file | awk '/"sdk"/,/}/') + sdk_section=$(cat $json_file | tr -d "\r" | awk '/"sdk"/,/}/') if [ -z "$sdk_section" ]; then say_err "Unable to parse the SDK node in \`$json_file\`" return 1 @@ -491,9 +708,9 @@ get_specific_version_from_version() { if [ -z "$json_file" ]; then if [[ "$version" == "latest" ]]; then local version_info - version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1 + version_info="$(get_version_from_latestversion_file "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1 say_verbose "get_specific_version_from_version: version_info=$version_info" - echo "$version_info" | get_version_from_version_info + echo "$version_info" | get_version_from_latestversion_file_content return 0 else echo "$version" @@ -501,7 +718,7 @@ get_specific_version_from_version() { fi else local version_info - version_info="$(parse_jsonfile_for_version "$json_file")" || return 1 + version_info="$(parse_globaljson_file_for_version "$json_file")" || return 1 echo "$version_info" return 0 fi @@ -541,43 +758,133 @@ construct_download_link() { # args: # azure_feed - $1 # specific_version - $2 +# download link - $3 (optional) get_specific_product_version() { # If we find a 'productVersion.txt' at the root of any folder, we'll use its contents # to resolve the version of what's in the folder, superseding the specified version. + # if 'productVersion.txt' is missing but download link is already available, product version will be taken from download link eval $invocation local azure_feed="$1" local specific_version="${2//[$'\t\r\n']}" - local specific_product_version=$specific_version + local package_download_link="" + if [ $# -gt 2 ]; then + local package_download_link="$3" + fi + local specific_product_version=null + + # Try to get the version number, using the productVersion.txt file located next to the installer file. + local download_links=($(get_specific_product_version_url "$azure_feed" "$specific_version" true "$package_download_link") + $(get_specific_product_version_url "$azure_feed" "$specific_version" false "$package_download_link")) + + for download_link in "${download_links[@]}" + do + say_verbose "Checking for the existence of $download_link" + + if machine_has "curl" + then + if ! specific_product_version=$(curl -s --fail "${download_link}${feed_credential}" 2>&1); then + continue + else + echo "${specific_product_version//[$'\t\r\n']}" + return 0 + fi + + elif machine_has "wget" + then + specific_product_version=$(wget -qO- "${download_link}${feed_credential}" 2>&1) + if [ $? = 0 ]; then + echo "${specific_product_version//[$'\t\r\n']}" + return 0 + fi + fi + done + + # Getting the version number with productVersion.txt has failed. Try parsing the download link for a version number. + say_verbose "Failed to get the version using productVersion.txt file. Download link will be parsed instead." 
+ specific_product_version="$(get_product_specific_version_from_download_link "$package_download_link" "$specific_version")" + echo "${specific_product_version//[$'\t\r\n']}" + return 0 +} + +# args: +# azure_feed - $1 +# specific_version - $2 +# is_flattened - $3 +# download link - $4 (optional) +get_specific_product_version_url() { + eval $invocation + + local azure_feed="$1" + local specific_version="$2" + local is_flattened="$3" + local package_download_link="" + if [ $# -gt 3 ]; then + local package_download_link="$4" + fi + + local pvFileName="productVersion.txt" + if [ "$is_flattened" = true ]; then + if [ -z "$runtime" ]; then + pvFileName="sdk-productVersion.txt" + elif [[ "$runtime" == "dotnet" ]]; then + pvFileName="runtime-productVersion.txt" + else + pvFileName="$runtime-productVersion.txt" + fi + fi local download_link=null - if [[ "$runtime" == "dotnet" ]]; then - download_link="$azure_feed/Runtime/$specific_version/productVersion.txt${feed_credential}" - elif [[ "$runtime" == "aspnetcore" ]]; then - download_link="$azure_feed/aspnetcore/Runtime/$specific_version/productVersion.txt${feed_credential}" - elif [ -z "$runtime" ]; then - download_link="$azure_feed/Sdk/$specific_version/productVersion.txt${feed_credential}" + + if [ -z "$package_download_link" ]; then + if [[ "$runtime" == "dotnet" ]]; then + download_link="$azure_feed/Runtime/$specific_version/${pvFileName}" + elif [[ "$runtime" == "aspnetcore" ]]; then + download_link="$azure_feed/aspnetcore/Runtime/$specific_version/${pvFileName}" + elif [ -z "$runtime" ]; then + download_link="$azure_feed/Sdk/$specific_version/${pvFileName}" + else + return 1 + fi else - return 1 + download_link="${package_download_link%/*}/${pvFileName}" fi - if machine_has "curl" - then - specific_product_version=$(curl -s --fail "$download_link") - if [ $? -ne 0 ] - then - specific_product_version=$specific_version - fi - elif machine_has "wget" - then - specific_product_version=$(wget -qO- "$download_link") - if [ $? 
-ne 0 ] - then - specific_product_version=$specific_version - fi - fi - specific_product_version="${specific_product_version//[$'\t\r\n']}" + say_verbose "Constructed productVersion link: $download_link" + echo "$download_link" + return 0 +} +# args: +# download link - $1 +# specific version - $2 +get_product_specific_version_from_download_link() +{ + eval $invocation + + local download_link="$1" + local specific_version="$2" + local specific_product_version="" + + if [ -z "$download_link" ]; then + echo "$specific_version" + return 0 + fi + + #get filename + filename="${download_link##*/}" + + #product specific version follows the product name + #for filename 'dotnet-sdk-3.1.404-linux-x64.tar.gz': the product version is 3.1.404 + IFS='-' + read -ra filename_elems <<< "$filename" + count=${#filename_elems[@]} + if [[ "$count" -gt 2 ]]; then + specific_product_version="${filename_elems[2]}" + else + specific_product_version=$specific_version + fi + unset IFS; echo "$specific_product_version" return 0 } @@ -649,6 +956,37 @@ get_absolute_path() { return 0 } +# args: +# override - $1 (boolean, true or false) +get_cp_options() { + eval $invocation + + local override="$1" + local override_switch="" + + if [ "$override" = false ]; then + override_switch="-n" + + # create temporary files to check if 'cp -u' is supported + tmp_dir="$(mktemp -d)" + tmp_file="$tmp_dir/testfile" + tmp_file2="$tmp_dir/testfile2" + + touch "$tmp_file" + + # use -u instead of -n if it's available + if cp -u "$tmp_file" "$tmp_file2" 2>/dev/null; then + override_switch="-u" + fi + + # clean up + rm -f "$tmp_file" "$tmp_file2" + rm -rf "$tmp_dir" + fi + + echo "$override_switch" +} + # args: # input_files - stdin # root_path - $1 @@ -660,15 +998,7 @@ copy_files_or_dirs_from_list() { local root_path="$(remove_trailing_slash "$1")" local out_path="$(remove_trailing_slash "$2")" local override="$3" - local osname="$(get_current_os_name)" - local override_switch=$( - if [ "$override" = false ]; then - if [ "$osname" = "linux-musl" ]; then - printf -- "-u"; - else - printf -- "-n"; - fi - fi) + local override_switch="$(get_cp_options "$override")" cat | uniq | while read -r file_path; do local path="$(remove_beginning_slash "${file_path#$root_path}")" @@ -683,14 +1013,39 @@ copy_files_or_dirs_from_list() { done } +# args: +# zip_uri - $1 +get_remote_file_size() { + local zip_uri="$1" + + if machine_has "curl"; then + file_size=$(curl -sI "$zip_uri" | grep -i content-length | awk '{ num = $2 + 0; print num }') + elif machine_has "wget"; then + file_size=$(wget --spider --server-response -O /dev/null "$zip_uri" 2>&1 | grep -i 'Content-Length:' | awk '{ num = $2 + 0; print num }') + else + say "Neither curl nor wget is available on this system." + return + fi + + if [ -n "$file_size" ]; then + say "Remote file $zip_uri size is $file_size bytes." + echo "$file_size" + else + say_verbose "Content-Length header was not extracted for $zip_uri." 
+ echo "" + fi +} + # args: # zip_path - $1 # out_path - $2 +# remote_file_size - $3 extract_dotnet_package() { eval $invocation local zip_path="$1" local out_path="$2" + local remote_file_size="$3" local temp_out_path="$(mktemp -d "$temporary_file_template")" @@ -700,9 +1055,13 @@ extract_dotnet_package() { local folders_with_version_regex='^.*/[0-9]+\.[0-9]+[^/]+/' find "$temp_out_path" -type f | grep -Eo "$folders_with_version_regex" | sort | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" false find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files" - + + validate_remote_local_file_sizes "$zip_path" "$remote_file_size" + rm -rf "$temp_out_path" - rm -f "$zip_path" && say_verbose "Temporary zip file $zip_path was removed" + if [ -z ${keep_zip+x} ]; then + rm -f "$zip_path" && say_verbose "Temporary archive file $zip_path was removed" + fi if [ "$failed" = true ]; then say_err "Extraction failed" @@ -711,22 +1070,75 @@ extract_dotnet_package() { return 0 } -get_http_header_curl() { +# args: +# remote_path - $1 +# disable_feed_credential - $2 +get_http_header() +{ eval $invocation local remote_path="$1" - remote_path_with_credential="${remote_path}${feed_credential}" - curl_options="-I -sSL --retry 5 --retry-delay 2 --connect-timeout 15 " - curl $curl_options "$remote_path_with_credential" || return 1 + local disable_feed_credential="$2" + + local failed=false + local response + if machine_has "curl"; then + get_http_header_curl $remote_path $disable_feed_credential || failed=true + elif machine_has "wget"; then + get_http_header_wget $remote_path $disable_feed_credential || failed=true + else + failed=true + fi + if [ "$failed" = true ]; then + say_verbose "Failed to get HTTP header: '$remote_path'." + return 1 + fi return 0 } +# args: +# remote_path - $1 +# disable_feed_credential - $2 +get_http_header_curl() { + eval $invocation + local remote_path="$1" + local disable_feed_credential="$2" + + remote_path_with_credential="$remote_path" + if [ "$disable_feed_credential" = false ]; then + remote_path_with_credential+="$feed_credential" + fi + + curl_options="-I -sSL --retry 5 --retry-delay 2 --connect-timeout 15 " + curl $curl_options "$remote_path_with_credential" 2>&1 || return 1 + return 0 +} + +# args: +# remote_path - $1 +# disable_feed_credential - $2 get_http_header_wget() { eval $invocation local remote_path="$1" - remote_path_with_credential="${remote_path}${feed_credential}" - wget_options="-q -S --spider --tries 5 --waitretry 2 --connect-timeout 15 " - wget $wget_options "$remote_path_with_credential" 2>&1 || return 1 - return 0 + local disable_feed_credential="$2" + local wget_options="-q -S --spider --tries 5 " + + local wget_options_extra='' + + # Test for options that aren't supported on all wget implementations. + if [[ $(wget -h 2>&1 | grep -E 'waitretry|connect-timeout') ]]; then + wget_options_extra="--waitretry 2 --connect-timeout 15 " + else + say "wget extra options are unavailable for this environment" + fi + + remote_path_with_credential="$remote_path" + if [ "$disable_feed_credential" = false ]; then + remote_path_with_credential+="$feed_credential" + fi + + wget $wget_options $wget_options_extra "$remote_path_with_credential" 2>&1 + + return $? } # args: @@ -763,11 +1175,9 @@ download() { say "Download attempt #$attempts has failed: $http_code $download_error_msg" say "Attempt #$((attempts+1)) will start in $((attempts*10)) seconds." 
- sleep $((attempts*20)) + sleep $((attempts*10)) done - - if [ "$failed" = true ]; then say_verbose "Download failed: $remote_path" return 1 @@ -783,20 +1193,30 @@ downloadcurl() { local remote_path="$1" local out_path="${2:-}" # Append feed_credential as late as possible before calling curl to avoid logging feed_credential + # Avoid passing URI with credentials to functions: note, most of them echoing parameters of invocation in verbose output. local remote_path_with_credential="${remote_path}${feed_credential}" local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs " - local failed=false + local curl_exit_code=0; if [ -z "$out_path" ]; then - curl $curl_options "$remote_path_with_credential" || failed=true + curl $curl_options "$remote_path_with_credential" 2>&1 + curl_exit_code=$? else - curl $curl_options -o "$out_path" "$remote_path_with_credential" || failed=true + curl $curl_options -o "$out_path" "$remote_path_with_credential" 2>&1 + curl_exit_code=$? fi - if [ "$failed" = true ]; then - local response=$(get_http_header_curl $remote_path_with_credential) - http_code=$( echo "$response" | awk '/^HTTP/{print $2}' | tail -1 ) + + if [ $curl_exit_code -gt 0 ]; then download_error_msg="Unable to download $remote_path." - if [[ $http_code != 2* ]]; then - download_error_msg+=" Returned HTTP status code: $http_code." + # Check for curl timeout codes + if [[ $curl_exit_code == 7 || $curl_exit_code == 28 ]]; then + download_error_msg+=" Failed to reach the server: connection timeout." + else + local disable_feed_credential=false + local response=$(get_http_header_curl $remote_path $disable_feed_credential) + http_code=$( echo "$response" | awk '/^HTTP/{print $2}' | tail -1 ) + if [[ ! -z $http_code && $http_code != 2* ]]; then + download_error_msg+=" Returned HTTP status code: $http_code." + fi fi say_verbose "$download_error_msg" return 1 @@ -814,64 +1234,367 @@ downloadwget() { local out_path="${2:-}" # Append feed_credential as late as possible before calling wget to avoid logging feed_credential local remote_path_with_credential="${remote_path}${feed_credential}" - local wget_options="--tries 20 --waitretry 2 --connect-timeout 15 " - local failed=false - if [ -z "$out_path" ]; then - wget -q $wget_options -O - "$remote_path_with_credential" || failed=true + local wget_options="--tries 20 " + + local wget_options_extra='' + local wget_result='' + + # Test for options that aren't supported on all wget implementations. + if [[ $(wget -h 2>&1 | grep -E 'waitretry|connect-timeout') ]]; then + wget_options_extra="--waitretry 2 --connect-timeout 15 " else - wget $wget_options -O "$out_path" "$remote_path_with_credential" || failed=true + say "wget extra options are unavailable for this environment" fi - if [ "$failed" = true ]; then - local response=$(get_http_header_wget $remote_path_with_credential) + + if [ -z "$out_path" ]; then + wget -q $wget_options $wget_options_extra -O - "$remote_path_with_credential" 2>&1 + wget_result=$? + else + wget $wget_options $wget_options_extra -O "$out_path" "$remote_path_with_credential" 2>&1 + wget_result=$? + fi + + if [[ $wget_result != 0 ]]; then + local disable_feed_credential=false + local response=$(get_http_header_wget $remote_path $disable_feed_credential) http_code=$( echo "$response" | awk '/^ HTTP/{print $2}' | tail -1 ) download_error_msg="Unable to download $remote_path." - if [[ $http_code != 2* ]]; then + if [[ ! 
-z $http_code && $http_code != 2* ]]; then download_error_msg+=" Returned HTTP status code: $http_code." + # wget exit code 4 stands for network-issue + elif [[ $wget_result == 4 ]]; then + download_error_msg+=" Failed to reach the server: connection timeout." fi say_verbose "$download_error_msg" return 1 fi + return 0 } +extract_stem() { + local url="$1" + # extract the protocol + proto="$(echo $1 | grep :// | sed -e's,^\(.*://\).*,\1,g')" + # remove the protocol + url="${1/$proto/}" + # extract the path (if any) - since we know all of our feeds have a first path segment, we can skip the first one. otherwise we'd use -f2- to get the full path + full_path="$(echo $url | grep / | cut -d/ -f2-)" + path="$(echo $full_path | cut -d/ -f2-)" + echo $path +} + +check_url_exists() { + eval $invocation + local url="$1" + + local code="" + if machine_has "curl" + then + code=$(curl --head -o /dev/null -w "%{http_code}" -s --fail "$url"); + elif machine_has "wget" + then + # get the http response, grab the status code + server_response=$(wget -qO- --method=HEAD --server-response "$url" 2>&1) + code=$(echo "$server_response" | grep "HTTP/" | awk '{print $2}') + fi + if [ $code = "200" ]; then + return 0 + else + return 1 + fi +} + +sanitize_redirect_url() { + eval $invocation + + local url_stem + url_stem=$(extract_stem "$1") + say_verbose "Checking configured feeds for the asset at ${yellow:-}$url_stem${normal:-}" + + for feed in "${feeds[@]}" + do + local trial_url="$feed/$url_stem" + say_verbose "Checking ${yellow:-}$trial_url${normal:-}" + if check_url_exists "$trial_url"; then + say_verbose "Found a match at ${yellow:-}$trial_url${normal:-}" + echo "$trial_url" + return 0 + else + say_verbose "No match at ${yellow:-}$trial_url${normal:-}" + fi + done + return 1 +} + +get_download_link_from_aka_ms() { + eval $invocation + + #quality is not supported for LTS or STS channel + #STS maps to current + if [[ ! -z "$normalized_quality" && ("$normalized_channel" == "LTS" || "$normalized_channel" == "STS") ]]; then + normalized_quality="" + say_warning "Specifying quality for STS or LTS channel is not supported, the quality will be ignored." + fi + + say_verbose "Retrieving primary payload URL from aka.ms for channel: '$normalized_channel', quality: '$normalized_quality', product: '$normalized_product', os: '$normalized_os', architecture: '$normalized_architecture'." + + #construct aka.ms link + aka_ms_link="https://aka.ms/dotnet" + if [ "$internal" = true ]; then + aka_ms_link="$aka_ms_link/internal" + fi + aka_ms_link="$aka_ms_link/$normalized_channel" + if [[ ! -z "$normalized_quality" ]]; then + aka_ms_link="$aka_ms_link/$normalized_quality" + fi + aka_ms_link="$aka_ms_link/$normalized_product-$normalized_os-$normalized_architecture.tar.gz" + say_verbose "Constructed aka.ms link: '$aka_ms_link'." + + #get HTTP response + #do not pass credentials as a part of the $aka_ms_link and do not apply credentials in the get_http_header function + #otherwise the redirect link would have credentials as well + #it would result in applying credentials twice to the resulting link and thus breaking it, and in echoing credentials to the output as a part of redirect link + disable_feed_credential=true + response="$(get_http_header $aka_ms_link $disable_feed_credential)" + + say_verbose "Received response: $response" + # Get results of all the redirects. 
+ http_codes=$( echo "$response" | awk '$1 ~ /^HTTP/ {print $2}' ) + # They all need to be 301, otherwise some links are broken (except for the last, which is not a redirect but 200 or 404). + broken_redirects=$( echo "$http_codes" | sed '$d' | grep -v '301' ) + # The response may end without final code 2xx/4xx/5xx somehow, e.g. network restrictions on www.bing.com causes redirecting to bing.com fails with connection refused. + # In this case it should not exclude the last. + last_http_code=$( echo "$http_codes" | tail -n 1 ) + if ! [[ $last_http_code =~ ^(2|4|5)[0-9][0-9]$ ]]; then + broken_redirects=$( echo "$http_codes" | grep -v '301' ) + fi + + # All HTTP codes are 301 (Moved Permanently), the redirect link exists. + if [[ -z "$broken_redirects" ]]; then + aka_ms_download_link=$( echo "$response" | awk '$1 ~ /^Location/{print $2}' | tail -1 | tr -d '\r') + + if [[ -z "$aka_ms_download_link" ]]; then + say_verbose "The aka.ms link '$aka_ms_link' is not valid: failed to get redirect location." + return 1 + fi + + sanitized_redirect_url=$(sanitize_redirect_url "$aka_ms_download_link") + if [[ -n "$sanitized_redirect_url" ]]; then + aka_ms_download_link="$sanitized_redirect_url" + fi + + say_verbose "The redirect location retrieved: '$aka_ms_download_link'." + return 0 + else + say_verbose "The aka.ms link '$aka_ms_link' is not valid: received HTTP code: $(echo "$broken_redirects" | paste -sd "," -)." + return 1 + fi +} + +get_feeds_to_use() +{ + feeds=( + "https://builds.dotnet.microsoft.com/dotnet" + "https://ci.dot.net/public" + ) + + if [[ -n "$azure_feed" ]]; then + feeds=("$azure_feed") + fi + + if [[ -n "$uncached_feed" ]]; then + feeds=("$uncached_feed") + fi +} + +# THIS FUNCTION MAY EXIT (if the determined version is already installed). +generate_download_links() { + + download_links=() + specific_versions=() + effective_versions=() + link_types=() + + # If generate_akams_links returns false, no fallback to old links. Just terminate. + # This function may also 'exit' (if the determined version is already installed). + generate_akams_links || return + + # Check other feeds only if we haven't been able to find an aka.ms link. + if [[ "${#download_links[@]}" -lt 1 ]]; then + for feed in ${feeds[@]} + do + # generate_regular_links may also 'exit' (if the determined version is already installed). + generate_regular_links $feed || return + done + fi + + if [[ "${#download_links[@]}" -eq 0 ]]; then + say_err "Failed to resolve the exact version number." + return 1 + fi + + say_verbose "Generated ${#download_links[@]} links." + for link_index in ${!download_links[@]} + do + say_verbose "Link $link_index: ${link_types[$link_index]}, ${effective_versions[$link_index]}, ${download_links[$link_index]}" + done +} + +# THIS FUNCTION MAY EXIT (if the determined version is already installed). +generate_akams_links() { + local valid_aka_ms_link=true; + + normalized_version="$(to_lowercase "$version")" + if [[ "$normalized_version" != "latest" ]] && [ -n "$normalized_quality" ]; then + say_err "Quality and Version options are not allowed to be specified simultaneously. See https://learn.microsoft.com/dotnet/core/tools/dotnet-install-script#options for details." 
+ return 1 + fi + + if [[ -n "$json_file" || "$normalized_version" != "latest" ]]; then + # aka.ms links are not needed when exact version is specified via command or json file + return + fi + + get_download_link_from_aka_ms || valid_aka_ms_link=false + + if [[ "$valid_aka_ms_link" == true ]]; then + say_verbose "Retrieved primary payload URL from aka.ms link: '$aka_ms_download_link'." + say_verbose "Downloading using legacy url will not be attempted." + + download_link=$aka_ms_download_link + + #get version from the path + IFS='/' + read -ra pathElems <<< "$download_link" + count=${#pathElems[@]} + specific_version="${pathElems[count-2]}" + unset IFS; + say_verbose "Version: '$specific_version'." + + #Retrieve effective version + effective_version="$(get_specific_product_version "$azure_feed" "$specific_version" "$download_link")" + + # Add link info to arrays + download_links+=($download_link) + specific_versions+=($specific_version) + effective_versions+=($effective_version) + link_types+=("aka.ms") + + # Check if the SDK version is already installed. + if [[ "$dry_run" != true ]] && is_dotnet_package_installed "$install_root" "$asset_relative_path" "$effective_version"; then + say "$asset_name with version '$effective_version' is already installed." + exit 0 + fi + + return 0 + fi + + # if quality is specified - exit with error - there is no fallback approach + if [ ! -z "$normalized_quality" ]; then + say_err "Failed to locate the latest version in the channel '$normalized_channel' with '$normalized_quality' quality for '$normalized_product', os: '$normalized_os', architecture: '$normalized_architecture'." + say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support." + return 1 + fi + say_verbose "Falling back to latest.version file approach." +} + +# THIS FUNCTION MAY EXIT (if the determined version is already installed) +# args: +# feed - $1 +generate_regular_links() { + local feed="$1" + local valid_legacy_download_link=true + + specific_version=$(get_specific_version_from_version "$feed" "$channel" "$normalized_architecture" "$version" "$json_file") || specific_version='0' + + if [[ "$specific_version" == '0' ]]; then + say_verbose "Failed to resolve the specific version number using feed '$feed'" + return + fi + + effective_version="$(get_specific_product_version "$feed" "$specific_version")" + say_verbose "specific_version=$specific_version" + + download_link="$(construct_download_link "$feed" "$channel" "$normalized_architecture" "$specific_version" "$normalized_os")" + say_verbose "Constructed primary named payload URL: $download_link" + + # Add link info to arrays + download_links+=($download_link) + specific_versions+=($specific_version) + effective_versions+=($effective_version) + link_types+=("primary") + + legacy_download_link="$(construct_legacy_download_link "$feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false + + if [ "$valid_legacy_download_link" = true ]; then + say_verbose "Constructed legacy named payload URL: $legacy_download_link" + + download_links+=($legacy_download_link) + specific_versions+=($specific_version) + effective_versions+=($effective_version) + link_types+=("legacy") + else + legacy_download_link="" + say_verbose "Could not construct a legacy_download_link; omitting..." + fi + + # Check if the SDK version is already installed. 
+ if [[ "$dry_run" != true ]] && is_dotnet_package_installed "$install_root" "$asset_relative_path" "$effective_version"; then + say "$asset_name with version '$effective_version' is already installed." + exit 0 + fi +} + +print_dry_run() { + + say "Payload URLs:" + + for link_index in "${!download_links[@]}" + do + say "URL #$link_index - ${link_types[$link_index]}: ${download_links[$link_index]}" + done + + resolved_version=${specific_versions[0]} + repeatable_command="./$script_name --version "\""$resolved_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"" --os "\""$normalized_os"\""" + + if [ ! -z "$normalized_quality" ]; then + repeatable_command+=" --quality "\""$normalized_quality"\""" + fi + + if [[ "$runtime" == "dotnet" ]]; then + repeatable_command+=" --runtime "\""dotnet"\""" + elif [[ "$runtime" == "aspnetcore" ]]; then + repeatable_command+=" --runtime "\""aspnetcore"\""" + fi + + repeatable_command+="$non_dynamic_parameters" + + if [ -n "$feed_credential" ]; then + repeatable_command+=" --feed-credential "\"""\""" + fi + + say "Repeatable invocation: $repeatable_command" +} + calculate_vars() { eval $invocation - valid_legacy_download_link=true + script_name=$(basename "$0") normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")" - say_verbose "normalized_architecture=$normalized_architecture" - + say_verbose "Normalized architecture: '$normalized_architecture'." normalized_os="$(get_normalized_os "$user_defined_os")" - say_verbose "normalized_os=$normalized_os" - - specific_version="$(get_specific_version_from_version "$azure_feed" "$channel" "$normalized_architecture" "$version" "$json_file")" - specific_product_version="$(get_specific_product_version "$azure_feed" "$specific_version")" - say_verbose "specific_version=$specific_version" - if [ -z "$specific_version" ]; then - say_err "Could not resolve version information." - return 1 - fi - - download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version" "$normalized_os")" - say_verbose "Constructed primary named payload URL: $download_link" - - legacy_download_link="$(construct_legacy_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false - - if [ "$valid_legacy_download_link" = true ]; then - say_verbose "Constructed legacy named payload URL: $legacy_download_link" - else - say_verbose "Cound not construct a legacy_download_link; omitting..." - fi - + say_verbose "Normalized OS: '$normalized_os'." + normalized_quality="$(get_normalized_quality "$quality")" + say_verbose "Normalized quality: '$normalized_quality'." + normalized_channel="$(get_normalized_channel "$channel")" + say_verbose "Normalized channel: '$normalized_channel'." + normalized_product="$(get_normalized_product "$runtime")" + say_verbose "Normalized product: '$normalized_product'." install_root="$(resolve_installation_path "$install_dir")" - say_verbose "InstallRoot: $install_root" -} + say_verbose "InstallRoot: '$install_root'." 
-install_dotnet() { - eval $invocation - local download_failed=false - local asset_name='' - local asset_relative_path='' + normalized_architecture="$(get_normalized_architecture_for_specific_sdk_version "$version" "$normalized_channel" "$normalized_architecture")" if [[ "$runtime" == "dotnet" ]]; then asset_relative_path="shared/Microsoft.NETCore.App" @@ -882,89 +1605,60 @@ install_dotnet() { elif [ -z "$runtime" ]; then asset_relative_path="sdk" asset_name=".NET Core SDK" - else - say_err "Invalid value for \$runtime" - return 1 fi - # Check if the SDK version is already installed. - if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_version"; then - say "$asset_name version $specific_version is already installed." - return 0 - fi + get_feeds_to_use +} + +install_dotnet() { + eval $invocation + local download_failed=false + local download_completed=false + local remote_file_size=0 mkdir -p "$install_root" - zip_path="$(mktemp "$temporary_file_template")" - say_verbose "Zip path: $zip_path" + zip_path="${zip_path:-$(mktemp "$temporary_file_template")}" + say_verbose "Archive path: $zip_path" + for link_index in "${!download_links[@]}" + do + download_link="${download_links[$link_index]}" + specific_version="${specific_versions[$link_index]}" + effective_version="${effective_versions[$link_index]}" + link_type="${link_types[$link_index]}" - # Failures are normal in the non-legacy case for ultimately legacy downloads. - # Do not output to stderr, since output to stderr is considered an error. - say "Downloading primary link $download_link" + say "Attempting to download using $link_type link $download_link" - # The download function will set variables $http_code and $download_error_msg in case of failure. - download "$download_link" "$zip_path" 2>&1 || download_failed=true + # The download function will set variables $http_code and $download_error_msg in case of failure. + download_failed=false + download "$download_link" "$zip_path" 2>&1 || download_failed=true - # if the download fails, download the legacy_download_link - if [ "$download_failed" = true ]; then - primary_path_http_code="$http_code"; primary_path_download_error_msg="$download_error_msg" - case $primary_path_http_code in - 404) - say "The resource at $download_link is not available." - ;; - *) - say "$primary_path_download_error_msg" - ;; - esac - rm -f "$zip_path" 2>&1 && say_verbose "Temporary zip file $zip_path was removed" - if [ "$valid_legacy_download_link" = true ]; then - download_failed=false - download_link="$legacy_download_link" - zip_path="$(mktemp "$temporary_file_template")" - say_verbose "Legacy zip path: $zip_path" - - say "Downloading legacy link $download_link" - - # The download function will set variables $http_code and $download_error_msg in case of failure. - download "$download_link" "$zip_path" 2>&1 || download_failed=true - - if [ "$download_failed" = true ]; then - legacy_path_http_code="$http_code"; legacy_path_download_error_msg="$download_error_msg" - case $legacy_path_http_code in - 404) - say "The resource at $download_link is not available." 
- ;; - *) - say "$legacy_path_download_error_msg" - ;; - esac - rm -f "$zip_path" 2>&1 && say_verbose "Temporary zip file $zip_path was removed" - fi - fi - fi - - if [ "$download_failed" = true ]; then - if [[ "$primary_path_http_code" = "404" && ( "$valid_legacy_download_link" = false || "$legacy_path_http_code" = "404") ]]; then - say_err "Could not find \`$asset_name\` with version = $specific_version" - say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support" + if [ "$download_failed" = true ]; then + case $http_code in + 404) + say "The resource at $link_type link '$download_link' is not available." + ;; + *) + say "Failed to download $link_type link '$download_link': $http_code $download_error_msg" + ;; + esac + rm -f "$zip_path" 2>&1 && say_verbose "Temporary archive file $zip_path was removed" else - say_err "Could not download: \`$asset_name\` with version = $specific_version" - # 404-NotFound is an expected response if it goes from only one of the links, do not show that error. - # If primary path is available (not 404-NotFound) then show the primary error else show the legacy error. - if [ "$primary_path_http_code" != "404" ]; then - say_err "$primary_path_download_error_msg" - return 1 - fi - if [[ "$valid_legacy_download_link" = true && "$legacy_path_http_code" != "404" ]]; then - say_err "$legacy_path_download_error_msg" - return 1 - fi + download_completed=true + break fi + done + + if [[ "$download_completed" == false ]]; then + say_err "Could not find \`$asset_name\` with version = $specific_version" + say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support" return 1 fi - say "Extracting zip from $download_link" - extract_dotnet_package "$zip_path" "$install_root" || return 1 + remote_file_size="$(get_remote_file_size "$download_link")" + + say "Extracting archive from $download_link" + extract_dotnet_package "$zip_path" "$install_root" "$remote_file_size" || return 1 # Check if the SDK version is installed; if not, fail the installation. # if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed. @@ -975,19 +1669,21 @@ install_dotnet() { unset IFS; say_verbose "Checking installation: version = $release_version" if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$release_version"; then + say "Installed version is $effective_version" return 0 fi fi # Check if the standard SDK version is installed. - say_verbose "Checking installation: version = $specific_product_version" - if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_product_version"; then + say_verbose "Checking installation: version = $effective_version" + if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$effective_version"; then + say "Installed version is $effective_version" return 0 fi # Version verification failed. More likely something is wrong either with the downloaded content or with the verification algorithm. say_err "Failed to verify the version of installed \`$asset_name\`.\nInstallation source: $download_link.\nInstallation location: $install_root.\nReport the bug at https://github.com/dotnet/install-scripts/issues." - say_err "\`$asset_name\` with version = $specific_product_version failed to install with an unknown error." + say_err "\`$asset_name\` with version = $effective_version failed to install with an error." 
return 1 } @@ -1004,13 +1700,14 @@ install_dir="" architecture="" dry_run=false no_path=false -no_cdn=false -azure_feed="https://dotnetcli.azureedge.net/dotnet" -uncached_feed="https://dotnetcli.blob.core.windows.net/dotnet" +azure_feed="" +uncached_feed="" feed_credential="" verbose=false runtime="" runtime_id="" +quality="" +internal=false override_non_versioned_files=true non_dynamic_parameters="" user_defined_os="" @@ -1027,6 +1724,14 @@ do shift version="$1" ;; + -q|--quality|-[Qq]uality) + shift + quality="$1" + ;; + --internal|-[Ii]nternal) + internal=true + non_dynamic_parameters+=" $name" + ;; -i|--install-dir|-[Ii]nstall[Dd]ir) shift install_dir="$1" @@ -1067,10 +1772,6 @@ do verbose=true non_dynamic_parameters+=" $name" ;; - --no-cdn|-[Nn]o[Cc]dn) - no_cdn=true - non_dynamic_parameters+=" $name" - ;; --azure-feed|-[Aa]zure[Ff]eed) shift azure_feed="$1" @@ -1084,7 +1785,9 @@ do --feed-credential|-[Ff]eed[Cc]redential) shift feed_credential="$1" - non_dynamic_parameters+=" $name "\""$1"\""" + #feed_credential should start with "?", for it to be added to the end of the link. + #adding "?" at the beginning of the feed_credential if needed. + [[ -z "$(echo $feed_credential)" ]] || [[ $feed_credential == \?* ]] || feed_credential="?$feed_credential" ;; --runtime-id|-[Rr]untime[Ii]d) shift @@ -1100,36 +1803,64 @@ do override_non_versioned_files=false non_dynamic_parameters+=" $name" ;; + --keep-zip|-[Kk]eep[Zz]ip) + keep_zip=true + non_dynamic_parameters+=" $name" + ;; + --zip-path|-[Zz]ip[Pp]ath) + shift + zip_path="$1" + ;; -?|--?|-h|--help|-[Hh]elp) - script_name="$(basename "$0")" + script_name="dotnet-install.sh" echo ".NET Tools Installer" - echo "Usage: $script_name [-c|--channel ] [-v|--version ] [-p|--prefix ]" + echo "Usage:" + echo " # Install a .NET SDK of a given Quality from a given Channel" + echo " $script_name [-c|--channel ] [-q|--quality ]" + echo " # Install a .NET SDK of a specific public version" + echo " $script_name [-v|--version ]" echo " $script_name -h|-?|--help" echo "" echo "$script_name is a simple command line interface for obtaining dotnet cli." + echo " Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:" + echo " - The SDK needs to be installed without user interaction and without admin rights." + echo " - The SDK installation doesn't need to persist across multiple CI runs." + echo " To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer." echo "" echo "Options:" echo " -c,--channel Download from the channel specified, Defaults to \`$channel\`." echo " -Channel" echo " Possible values:" - echo " - Current - most current release" - echo " - LTS - most current supported release" + echo " - STS - the most recent Standard Term Support release" + echo " - LTS - the most recent Long Term Support release" echo " - 2-part version in a format A.B - represents a specific release" echo " examples: 2.0; 1.0" - echo " - Branch name" - echo " examples: release/2.0.0; Master" - echo " Note: The version parameter overrides the channel parameter." + echo " - 3-part version in a format A.B.Cxx - represents a specific SDK release" + echo " examples: 5.0.1xx, 5.0.2xx." + echo " Supported since 5.0 release" + echo " Warning: Value 'Current' is deprecated for the Channel parameter. Use 'STS' instead." + echo " Note: The version parameter overrides the channel parameter when any version other than 'latest' is used." 
echo " -v,--version Use specific VERSION, Defaults to \`$version\`." echo " -Version" echo " Possible values:" - echo " - latest - most latest build on specific channel" + echo " - latest - the latest build on specific channel" echo " - 3-part version in a format A.B.C - represents specific version of build" echo " examples: 2.0.0-preview2-006120; 1.1.0" + echo " -q,--quality Download the latest build of specified quality in the channel." + echo " -Quality" + echo " The possible values are: daily, signed, validated, preview, GA." + echo " Works only in combination with channel. Not applicable for STS and LTS channels and will be ignored if those channels are used." + echo " For SDK use channel in A.B.Cxx format. Using quality for SDK together with channel in A.B format is not supported." + echo " Supported since 5.0 release." + echo " Note: The version parameter overrides the channel parameter when any version other than 'latest' is used, and therefore overrides the quality." + echo " --internal,-Internal Download internal builds. Requires providing credentials via --feed-credential parameter." + echo " --feed-credential Token to access Azure feed. Used as a query string to append to the Azure feed." + echo " -FeedCredential This parameter typically is not specified." echo " -i,--install-dir Install under specified location (see Install Location below)" echo " -InstallDir" echo " --architecture Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`." echo " --arch,-Architecture,-Arch" - echo " Possible values: x64, arm, and arm64" + echo " Possible values: x64, arm, arm64, s390x, ppc64le and loongarch64" echo " --os Specifies operating system to be used when selecting the installer." echo " Overrides the OS determination approach used by the script. Supported values: osx, linux, linux-musl, freebsd, rhel.6." echo " In case any other value is provided, the platform will be determined by the script based on machine configuration." @@ -1143,24 +1874,18 @@ do echo " --dry-run,-DryRun Do not perform installation. Display download link." echo " --no-path, -NoPath Do not set PATH for the current process." echo " --verbose,-Verbose Display diagnostics information." - echo " --azure-feed,-AzureFeed Azure feed location. Defaults to $azure_feed, This parameter typically is not changed by the user." - echo " --uncached-feed,-UncachedFeed Uncached feed location. This parameter typically is not changed by the user." - echo " --feed-credential,-FeedCredential Azure feed shared access token. This parameter typically is not specified." + echo " --azure-feed,-AzureFeed For internal use only." + echo " Allows using a different storage to download SDK archives from." + echo " --uncached-feed,-UncachedFeed For internal use only." + echo " Allows using a different storage to download SDK archives from." echo " --skip-non-versioned-files Skips non-versioned files if they already exist, such as the dotnet executable." echo " -SkipNonVersionedFiles" - echo " --no-cdn,-NoCdn Disable downloading from the Azure CDN, and use the uncached feed directly." echo " --jsonfile Determines the SDK version from a user specified global.json file." echo " Note: global.json must have a value for 'SDK:Version'" + echo " --keep-zip,-KeepZip If set, downloaded file is kept." + echo " --zip-path, -ZipPath If set, downloaded file is stored at the specified path." 
echo " -?,--?,-h,--help,-Help Shows this help message" echo "" - echo "Obsolete parameters:" - echo " --shared-runtime The recommended alternative is '--runtime dotnet'." - echo " This parameter is obsolete and may be removed in a future version of this script." - echo " Installs just the shared runtime bits, not the entire SDK." - echo " --runtime-id Installs the .NET Tools for the given platform (use linux-x64 for portable linux)." - echo " -RuntimeId" The parameter is obsolete and may be removed in a future version of this script. Should be used only for versions below 2.1. - echo " For primary links to override OS or/and architecture, use --os and --architecture option instead." - echo "" echo "Install Location:" echo " Location is chosen in following order:" echo " - --install-dir option" @@ -1177,33 +1902,28 @@ do shift done -if [ "$no_cdn" = true ]; then - azure_feed="$uncached_feed" -fi +say_verbose "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:" +say_verbose "- The SDK needs to be installed without user interaction and without admin rights." +say_verbose "- The SDK installation doesn't need to persist across multiple CI runs." +say_verbose "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.\n" -say "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:" -say "- The SDK needs to be installed without user interaction and without admin rights." -say "- The SDK installation doesn't need to persist across multiple CI runs." -say "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.\n" +if [ "$internal" = true ] && [ -z "$(echo $feed_credential)" ]; then + message="Provide credentials via --feed-credential parameter." + if [ "$dry_run" = true ]; then + say_warning "$message" + else + say_err "$message" + exit 1 + fi +fi check_min_reqs calculate_vars -script_name=$(basename "$0") +# generate_regular_links call below will 'exit' if the determined version is already installed. +generate_download_links -if [ "$dry_run" = true ]; then - say "Payload URLs:" - say "Primary named payload URL: $download_link" - if [ "$valid_legacy_download_link" = true ]; then - say "Legacy named payload URL: $legacy_download_link" - fi - repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"" --os "\""$normalized_os"\""" - if [[ "$runtime" == "dotnet" ]]; then - repeatable_command+=" --runtime "\""dotnet"\""" - elif [[ "$runtime" == "aspnetcore" ]]; then - repeatable_command+=" --runtime "\""aspnetcore"\""" - fi - repeatable_command+="$non_dynamic_parameters" - say "Repeatable invocation: $repeatable_command" +if [[ "$dry_run" = true ]]; then + print_dry_run exit 0 fi @@ -1218,5 +1938,5 @@ else fi say "Note that the script does not resolve dependencies during installation." -say "To check the list of dependencies, go to https://docs.microsoft.com/dotnet/core/install, select your operating system and check the \"Dependencies\" section." -say "Installation finished successfully." +say "To check the list of dependencies, go to https://learn.microsoft.com/dotnet/core/install, select your operating system and check the \"Dependencies\" section." +say "Installation finished successfully." 
\ No newline at end of file diff --git a/src/Misc/expressionFunc/hashFiles/package-lock.json b/src/Misc/expressionFunc/hashFiles/package-lock.json index 5cf1db16f..cc92e63f6 100644 --- a/src/Misc/expressionFunc/hashFiles/package-lock.json +++ b/src/Misc/expressionFunc/hashFiles/package-lock.json @@ -20,7 +20,7 @@ "eslint-plugin-github": "^4.10.0", "eslint-plugin-prettier": "^5.0.0", "husky": "^8.0.3", - "lint-staged": "^14.0.0", + "lint-staged": "^15.5.0", "prettier": "^3.0.3", "typescript": "^5.2.2" } @@ -61,10 +61,11 @@ } }, "node_modules/@babel/runtime": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.22.10.tgz", - "integrity": "sha512-21t/fkKLMZI4pqP2wlmsQAWnYW1PDyKyyUV4vCi+B25ydmdaYTKXPwCj0BzSUnZf4seIiYvSA3jcZ3gdsMFkLQ==", + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.10.tgz", + "integrity": "sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw==", "dev": true, + "license": "MIT", "dependencies": { "regenerator-runtime": "^0.14.0" }, @@ -581,27 +582,16 @@ } }, "node_modules/ansi-escapes": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-5.0.0.tgz", - "integrity": "sha512-5GFMVX8HqE/TB+FuBJGuO5XG0WrsA6ptUqoODaT/n9mmUaZFkqnBueB4leqGBCmrUHnCnC4PCZTCd0E7QQ83bA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.0.0.tgz", + "integrity": "sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw==", "dev": true, + "license": "MIT", "dependencies": { - "type-fest": "^1.0.2" + "environment": "^1.0.0" }, "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ansi-escapes/node_modules/type-fest": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", - "integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", - "dev": true, - "engines": { - "node": ">=10" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -825,21 +815,23 @@ } }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, + "license": "MIT", "dependencies": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { "node": ">=8" @@ -951,31 +943,33 @@ } }, "node_modules/cli-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-4.0.0.tgz", - "integrity": 
"sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz", + "integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==", "dev": true, + "license": "MIT", "dependencies": { - "restore-cursor": "^4.0.0" + "restore-cursor": "^5.0.0" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/cli-truncate": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-3.1.0.tgz", - "integrity": "sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-4.0.0.tgz", + "integrity": "sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA==", "dev": true, + "license": "MIT", "dependencies": { "slice-ansi": "^5.0.0", - "string-width": "^5.0.0" + "string-width": "^7.0.0" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -1003,15 +997,17 @@ "version": "2.0.20", "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/commander": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-11.0.0.tgz", - "integrity": "sha512-9HMlXtt/BNoYr8ooyjjNRdIilOTkVJXB+GhxMTtOKwk0R4j4lS4NpjuqmRxroBfnfTSHQIHQB7wryHhXarNjmQ==", + "version": "13.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-13.1.0.tgz", + "integrity": "sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==", "dev": true, + "license": "MIT", "engines": { - "node": ">=16" + "node": ">=18" } }, "node_modules/concat-map": { @@ -1020,10 +1016,11 @@ "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", "dev": true, + "license": "MIT", "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -1040,12 +1037,13 @@ "dev": true }, "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", "dev": true, + "license": "MIT", "dependencies": { - "ms": "2.1.2" + "ms": "^2.1.3" }, "engines": { "node": ">=6.0" @@ -1157,12 +1155,6 @@ "node": ">=6.0.0" } }, - "node_modules/eastasianwidth": { - "version": "0.2.0", 
- "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "dev": true - }, "node_modules/electron-to-chromium": { "version": "1.4.500", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.500.tgz", @@ -1175,6 +1167,19 @@ "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", "dev": true }, + "node_modules/environment": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz", + "integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/es-abstract": { "version": "1.22.1", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.1.tgz", @@ -1734,7 +1739,8 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/execa": { "version": "7.2.0", @@ -1833,10 +1839,11 @@ } }, "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, + "license": "MIT", "dependencies": { "to-regex-range": "^5.0.1" }, @@ -1927,6 +1934,19 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/get-east-asian-width": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.3.0.tgz", + "integrity": "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/get-intrinsic": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", @@ -2349,6 +2369,7 @@ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz", "integrity": "sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -2403,6 +2424,7 @@ "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.12.0" } @@ -2647,46 +2669,52 @@ } }, "node_modules/lilconfig": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", - "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", + "integrity": 
"sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", "dev": true, + "license": "MIT", "engines": { - "node": ">=10" + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" } }, "node_modules/lint-staged": { - "version": "14.0.1", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-14.0.1.tgz", - "integrity": "sha512-Mw0cL6HXnHN1ag0mN/Dg4g6sr8uf8sn98w2Oc1ECtFto9tvRF7nkXGJRbx8gPlHyoR0pLyBr2lQHbWwmUHe1Sw==", + "version": "15.5.0", + "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-15.5.0.tgz", + "integrity": "sha512-WyCzSbfYGhK7cU+UuDDkzUiytbfbi0ZdPy2orwtM75P3WTtQBzmG40cCxIa8Ii2+XjfxzLH6Be46tUfWS85Xfg==", "dev": true, + "license": "MIT", "dependencies": { - "chalk": "5.3.0", - "commander": "11.0.0", - "debug": "4.3.4", - "execa": "7.2.0", - "lilconfig": "2.1.0", - "listr2": "6.6.1", - "micromatch": "4.0.5", - "pidtree": "0.6.0", - "string-argv": "0.3.2", - "yaml": "2.3.1" + "chalk": "^5.4.1", + "commander": "^13.1.0", + "debug": "^4.4.0", + "execa": "^8.0.1", + "lilconfig": "^3.1.3", + "listr2": "^8.2.5", + "micromatch": "^4.0.8", + "pidtree": "^0.6.0", + "string-argv": "^0.3.2", + "yaml": "^2.7.0" }, "bin": { "lint-staged": "bin/lint-staged.js" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": ">=18.12.0" }, "funding": { "url": "https://opencollective.com/lint-staged" } }, "node_modules/lint-staged/node_modules/chalk": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", - "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz", + "integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==", "dev": true, + "license": "MIT", "engines": { "node": "^12.17.0 || ^14.13 || >=16.0.0" }, @@ -2694,29 +2722,82 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/listr2": { - "version": "6.6.1", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-6.6.1.tgz", - "integrity": "sha512-+rAXGHh0fkEWdXBmX+L6mmfmXmXvDGEKzkjxO+8mP3+nI/r/CWznVBvsibXdxda9Zz0OW2e2ikphN3OwCT/jSg==", + "node_modules/lint-staged/node_modules/execa": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", + "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", "dev": true, + "license": "MIT", "dependencies": { - "cli-truncate": "^3.1.0", - "colorette": "^2.0.20", - "eventemitter3": "^5.0.1", - "log-update": "^5.0.1", - "rfdc": "^1.3.0", - "wrap-ansi": "^8.1.0" + "cross-spawn": "^7.0.3", + "get-stream": "^8.0.1", + "human-signals": "^5.0.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^3.0.0" }, "engines": { - "node": ">=16.0.0" + "node": ">=16.17" }, - "peerDependencies": { - "enquirer": ">= 2.3.0 < 3" + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/lint-staged/node_modules/get-stream": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", + "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=16" }, - "peerDependenciesMeta": { - "enquirer": { 
- "optional": true - } + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lint-staged/node_modules/human-signals": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", + "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=16.17.0" + } + }, + "node_modules/lint-staged/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/listr2": { + "version": "8.2.5", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-8.2.5.tgz", + "integrity": "sha512-iyAZCeyD+c1gPyE9qpFu8af0Y+MRtmKOncdGoA2S5EY8iFq99dmmvkNnHiWo+pj0s7yH7l3KPIgee77tKpXPWQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "cli-truncate": "^4.0.0", + "colorette": "^2.0.20", + "eventemitter3": "^5.0.1", + "log-update": "^6.1.0", + "rfdc": "^1.4.1", + "wrap-ansi": "^9.0.0" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/locate-path": { @@ -2765,29 +2846,31 @@ "dev": true }, "node_modules/log-update": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-5.0.1.tgz", - "integrity": "sha512-5UtUDQ/6edw4ofyljDNcOVJQ4c7OjDro4h3y8e1GQL5iYElYclVHJ3zeWchylvMaKnDbDilC8irOVyexnA/Slw==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz", + "integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==", "dev": true, + "license": "MIT", "dependencies": { - "ansi-escapes": "^5.0.0", - "cli-cursor": "^4.0.0", - "slice-ansi": "^5.0.0", - "strip-ansi": "^7.0.1", - "wrap-ansi": "^8.0.1" + "ansi-escapes": "^7.0.0", + "cli-cursor": "^5.0.0", + "slice-ansi": "^7.1.0", + "strip-ansi": "^7.1.0", + "wrap-ansi": "^9.0.0" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/log-update/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -2795,11 +2878,58 @@ "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, + "node_modules/log-update/node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/log-update/node_modules/is-fullwidth-code-point": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.0.0.tgz", + "integrity": "sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA==", + "dev": true, + "license": "MIT", + "dependencies": { + "get-east-asian-width": "^1.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-update/node_modules/slice-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.0.tgz", + "integrity": "sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.2.1", + "is-fullwidth-code-point": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, "node_modules/log-update/node_modules/strip-ansi": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", "dev": true, + "license": "MIT", "dependencies": { "ansi-regex": "^6.0.1" }, @@ -2838,12 +2968,13 @@ } }, "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, + "license": "MIT", "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { @@ -2862,6 +2993,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/mimic-function": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", + "integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -2883,10 +3027,11 @@ } }, "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" }, "node_modules/natural-compare": { "version": "1.4.0", @@ -3311,45 +3456,51 @@ } }, "node_modules/restore-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-4.0.0.tgz", - "integrity": "sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz", + "integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==", "dev": 
true, + "license": "MIT", "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" + "onetime": "^7.0.0", + "signal-exit": "^4.1.0" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/restore-cursor/node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, "node_modules/restore-cursor/node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz", + "integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==", "dev": true, + "license": "MIT", "dependencies": { - "mimic-fn": "^2.1.0" + "mimic-function": "^5.0.0" }, "engines": { - "node": ">=6" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/restore-cursor/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/reusify": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", @@ -3361,10 +3512,11 @@ } }, "node_modules/rfdc": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", - "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==", - "dev": true + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", + "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", + "dev": true, + "license": "MIT" }, "node_modules/rimraf": { "version": "3.0.2", @@ -3610,6 +3762,7 @@ "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-5.0.0.tgz", "integrity": "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==", "dev": true, + "license": "MIT", "dependencies": { "ansi-styles": "^6.0.0", "is-fullwidth-code-point": "^4.0.0" @@ -3626,6 +3779,7 @@ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -3643,27 +3797,29 @@ } }, "node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", "dev": true, + "license": "MIT", "dependencies": { - 
"eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/string-width/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -3671,11 +3827,19 @@ "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, + "node_modules/string-width/node_modules/emoji-regex": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", + "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", + "dev": true, + "license": "MIT" + }, "node_modules/string-width/node_modules/strip-ansi": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", "dev": true, + "license": "MIT", "dependencies": { "ansi-regex": "^6.0.1" }, @@ -3849,6 +4013,7 @@ "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "dev": true, + "license": "MIT", "dependencies": { "is-number": "^7.0.0" }, @@ -4118,27 +4283,29 @@ } }, "node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", + "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", "dev": true, + "license": "MIT", "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, "node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -4151,6 +4318,7 @@ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -4163,6 +4331,7 @@ "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", "dev": true, + "license": "MIT", "dependencies": { "ansi-regex": "^6.0.1" }, @@ -4186,10 +4355,14 @@ "dev": true }, "node_modules/yaml": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.1.tgz", - "integrity": "sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.0.tgz", + "integrity": "sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==", "dev": true, + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, "engines": { "node": ">= 14" } @@ -4241,9 +4414,9 @@ } }, "@babel/runtime": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.22.10.tgz", - "integrity": "sha512-21t/fkKLMZI4pqP2wlmsQAWnYW1PDyKyyUV4vCi+B25ydmdaYTKXPwCj0BzSUnZf4seIiYvSA3jcZ3gdsMFkLQ==", + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.10.tgz", + "integrity": "sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw==", "dev": true, "requires": { "regenerator-runtime": "^0.14.0" @@ -4574,20 +4747,12 @@ } }, "ansi-escapes": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-5.0.0.tgz", - "integrity": "sha512-5GFMVX8HqE/TB+FuBJGuO5XG0WrsA6ptUqoODaT/n9mmUaZFkqnBueB4leqGBCmrUHnCnC4PCZTCd0E7QQ83bA==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.0.0.tgz", + "integrity": "sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw==", "dev": true, "requires": { - "type-fest": "^1.0.2" - }, - "dependencies": { - "type-fest": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", - "integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", - "dev": true - } + "environment": "^1.0.0" } }, "ansi-regex": { @@ -4748,21 +4913,21 @@ } }, "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, "requires": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" } }, "browserslist": { @@ -4819,22 +4984,22 @@ } }, "cli-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-4.0.0.tgz", - "integrity": 
"sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz", + "integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==", "dev": true, "requires": { - "restore-cursor": "^4.0.0" + "restore-cursor": "^5.0.0" } }, "cli-truncate": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-3.1.0.tgz", - "integrity": "sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-4.0.0.tgz", + "integrity": "sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA==", "dev": true, "requires": { "slice-ansi": "^5.0.0", - "string-width": "^5.0.0" + "string-width": "^7.0.0" } }, "color-convert": { @@ -4859,9 +5024,9 @@ "dev": true }, "commander": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-11.0.0.tgz", - "integrity": "sha512-9HMlXtt/BNoYr8ooyjjNRdIilOTkVJXB+GhxMTtOKwk0R4j4lS4NpjuqmRxroBfnfTSHQIHQB7wryHhXarNjmQ==", + "version": "13.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-13.1.0.tgz", + "integrity": "sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==", "dev": true }, "concat-map": { @@ -4870,9 +5035,9 @@ "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", "dev": true, "requires": { "path-key": "^3.1.0", @@ -4887,12 +5052,12 @@ "dev": true }, "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", "dev": true, "requires": { - "ms": "2.1.2" + "ms": "^2.1.3" } }, "deep-is": { @@ -4963,12 +5128,6 @@ "esutils": "^2.0.2" } }, - "eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "dev": true - }, "electron-to-chromium": { "version": "1.4.500", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.500.tgz", @@ -4981,6 +5140,12 @@ "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", "dev": true }, + "environment": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz", + "integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==", + "dev": true + }, "es-abstract": { 
"version": "1.22.1", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.1.tgz", @@ -5488,9 +5653,9 @@ } }, "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, "requires": { "to-regex-range": "^5.0.1" @@ -5561,6 +5726,12 @@ "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", "dev": true }, + "get-east-asian-width": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.3.0.tgz", + "integrity": "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==", + "dev": true + }, "get-intrinsic": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", @@ -6046,49 +6217,84 @@ } }, "lilconfig": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", - "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", + "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", "dev": true }, "lint-staged": { - "version": "14.0.1", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-14.0.1.tgz", - "integrity": "sha512-Mw0cL6HXnHN1ag0mN/Dg4g6sr8uf8sn98w2Oc1ECtFto9tvRF7nkXGJRbx8gPlHyoR0pLyBr2lQHbWwmUHe1Sw==", + "version": "15.5.0", + "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-15.5.0.tgz", + "integrity": "sha512-WyCzSbfYGhK7cU+UuDDkzUiytbfbi0ZdPy2orwtM75P3WTtQBzmG40cCxIa8Ii2+XjfxzLH6Be46tUfWS85Xfg==", "dev": true, "requires": { - "chalk": "5.3.0", - "commander": "11.0.0", - "debug": "4.3.4", - "execa": "7.2.0", - "lilconfig": "2.1.0", - "listr2": "6.6.1", - "micromatch": "4.0.5", - "pidtree": "0.6.0", - "string-argv": "0.3.2", - "yaml": "2.3.1" + "chalk": "^5.4.1", + "commander": "^13.1.0", + "debug": "^4.4.0", + "execa": "^8.0.1", + "lilconfig": "^3.1.3", + "listr2": "^8.2.5", + "micromatch": "^4.0.8", + "pidtree": "^0.6.0", + "string-argv": "^0.3.2", + "yaml": "^2.7.0" }, "dependencies": { "chalk": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", - "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz", + "integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==", + "dev": true + }, + "execa": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", + "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.3", + "get-stream": "^8.0.1", + "human-signals": "^5.0.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^4.1.0", + "strip-final-newline": "^3.0.0" + } + }, 
+ "get-stream": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", + "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "dev": true + }, + "human-signals": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", + "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", + "dev": true + }, + "signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", "dev": true } } }, "listr2": { - "version": "6.6.1", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-6.6.1.tgz", - "integrity": "sha512-+rAXGHh0fkEWdXBmX+L6mmfmXmXvDGEKzkjxO+8mP3+nI/r/CWznVBvsibXdxda9Zz0OW2e2ikphN3OwCT/jSg==", + "version": "8.2.5", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-8.2.5.tgz", + "integrity": "sha512-iyAZCeyD+c1gPyE9qpFu8af0Y+MRtmKOncdGoA2S5EY8iFq99dmmvkNnHiWo+pj0s7yH7l3KPIgee77tKpXPWQ==", "dev": true, "requires": { - "cli-truncate": "^3.1.0", + "cli-truncate": "^4.0.0", "colorette": "^2.0.20", "eventemitter3": "^5.0.1", - "log-update": "^5.0.1", - "rfdc": "^1.3.0", - "wrap-ansi": "^8.1.0" + "log-update": "^6.1.0", + "rfdc": "^1.4.1", + "wrap-ansi": "^9.0.0" } }, "locate-path": { @@ -6131,24 +6337,49 @@ "dev": true }, "log-update": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/log-update/-/log-update-5.0.1.tgz", - "integrity": "sha512-5UtUDQ/6edw4ofyljDNcOVJQ4c7OjDro4h3y8e1GQL5iYElYclVHJ3zeWchylvMaKnDbDilC8irOVyexnA/Slw==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz", + "integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==", "dev": true, "requires": { - "ansi-escapes": "^5.0.0", - "cli-cursor": "^4.0.0", - "slice-ansi": "^5.0.0", - "strip-ansi": "^7.0.1", - "wrap-ansi": "^8.0.1" + "ansi-escapes": "^7.0.0", + "cli-cursor": "^5.0.0", + "slice-ansi": "^7.1.0", + "strip-ansi": "^7.1.0", + "wrap-ansi": "^9.0.0" }, "dependencies": { "ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "dev": true }, + "ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.0.0.tgz", + "integrity": "sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA==", + "dev": true, + "requires": { + "get-east-asian-width": "^1.0.0" + } + }, + "slice-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.0.tgz", + "integrity": 
"sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==", + "dev": true, + "requires": { + "ansi-styles": "^6.2.1", + "is-fullwidth-code-point": "^5.0.0" + } + }, "strip-ansi": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", @@ -6182,12 +6413,12 @@ "dev": true }, "micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, "requires": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" } }, @@ -6197,6 +6428,12 @@ "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", "dev": true }, + "mimic-function": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", + "integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==", + "dev": true + }, "minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -6212,9 +6449,9 @@ "dev": true }, "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "dev": true }, "natural-compare": { @@ -6502,29 +6739,29 @@ "dev": true }, "restore-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-4.0.0.tgz", - "integrity": "sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz", + "integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==", "dev": true, "requires": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" + "onetime": "^7.0.0", + "signal-exit": "^4.1.0" }, "dependencies": { - "mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true - }, "onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz", + "integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==", "dev": true, "requires": { - "mimic-fn": "^2.1.0" + "mimic-function": "^5.0.0" } + }, + "signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true 
} } }, @@ -6535,9 +6772,9 @@ "dev": true }, "rfdc": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.0.tgz", - "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==", + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", + "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", "dev": true }, "rimraf": { @@ -6723,20 +6960,26 @@ "dev": true }, "string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", "dev": true, "requires": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" }, "dependencies": { "ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "dev": true + }, + "emoji-regex": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.4.0.tgz", + "integrity": "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==", "dev": true }, "strip-ansi": { @@ -7038,20 +7281,20 @@ } }, "wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", + "integrity": "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==", "dev": true, "requires": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" }, "dependencies": { "ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "dev": true }, "ansi-styles": { @@ -7084,9 +7327,9 @@ "dev": true }, "yaml": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.1.tgz", - "integrity": "sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.0.tgz", + "integrity": "sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA==", "dev": true }, 
"yocto-queue": { diff --git a/src/Misc/expressionFunc/hashFiles/package.json b/src/Misc/expressionFunc/hashFiles/package.json index 3e83170ec..13a40ae40 100644 --- a/src/Misc/expressionFunc/hashFiles/package.json +++ b/src/Misc/expressionFunc/hashFiles/package.json @@ -11,7 +11,6 @@ "pack": "ncc build -o ../../layoutbin/hashFiles", "all": "npm run format && npm run lint && npm run build && npm run pack", "prepare": "cd ../../../../ && husky install" - }, "repository": { "type": "git", @@ -43,9 +42,9 @@ "eslint": "^8.47.0", "eslint-plugin-github": "^4.10.0", "eslint-plugin-prettier": "^5.0.0", - "prettier": "^3.0.3", - "typescript": "^5.2.2", "husky": "^8.0.3", - "lint-staged": "^14.0.0" + "lint-staged": "^15.5.0", + "prettier": "^3.0.3", + "typescript": "^5.2.2" } -} \ No newline at end of file +} diff --git a/src/Misc/externals.sh b/src/Misc/externals.sh index 383221e44..ca8f6c28c 100755 --- a/src/Misc/externals.sh +++ b/src/Misc/externals.sh @@ -3,12 +3,11 @@ PACKAGERUNTIME=$1 PRECACHE=$2 NODE_URL=https://nodejs.org/dist -UNOFFICIAL_NODE_URL=https://unofficial-builds.nodejs.org/download/release NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download -NODE16_VERSION="16.20.2" -NODE20_VERSION="20.8.1" -# used only for win-arm64, remove node16 unofficial version when official version is available -NODE16_UNOFFICIAL_VERSION="16.20.0" +# When you update Node versions you must also create a new release of alpine_nodejs at that updated version. +# Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started +NODE20_VERSION="20.19.4" +NODE24_VERSION="24.5.0" get_abs_path() { # exploits the fact that pwd will print abs path when no args @@ -139,10 +138,10 @@ function acquireExternalTool() { # Download the external tools only for Windows. if [[ "$PACKAGERUNTIME" == "win-x64" || "$PACKAGERUNTIME" == "win-x86" ]]; then - acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin - acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin + acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.exe" node24/bin + acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.lib" node24/bin if [[ "$PRECACHE" != "" ]]; then acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere fi @@ -151,10 +150,10 @@ fi # Download the external tools only for Windows. if [[ "$PACKAGERUNTIME" == "win-arm64" ]]; then # todo: replace these with official release when available - acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_UNOFFICIAL_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin - acquireExternalTool "$UNOFFICIAL_NODE_URL/v${NODE16_UNOFFICIAL_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.exe" node20/bin acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/$PACKAGERUNTIME/node.lib" node20/bin + acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.exe" node24/bin + acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/$PACKAGERUNTIME/node.lib" node24/bin if [[ "$PRECACHE" != "" ]]; then acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere fi @@ -162,30 +161,29 @@ fi # Download the external tools only for OSX. 
if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then - acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-x64.tar.gz" node16 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-x64.tar.gz" node20 fix_nested_dir + acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-darwin-x64.tar.gz" node24 fix_nested_dir fi if [[ "$PACKAGERUNTIME" == "osx-arm64" ]]; then # node.js v12 doesn't support macOS on arm64. - acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-arm64.tar.gz" node16 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-darwin-arm64.tar.gz" node20 fix_nested_dir + acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-darwin-arm64.tar.gz" node24 fix_nested_dir fi # Download the external tools for Linux PACKAGERUNTIMEs. if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then - acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir - acquireExternalTool "$NODE_ALPINE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-x64.tar.gz" node20 fix_nested_dir acquireExternalTool "$NODE_ALPINE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-alpine-x64.tar.gz" node20_alpine + acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-linux-x64.tar.gz" node24 fix_nested_dir + acquireExternalTool "$NODE_ALPINE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-alpine-x64.tar.gz" node24_alpine fi if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then - acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-arm64.tar.gz" node16 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-arm64.tar.gz" node20 fix_nested_dir + acquireExternalTool "$NODE_URL/v${NODE24_VERSION}/node-v${NODE24_VERSION}-linux-arm64.tar.gz" node24 fix_nested_dir fi if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then - acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-armv7l.tar.gz" node16 fix_nested_dir acquireExternalTool "$NODE_URL/v${NODE20_VERSION}/node-v${NODE20_VERSION}-linux-armv7l.tar.gz" node20 fix_nested_dir fi diff --git a/src/Misc/layoutbin/RunnerService.js b/src/Misc/layoutbin/RunnerService.js index ba0a8c659..1024e8a5e 100644 --- a/src/Misc/layoutbin/RunnerService.js +++ b/src/Misc/layoutbin/RunnerService.js @@ -114,6 +114,11 @@ var runService = function () { ); stopping = true; } + } else if (code === 5) { + console.log( + "Runner listener exit with Session Conflict error, stop the service, no retry needed." 
+ ); + stopping = true; } else { var messagePrefix = "Runner listener exit with undefined return code"; unknownFailureRetryCount++; diff --git a/src/Misc/layoutbin/hashFiles/index.js b/src/Misc/layoutbin/hashFiles/index.js index fa283f880..6e3d2d59d 100644 --- a/src/Misc/layoutbin/hashFiles/index.js +++ b/src/Misc/layoutbin/hashFiles/index.js @@ -3299,7 +3299,7 @@ function expand(str, isTop) { var isOptions = m.body.indexOf(',') >= 0; if (!isSequence && !isOptions) { // {a},b} - if (m.post.match(/,.*\}/)) { + if (m.post.match(/,(?!,).*\}/)) { str = m.pre + '{' + m.body + escClose + m.post; return expand(str); } diff --git a/src/Misc/layoutbin/runsvc.sh b/src/Misc/layoutbin/runsvc.sh index c13564567..63d1b62e1 100755 --- a/src/Misc/layoutbin/runsvc.sh +++ b/src/Misc/layoutbin/runsvc.sh @@ -10,7 +10,7 @@ if [ -f ".path" ]; then echo ".path=${PATH}" fi -nodever=${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node16} +nodever="node20" # insert anything to setup env when running as a service # run the host process which keep the listener alive diff --git a/src/Misc/layoutbin/update.sh.template b/src/Misc/layoutbin/update.sh.template index 4ca6b00e6..82ada18b9 100755 --- a/src/Misc/layoutbin/update.sh.template +++ b/src/Misc/layoutbin/update.sh.template @@ -123,7 +123,7 @@ fi # fix upgrade issue with macOS when running as a service attemptedtargetedfix=0 currentplatform=$(uname | awk '{print tolower($0)}') -if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then +if [[ "$currentplatform" == 'darwin' && $restartinteractiverunner -eq 0 ]]; then # We needed a fix for https://github.com/actions/runner/issues/743 # We will recreate the ./externals/nodeXY/bin/node of the past runner version that launched the runnerlistener service # Otherwise mac gatekeeper kills the processes we spawn on creation as we are running a process with no backing file @@ -135,12 +135,23 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then then # inspect the open file handles to find the node process # we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks - nodever="node16" + # Try finding node24 first, then fallback to earlier versions if needed + nodever="node24" path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-) - if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12 + if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node20 then - nodever="node12" + nodever="node20" path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-) + if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node16 + then + nodever="node16" + path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-) + if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12 + then + nodever="node12" + path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-) + fi + fi fi if [[ $? -eq 0 && -n "$path" ]] then @@ -178,6 +189,19 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then fi fi +# update runsvc.sh +if [ -f "$rootfolder/runsvc.sh" ] +then + date "+[%F %T-%4N] Update runsvc.sh" >> "$logfile" 2>&1 + cat "$rootfolder/bin/runsvc.sh" > "$rootfolder/runsvc.sh" + if [ $? 
-ne 0 ] + then + date "+[%F %T-%4N] Can't update $rootfolder/runsvc.sh using $rootfolder/bin/runsvc.sh" >> "$logfile" 2>&1 + mv -fv "$logfile" "$logfile.failed" + exit 1 + fi +fi + date "+[%F %T-%4N] Update succeed" >> "$logfile" touch update.finished diff --git a/src/Misc/layoutroot/run-helper.cmd.template b/src/Misc/layoutroot/run-helper.cmd.template index 221e8b1c0..6b594d4f3 100644 --- a/src/Misc/layoutroot/run-helper.cmd.template +++ b/src/Misc/layoutroot/run-helper.cmd.template @@ -49,5 +49,10 @@ if %ERRORLEVEL% EQU 4 ( exit /b 1 ) +if %ERRORLEVEL% EQU 5 ( + echo "Runner listener exit with Session Conflict error, stop the service, no retry needed." + exit /b 0 +) + echo "Exiting after unknown error code: %ERRORLEVEL%" exit /b 0 \ No newline at end of file diff --git a/src/Misc/layoutroot/run-helper.sh.template b/src/Misc/layoutroot/run-helper.sh.template index 743fd8b69..9f2b3cc44 100755 --- a/src/Misc/layoutroot/run-helper.sh.template +++ b/src/Misc/layoutroot/run-helper.sh.template @@ -70,6 +70,9 @@ elif [[ $returnCode == 4 ]]; then "$DIR"/safe_sleep.sh 1 done exit 2 +elif [[ $returnCode == 5 ]]; then + echo "Runner listener exit with Session Conflict error, stop the service, no retry needed." + exit 0 else echo "Exiting with unknown error code: ${returnCode}" exit 0 diff --git a/src/Misc/layoutroot/run.sh b/src/Misc/layoutroot/run.sh index 6b02ea18f..57f18ee00 100755 --- a/src/Misc/layoutroot/run.sh +++ b/src/Misc/layoutroot/run.sh @@ -38,7 +38,7 @@ runWithManualTrap() { cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh "$DIR"/run-helper.sh $* & PID=$! - wait -f $PID + wait $PID returnCode=$? if [[ $returnCode -eq 2 ]]; then echo "Restarting runner..." @@ -84,4 +84,4 @@ if [[ -z "$RUNNER_MANUALLY_TRAP_SIG" ]]; then run $* else runWithManualTrap $* -fi \ No newline at end of file +fi diff --git a/src/Misc/layoutroot/safe_sleep.sh b/src/Misc/layoutroot/safe_sleep.sh index 7ba5be325..0c7d12f13 100644 --- a/src/Misc/layoutroot/safe_sleep.sh +++ b/src/Misc/layoutroot/safe_sleep.sh @@ -1,6 +1,6 @@ #!/bin/bash SECONDS=0 -while [[ $SECONDS != $1 ]]; do +while [[ $SECONDS -lt $1 ]]; do : done diff --git a/src/Runner.Common/ActionsRunServer.cs b/src/Runner.Common/ActionsRunServer.cs index 704a690e3..3ded58007 100644 --- a/src/Runner.Common/ActionsRunServer.cs +++ b/src/Runner.Common/ActionsRunServer.cs @@ -20,12 +20,12 @@ namespace GitHub.Runner.Common { private bool _hasConnection; private VssConnection _connection; - private TaskAgentHttpClient _taskAgentClient; + private ActionsRunServerHttpClient _actionsRunServerClient; public async Task ConnectAsync(Uri serverUrl, VssCredentials credentials) { _connection = await EstablishVssConnection(serverUrl, credentials, TimeSpan.FromSeconds(100)); - _taskAgentClient = _connection.GetClient(); + _actionsRunServerClient = _connection.GetClient(); _hasConnection = true; } @@ -42,7 +42,7 @@ namespace GitHub.Runner.Common CheckConnection(); var jobMessage = RetryRequest(async () => { - return await _taskAgentClient.GetJobMessageAsync(id, cancellationToken); + return await _actionsRunServerClient.GetJobMessageAsync(id, cancellationToken); }, cancellationToken); return jobMessage; diff --git a/src/Runner.Common/AuthMigration.cs b/src/Runner.Common/AuthMigration.cs new file mode 100644 index 000000000..a951215f0 --- /dev/null +++ b/src/Runner.Common/AuthMigration.cs @@ -0,0 +1,13 @@ +using System; + +namespace GitHub.Runner.Common +{ + public class AuthMigrationEventArgs : EventArgs + { + public AuthMigrationEventArgs(string trace) + { + Trace = 
trace; + } + public string Trace { get; private set; } + } +} diff --git a/src/Runner.Common/BrokerServer.cs b/src/Runner.Common/BrokerServer.cs index 77bf5d882..751ae1eee 100644 --- a/src/Runner.Common/BrokerServer.cs +++ b/src/Runner.Common/BrokerServer.cs @@ -7,6 +7,7 @@ using GitHub.DistributedTask.Pipelines; using GitHub.DistributedTask.WebApi; using GitHub.Runner.Sdk; using GitHub.Services.Common; +using GitHub.Services.WebApi; using Sdk.RSWebApi.Contracts; using Sdk.WebApi.WebApi.RawClient; @@ -21,6 +22,12 @@ namespace GitHub.Runner.Common Task DeleteSessionAsync(CancellationToken cancellationToken); Task GetRunnerMessageAsync(Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token); + + Task AcknowledgeRunnerRequestAsync(string runnerRequestId, Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, CancellationToken token); + + Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials); + + Task ForceRefreshConnection(VssCredentials credentials); } public sealed class BrokerServer : RunnerService, IBrokerServer @@ -32,6 +39,7 @@ namespace GitHub.Runner.Common public async Task ConnectAsync(Uri serverUri, VssCredentials credentials) { + Trace.Entering(); _brokerUri = serverUri; _connection = VssUtil.CreateRawConnection(serverUri, credentials); @@ -59,15 +67,53 @@ namespace GitHub.Runner.Common { CheckConnection(); var brokerSession = RetryRequest( - async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken); + async () => await _brokerHttpClient.GetRunnerMessageAsync(sessionId, version, status, os, architecture, disableUpdate, cancellationToken), cancellationToken, shouldRetry: ShouldRetryException); return brokerSession; } + public async Task AcknowledgeRunnerRequestAsync(string runnerRequestId, Guid? 
sessionId, TaskAgentStatus status, string version, string os, string architecture, CancellationToken cancellationToken) + { + CheckConnection(); + + // No retries + await _brokerHttpClient.AcknowledgeRunnerRequestAsync(runnerRequestId, sessionId, version, status, os, architecture, cancellationToken); + } + public async Task DeleteSessionAsync(CancellationToken cancellationToken) { CheckConnection(); await _brokerHttpClient.DeleteSessionAsync(cancellationToken); } + + public Task UpdateConnectionIfNeeded(Uri serverUri, VssCredentials credentials) + { + if (_brokerUri != serverUri || !_hasConnection) + { + return ConnectAsync(serverUri, credentials); + } + + return Task.CompletedTask; + } + + public Task ForceRefreshConnection(VssCredentials credentials) + { + if (!string.IsNullOrEmpty(_brokerUri?.AbsoluteUri)) + { + return ConnectAsync(_brokerUri, credentials); + } + + return Task.CompletedTask; + } + + public bool ShouldRetryException(Exception ex) + { + if (ex is AccessDeniedException || ex is RunnerNotFoundException || ex is HostedRunnerDeprovisionedException) + { + return false; + } + + return true; + } } } diff --git a/src/Runner.Common/ConfigurationStore.cs b/src/Runner.Common/ConfigurationStore.cs index 8528e5095..8d47f96c0 100644 --- a/src/Runner.Common/ConfigurationStore.cs +++ b/src/Runner.Common/ConfigurationStore.cs @@ -116,11 +116,15 @@ namespace GitHub.Runner.Common bool IsConfigured(); bool IsServiceConfigured(); bool HasCredentials(); + bool IsMigratedConfigured(); CredentialData GetCredentials(); CredentialData GetMigratedCredentials(); RunnerSettings GetSettings(); + RunnerSettings GetMigratedSettings(); void SaveCredential(CredentialData credential); + void SaveMigratedCredential(CredentialData credential); void SaveSettings(RunnerSettings settings); + void SaveMigratedSettings(RunnerSettings settings); void DeleteCredential(); void DeleteMigratedCredential(); void DeleteSettings(); @@ -130,6 +134,7 @@ namespace GitHub.Runner.Common { private string _binPath; private string _configFilePath; + private string _migratedConfigFilePath; private string _credFilePath; private string _migratedCredFilePath; private string _serviceConfigFilePath; @@ -137,6 +142,7 @@ namespace GitHub.Runner.Common private CredentialData _creds; private CredentialData _migratedCreds; private RunnerSettings _settings; + private RunnerSettings _migratedSettings; public override void Initialize(IHostContext hostContext) { @@ -154,6 +160,9 @@ namespace GitHub.Runner.Common _configFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Runner); Trace.Info("ConfigFilePath: {0}", _configFilePath); + _migratedConfigFilePath = hostContext.GetConfigFile(WellKnownConfigFile.MigratedRunner); + Trace.Info("MigratedConfigFilePath: {0}", _migratedConfigFilePath); + _credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials); Trace.Info("CredFilePath: {0}", _credFilePath); @@ -169,7 +178,7 @@ namespace GitHub.Runner.Common public bool HasCredentials() { Trace.Info("HasCredentials()"); - bool credsStored = (new FileInfo(_credFilePath)).Exists || (new FileInfo(_migratedCredFilePath)).Exists; + bool credsStored = new FileInfo(_credFilePath).Exists || new FileInfo(_migratedCredFilePath).Exists; Trace.Info("stored {0}", credsStored); return credsStored; } @@ -177,7 +186,7 @@ namespace GitHub.Runner.Common public bool IsConfigured() { Trace.Info("IsConfigured()"); - bool configured = new FileInfo(_configFilePath).Exists; + bool configured = new FileInfo(_configFilePath).Exists || new 
FileInfo(_migratedConfigFilePath).Exists; Trace.Info("IsConfigured: {0}", configured); return configured; } @@ -185,11 +194,19 @@ namespace GitHub.Runner.Common public bool IsServiceConfigured() { Trace.Info("IsServiceConfigured()"); - bool serviceConfigured = (new FileInfo(_serviceConfigFilePath)).Exists; + bool serviceConfigured = new FileInfo(_serviceConfigFilePath).Exists; Trace.Info($"IsServiceConfigured: {serviceConfigured}"); return serviceConfigured; } + public bool IsMigratedConfigured() + { + Trace.Info("IsMigratedConfigured()"); + bool configured = new FileInfo(_migratedConfigFilePath).Exists; + Trace.Info("IsMigratedConfigured: {0}", configured); + return configured; + } + public CredentialData GetCredentials() { if (_creds == null) @@ -229,6 +246,25 @@ namespace GitHub.Runner.Common return _settings; } + public RunnerSettings GetMigratedSettings() + { + if (_migratedSettings == null) + { + RunnerSettings configuredSettings = null; + if (File.Exists(_migratedConfigFilePath)) + { + string json = File.ReadAllText(_migratedConfigFilePath, Encoding.UTF8); + Trace.Info($"Read migrated setting file: {json.Length} chars"); + configuredSettings = StringUtil.ConvertFromJson(json); + } + + ArgUtil.NotNull(configuredSettings, nameof(configuredSettings)); + _migratedSettings = configuredSettings; + } + + return _migratedSettings; + } + public void SaveCredential(CredentialData credential) { Trace.Info("Saving {0} credential @ {1}", credential.Scheme, _credFilePath); @@ -244,6 +280,21 @@ namespace GitHub.Runner.Common File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden); } + public void SaveMigratedCredential(CredentialData credential) + { + Trace.Info("Saving {0} migrated credential @ {1}", credential.Scheme, _migratedCredFilePath); + if (File.Exists(_migratedCredFilePath)) + { + // Delete existing credential file first, since the file is hidden and not able to overwrite. + Trace.Info("Delete exist runner migrated credential file."); + IOUtil.DeleteFile(_migratedCredFilePath); + } + + IOUtil.SaveObject(credential, _migratedCredFilePath); + Trace.Info("Migrated Credentials Saved."); + File.SetAttributes(_migratedCredFilePath, File.GetAttributes(_migratedCredFilePath) | FileAttributes.Hidden); + } + public void SaveSettings(RunnerSettings settings) { Trace.Info("Saving runner settings."); @@ -259,6 +310,21 @@ namespace GitHub.Runner.Common File.SetAttributes(_configFilePath, File.GetAttributes(_configFilePath) | FileAttributes.Hidden); } + public void SaveMigratedSettings(RunnerSettings settings) + { + Trace.Info("Saving runner migrated settings"); + if (File.Exists(_migratedConfigFilePath)) + { + // Delete existing settings file first, since the file is hidden and not able to overwrite. 
+ Trace.Info("Delete exist runner migrated settings file."); + IOUtil.DeleteFile(_migratedConfigFilePath); + } + + IOUtil.SaveObject(settings, _migratedConfigFilePath); + Trace.Info("Migrated Settings Saved."); + File.SetAttributes(_migratedConfigFilePath, File.GetAttributes(_migratedConfigFilePath) | FileAttributes.Hidden); + } + public void DeleteCredential() { IOUtil.Delete(_credFilePath, default(CancellationToken)); @@ -273,6 +339,12 @@ namespace GitHub.Runner.Common public void DeleteSettings() { IOUtil.Delete(_configFilePath, default(CancellationToken)); + IOUtil.Delete(_migratedConfigFilePath, default(CancellationToken)); + } + + public void DeleteMigratedSettings() + { + IOUtil.Delete(_migratedConfigFilePath, default(CancellationToken)); } } } diff --git a/src/Runner.Common/Constants.cs b/src/Runner.Common/Constants.cs index 177e3c98f..6c288eb2d 100644 --- a/src/Runner.Common/Constants.cs +++ b/src/Runner.Common/Constants.cs @@ -18,6 +18,7 @@ namespace GitHub.Runner.Common public enum WellKnownConfigFile { Runner, + MigratedRunner, Credentials, MigratedCredentials, RSACredentials, @@ -153,15 +154,37 @@ namespace GitHub.Runner.Common public const int RetryableError = 2; public const int RunnerUpdating = 3; public const int RunOnceRunnerUpdating = 4; + public const int SessionConflict = 5; + // Temporary error code to indicate that the runner configuration has been refreshed + // and the runner should be restarted. This is a temporary code and will be removed in the future after + // the runner is migrated to runner admin. + public const int RunnerConfigurationRefreshed = 6; } public static class Features { public static readonly string DiskSpaceWarning = "runner.diskspace.warning"; - public static readonly string Node16Warning = "DistributedTask.AddWarningToNode16Action"; public static readonly string LogTemplateErrorsAsDebugMessages = "DistributedTask.LogTemplateErrorsAsDebugMessages"; public static readonly string UseContainerPathForTemplate = "DistributedTask.UseContainerPathForTemplate"; public static readonly string AllowRunnerContainerHooks = "DistributedTask.AllowRunnerContainerHooks"; + public static readonly string AddCheckRunIdToJobContext = "actions_add_check_run_id_to_job_context"; + public static readonly string DisplayHelpfulActionsDownloadErrors = "actions_display_helpful_actions_download_errors"; + } + + // Node version migration related constants + public static class NodeMigration + { + // Node versions + public static readonly string Node20 = "node20"; + public static readonly string Node24 = "node24"; + + // Environment variables for controlling node version selection + public static readonly string ForceNode24Variable = "FORCE_JAVASCRIPT_ACTIONS_TO_NODE24"; + public static readonly string AllowUnsecureNodeVersionVariable = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION"; + + // Feature flags for controlling the migration phases + public static readonly string UseNode24ByDefaultFlag = "actions.runner.usenode24bydefault"; + public static readonly string RequireNode24Flag = "actions.runner.requirenode24"; } public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry"; @@ -175,11 +198,6 @@ namespace GitHub.Runner.Common public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. 
For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`."; public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary"; public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary"; - public static readonly string DetectedNodeAfterEndOfLifeMessage = "Node.js 16 actions are deprecated. Please update the following actions to use Node.js 20: {0}. For more information see: https://github.blog/changelog/2023-09-22-github-actions-transitioning-from-node-16-to-node-20/."; - public static readonly string DeprecatedNodeDetectedAfterEndOfLifeActions = "DeprecatedNodeActionsMessageWarnings"; - public static readonly string DeprecatedNodeVersion = "node16"; - public static readonly string EnforcedNode12DetectedAfterEndOfLife = "The following actions uses node12 which is deprecated and will be forced to run on node16: {0}. For more info: https://github.blog/changelog/2023-06-13-github-actions-all-actions-will-run-on-node16-instead-of-node12-by-default/"; - public static readonly string EnforcedNode12DetectedAfterEndOfLifeEnvVariable = "Node16ForceActionsWarnings"; } public static class RunnerEvent @@ -250,14 +268,13 @@ namespace GitHub.Runner.Common public static readonly string RequireJobContainer = "ACTIONS_RUNNER_REQUIRE_JOB_CONTAINER"; public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG"; public static readonly string StepDebug = "ACTIONS_STEP_DEBUG"; - public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION"; } public static class Agent { public static readonly string ToolsDirectory = "agent.ToolsDirectory"; - // Set this env var to "node12" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions. + // Set this env var to "nodeXY" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions. 
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION"; public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION"; public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT"; diff --git a/src/Runner.Common/HostContext.cs b/src/Runner.Common/HostContext.cs index 78ea8ba4c..ffb08684a 100644 --- a/src/Runner.Common/HostContext.cs +++ b/src/Runner.Common/HostContext.cs @@ -15,6 +15,7 @@ using System.Threading.Tasks; using GitHub.DistributedTask.Logging; using GitHub.Runner.Common.Util; using GitHub.Runner.Sdk; +using GitHub.Services.WebApi.Jwt; namespace GitHub.Runner.Common { @@ -36,6 +37,12 @@ namespace GitHub.Runner.Common event EventHandler Unloading; void ShutdownRunner(ShutdownReason reason); void WritePerfCounter(string counter); + void LoadDefaultUserAgents(); + + bool AllowAuthMigration { get; } + void EnableAuthMigration(string trace); + void DeferAuthMigration(TimeSpan deferred, string trace); + event EventHandler AuthMigrationChanged; } public enum StartupType @@ -67,17 +74,28 @@ namespace GitHub.Runner.Common private StartupType _startupType; private string _perfFile; private RunnerWebProxy _webProxy = new(); + private string _hostType = string.Empty; + + // disable auth migration by default + private readonly ManualResetEventSlim _allowAuthMigration = new ManualResetEventSlim(false); + private DateTime _deferredAuthMigrationTime = DateTime.MaxValue; + private readonly object _authMigrationLock = new object(); + private CancellationTokenSource _authMigrationAutoReenableTaskCancellationTokenSource = new(); + private Task _authMigrationAutoReenableTask; public event EventHandler Unloading; + public event EventHandler AuthMigrationChanged; public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token; public ShutdownReason RunnerShutdownReason { get; private set; } public ISecretMasker SecretMasker => _secretMasker; public List UserAgents => _userAgents; public RunnerWebProxy WebProxy => _webProxy; + public bool AllowAuthMigration => _allowAuthMigration.IsSet; public HostContext(string hostType, string logFile = null) { // Validate args. ArgUtil.NotNullOrEmpty(hostType, nameof(hostType)); + _hostType = hostType; _loadContext = AssemblyLoadContext.GetLoadContext(typeof(HostContext).GetTypeInfo().Assembly); _loadContext.Unloading += LoadContext_Unloading; @@ -196,6 +214,81 @@ namespace GitHub.Runner.Common } } + if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY"))) + { + _trace.Warning($"Runner is running under insecure mode: HTTPS server certificate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable."); + } + + LoadDefaultUserAgents(); + } + + // marked as internal for testing + internal async Task AuthMigrationAuthReenableAsync(TimeSpan refreshInterval, CancellationToken token) + { + try + { + while (!token.IsCancellationRequested) + { + _trace.Verbose($"Auth migration defer timer is set to expire at {_deferredAuthMigrationTime.ToString("O")}. AllowAuthMigration: {_allowAuthMigration.IsSet}."); + await Task.Delay(refreshInterval, token); + if (!_allowAuthMigration.IsSet && DateTime.UtcNow > _deferredAuthMigrationTime) + { + _trace.Info($"Auth migration defer timer expired. Allowing auth migration."); + EnableAuthMigration("Auth migration defer timer expired."); + } + } + } + catch (TaskCanceledException) + { + // Task was cancelled, exit the loop. 
+ } + catch (Exception ex) + { + _trace.Info("Error in auth migration reenable task."); + _trace.Error(ex); + } + } + + public void EnableAuthMigration(string trace) + { + _allowAuthMigration.Set(); + + lock (_authMigrationLock) + { + if (_authMigrationAutoReenableTask == null) + { + var refreshIntervalInMS = 60 * 1000; +#if DEBUG + // For L0, we will refresh faster + if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL"))) + { + refreshIntervalInMS = int.Parse(Environment.GetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL")); + } +#endif + _authMigrationAutoReenableTask = AuthMigrationAuthReenableAsync(TimeSpan.FromMilliseconds(refreshIntervalInMS), _authMigrationAutoReenableTaskCancellationTokenSource.Token); + } + } + + _trace.Info($"Enable auth migration at {DateTime.UtcNow.ToString("O")}."); + AuthMigrationChanged?.Invoke(this, new AuthMigrationEventArgs(trace)); + } + + public void DeferAuthMigration(TimeSpan deferred, string trace) + { + _allowAuthMigration.Reset(); + + // defer migration for a while + lock (_authMigrationLock) + { + _deferredAuthMigrationTime = DateTime.UtcNow.Add(deferred); + } + + _trace.Info($"Disabled auth migration until {_deferredAuthMigrationTime.ToString("O")}."); + AuthMigrationChanged?.Invoke(this, new AuthMigrationEventArgs(trace)); + } + + public void LoadDefaultUserAgents() + { if (string.IsNullOrEmpty(WebProxy.HttpProxyAddress) && string.IsNullOrEmpty(WebProxy.HttpsProxyAddress)) { _trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)"); @@ -205,11 +298,6 @@ namespace GitHub.Runner.Common _userAgents.Add(new ProductInfoHeaderValue("HttpProxyConfigured", bool.TrueString)); } - if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY"))) - { - _trace.Warning($"Runner is running under insecure mode: HTTPS server certificate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable."); - } - var credFile = GetConfigFile(WellKnownConfigFile.Credentials); if (File.Exists(credFile)) { @@ -219,6 +307,36 @@ namespace GitHub.Runner.Common { _userAgents.Add(new ProductInfoHeaderValue("ClientId", clientId)); } + + // for Hosted runner, we can pull orchestrationId from JWT claims of the runner listening token. 
+ if (credData != null && + credData.Scheme == Constants.Configuration.OAuthAccessToken && + credData.Data.TryGetValue(Constants.Runner.CommandLine.Args.Token, out var accessToken) && + !string.IsNullOrEmpty(accessToken)) + { + try + { + var jwt = JsonWebToken.Create(accessToken); + var claims = jwt.ExtractClaims(); + var orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orch_id", StringComparison.OrdinalIgnoreCase))?.Value; + if (string.IsNullOrEmpty(orchestrationId)) + { + // fallback to orchid for C# actions-service + orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value; + } + + if (!string.IsNullOrEmpty(orchestrationId)) + { + _trace.Info($"Pull OrchestrationId {orchestrationId} from runner JWT claims"); + _userAgents.Insert(0, new ProductInfoHeaderValue("OrchestrationId", orchestrationId)); + } + } + catch (Exception ex) + { + _trace.Error("Fail to extract OrchestrationId from runner JWT claims"); + _trace.Error(ex); + } + } } var runnerFile = GetConfigFile(WellKnownConfigFile.Runner); @@ -244,6 +362,11 @@ namespace GitHub.Runner.Common _trace.Info($"Adding extra user agent '{extraUserAgentHeader}' to all HTTP requests."); _userAgents.Add(extraUserAgentHeader); } + + var currentProcess = Process.GetCurrentProcess(); + _userAgents.Add(new ProductInfoHeaderValue("Pid", currentProcess.Id.ToString())); + _userAgents.Add(new ProductInfoHeaderValue("CreationTime", Uri.EscapeDataString(DateTime.UtcNow.ToString("O")))); + _userAgents.Add(new ProductInfoHeaderValue($"({_hostType})")); } public string GetDirectory(WellKnownDirectory directory) @@ -330,6 +453,12 @@ namespace GitHub.Runner.Common ".runner"); break; + case WellKnownConfigFile.MigratedRunner: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".runner_migrated"); + break; + case WellKnownConfigFile.Credentials: path = Path.Combine( GetDirectory(WellKnownDirectory.Root), @@ -530,6 +659,18 @@ namespace GitHub.Runner.Common _loadContext.Unloading -= LoadContext_Unloading; _loadContext = null; } + + if (_authMigrationAutoReenableTask != null) + { + _authMigrationAutoReenableTaskCancellationTokenSource?.Cancel(); + } + + if (_authMigrationAutoReenableTaskCancellationTokenSource != null) + { + _authMigrationAutoReenableTaskCancellationTokenSource?.Dispose(); + _authMigrationAutoReenableTaskCancellationTokenSource = null; + } + _httpTraceSubscription?.Dispose(); _diagListenerSubscription?.Dispose(); _traceManager?.Dispose(); @@ -616,7 +757,7 @@ namespace GitHub.Runner.Common payload[0] = Enum.Parse(typeof(GitHub.Services.Common.VssCredentialsType), ((int)payload[0]).ToString()); } - if (payload.Length > 0) + if (payload.Length > 0 && !string.IsNullOrEmpty(eventData.Message)) { message = String.Format(eventData.Message.Replace("%n", Environment.NewLine), payload); } diff --git a/src/Runner.Common/JobServer.cs b/src/Runner.Common/JobServer.cs index ec90d879c..eac20a2b9 100644 --- a/src/Runner.Common/JobServer.cs +++ b/src/Runner.Common/JobServer.cs @@ -4,6 +4,7 @@ using System.IO; using System.Linq; using System.Net.Http; using System.Net.Http.Headers; +using System.Net.Security; using System.Net.WebSockets; using System.Text; using System.Threading; @@ -179,6 +180,10 @@ namespace GitHub.Runner.Common userAgentValues.AddRange(UserAgentUtility.GetDefaultRestUserAgent()); userAgentValues.AddRange(HostContext.UserAgents); this._websocketClient.Options.SetRequestHeader("User-Agent", string.Join(" ", userAgentValues.Select(x => x.ToString()))); + 
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY"))) + { + this._websocketClient.Options.RemoteCertificateValidationCallback = (_, _, _, _) => true; + } this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay); } diff --git a/src/Runner.Common/JobServerQueue.cs b/src/Runner.Common/JobServerQueue.cs index c1425b807..74c12bea2 100644 --- a/src/Runner.Common/JobServerQueue.cs +++ b/src/Runner.Common/JobServerQueue.cs @@ -19,7 +19,7 @@ namespace GitHub.Runner.Common TaskCompletionSource JobRecordUpdated { get; } event EventHandler JobServerQueueThrottling; Task ShutdownAsync(); - void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false); + void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false); void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null); void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource); void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines); @@ -74,6 +74,7 @@ namespace GitHub.Runner.Common private readonly List _jobTelemetries = new(); private bool _queueInProcess = false; private bool _resultsServiceOnly = false; + private int _resultsServiceExceptionsCount = 0; private Stopwatch _resultsUploadTimer = new(); private Stopwatch _actionsUploadTimer = new(); @@ -104,11 +105,10 @@ namespace GitHub.Runner.Common _resultsServer = hostContext.GetService(); } - public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false, bool enableTelemetry = false) + public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultsServiceOnly = false) { Trace.Entering(); _resultsServiceOnly = resultsServiceOnly; - _enableTelemetry = enableTelemetry; var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); @@ -139,6 +139,12 @@ namespace GitHub.Runner.Common _resultsClientInitiated = true; } + // Enable telemetry if we have both results service and actions service + if (_resultsClientInitiated && !_resultsServiceOnly) + { + _enableTelemetry = true; + } + if (_queueInProcess) { Trace.Info("No-opt, all queue process tasks are running."); @@ -574,9 +580,9 @@ namespace GitHub.Runner.Common Trace.Info("Catch exception during file upload to results, keep going since the process is best effort."); Trace.Error(ex); errorCount++; - + _resultsServiceExceptionsCount++; // If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs - if (!_resultsServiceOnly) + if (!_resultsServiceOnly && _resultsServiceExceptionsCount > 3) { _resultsClientInitiated = false; SendResultsTelemetry(ex); @@ -607,7 +613,7 @@ namespace GitHub.Runner.Common private void SendResultsTelemetry(Exception ex) { - var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception with results. {ex.Message}" }; + var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception with results. 
{HostContext.SecretMasker.MaskSecrets(ex.Message)}" }; issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure; var telemetryRecord = new TimelineRecord() @@ -703,7 +709,9 @@ namespace GitHub.Runner.Common { Trace.Info("Catch exception during update steps, skip update Results."); Trace.Error(e); - if (!_resultsServiceOnly) + _resultsServiceExceptionsCount++; + // If we hit any exceptions uploading to Results, let's skip any additional uploads to Results unless Results is serving logs + if (!_resultsServiceOnly && _resultsServiceExceptionsCount > 3) { _resultsClientInitiated = false; SendResultsTelemetry(e); diff --git a/src/Runner.Common/LaunchServer.cs b/src/Runner.Common/LaunchServer.cs index e1b1b0f4f..6fb69833e 100644 --- a/src/Runner.Common/LaunchServer.cs +++ b/src/Runner.Common/LaunchServer.cs @@ -1,11 +1,12 @@ using System; using System.Collections.Generic; -using System.Linq; +using System.Net.Http; using System.Threading; using System.Threading.Tasks; using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Sdk; +using GitHub.Services.Common; using GitHub.Services.Launch.Client; -using GitHub.Services.WebApi; namespace GitHub.Runner.Common { @@ -14,7 +15,7 @@ namespace GitHub.Runner.Common { void InitializeLaunchClient(Uri uri, string token); - Task ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken); + Task ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken, bool displayHelpfulActionsDownloadErrors); } public sealed class LaunchServer : RunnerService, ILaunchServer @@ -23,17 +24,34 @@ namespace GitHub.Runner.Common public void InitializeLaunchClient(Uri uri, string token) { - var httpMessageHandler = HostContext.CreateHttpClientHandler(); - this._launchClient = new LaunchHttpClient(uri, httpMessageHandler, token, disposeHandler: true); + // Using default 100 timeout + RawClientHttpRequestSettings settings = VssUtil.GetHttpRequestSettings(null); + + // Create retry handler + IEnumerable delegatingHandlers = new List(); + if (settings.MaxRetryRequest > 0) + { + delegatingHandlers = new DelegatingHandler[] { new VssHttpRetryMessageHandler(settings.MaxRetryRequest) }; + } + + // Setup RawHttpMessageHandler without credentials + var httpMessageHandler = new RawHttpMessageHandler(new NoOpCredentials(null), settings); + var pipeline = HttpClientFactory.CreatePipeline(httpMessageHandler, delegatingHandlers); + + this._launchClient = new LaunchHttpClient(uri, pipeline, token, disposeHandler: true); } public Task ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, - CancellationToken cancellationToken) + CancellationToken cancellationToken, bool displayHelpfulActionsDownloadErrors) { if (_launchClient != null) { - return _launchClient.GetResolveActionsDownloadInfoAsync(planId, jobId, actionReferenceList, - cancellationToken: cancellationToken); + if (!displayHelpfulActionsDownloadErrors) + { + return _launchClient.GetResolveActionsDownloadInfoAsync(planId, jobId, actionReferenceList, + cancellationToken: cancellationToken); + } + return _launchClient.GetResolveActionsDownloadInfoAsyncV2(planId, jobId, actionReferenceList, cancellationToken); } throw new InvalidOperationException("Launch client is not initialized."); diff --git a/src/Runner.Common/RunServer.cs b/src/Runner.Common/RunServer.cs index fbd9ff96a..b57d2754b 100644 --- 
a/src/Runner.Common/RunServer.cs +++ b/src/Runner.Common/RunServer.cs @@ -5,6 +5,7 @@ using System.Threading.Tasks; using GitHub.Actions.RunService.WebApi; using GitHub.DistributedTask.Pipelines; using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; using GitHub.Runner.Sdk; using GitHub.Services.Common; using Sdk.RSWebApi.Contracts; @@ -17,7 +18,7 @@ namespace GitHub.Runner.Common { Task ConnectAsync(Uri serverUrl, VssCredentials credentials); - Task GetJobMessageAsync(string id, CancellationToken token); + Task GetJobMessageAsync(string id, string billingOwnerId, CancellationToken token); Task CompleteJobAsync( Guid planId, @@ -27,6 +28,8 @@ namespace GitHub.Runner.Common IList stepResults, IList jobAnnotations, string environmentUrl, + IList telemetry, + string billingOwnerId, CancellationToken token); Task RenewJobAsync(Guid planId, Guid jobId, CancellationToken token); @@ -56,12 +59,15 @@ namespace GitHub.Runner.Common } } - public Task GetJobMessageAsync(string id, CancellationToken cancellationToken) + public Task GetJobMessageAsync(string id, string billingOwnerId, CancellationToken cancellationToken) { CheckConnection(); return RetryRequest( - async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken, - shouldRetry: ex => ex is not TaskOrchestrationJobAlreadyAcquiredException); + async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, VarUtil.OS, billingOwnerId, cancellationToken), cancellationToken, + shouldRetry: ex => + ex is not TaskOrchestrationJobNotFoundException && // HTTP status 404 + ex is not TaskOrchestrationJobAlreadyAcquiredException && // HTTP status 409 + ex is not TaskOrchestrationJobUnprocessableException); // HTTP status 422 } public Task CompleteJobAsync( @@ -72,18 +78,25 @@ namespace GitHub.Runner.Common IList stepResults, IList jobAnnotations, string environmentUrl, + IList telemetry, + string billingOwnerId, CancellationToken cancellationToken) { CheckConnection(); return RetryRequest( - async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, cancellationToken), cancellationToken); + async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, environmentUrl, telemetry, billingOwnerId, cancellationToken), cancellationToken, + shouldRetry: ex => + ex is not VssUnauthorizedException && // HTTP status 401 + ex is not TaskOrchestrationJobNotFoundException); // HTTP status 404 } public Task RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken) { CheckConnection(); return RetryRequest( - async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken); + async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken, + shouldRetry: ex => + ex is not TaskOrchestrationJobNotFoundException); // HTTP status 404 } } } diff --git a/src/Runner.Common/Runner.Common.csproj b/src/Runner.Common/Runner.Common.csproj index 329a024aa..6c4635626 100644 --- a/src/Runner.Common/Runner.Common.csproj +++ b/src/Runner.Common/Runner.Common.csproj @@ -1,11 +1,11 @@ - net6.0 + net8.0 Library win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64 true - NU1701;NU1603 + NU1701;NU1603;SYSLIB0050;SYSLIB0051 $(Version) @@ -15,11 +15,11 @@ - + - - - + + + diff --git 
a/src/Runner.Common/RunnerDotcomServer.cs b/src/Runner.Common/RunnerDotcomServer.cs index 0f6d3ce1d..c021e8bcc 100644 --- a/src/Runner.Common/RunnerDotcomServer.cs +++ b/src/Runner.Common/RunnerDotcomServer.cs @@ -19,6 +19,7 @@ namespace GitHub.Runner.Common Task AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey); Task ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey); + Task DeleteRunnerAsync(string githubUrl, string githubToken, ulong runnerId); Task> GetRunnerGroupsAsync(string githubUrl, string githubToken); } @@ -43,89 +44,15 @@ namespace GitHub.Runner.Common public async Task> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName) { - var githubApiUrl = ""; - var gitHubUrlBuilder = new UriBuilder(githubUrl); - var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries); - if (path.Length == 1) - { - // org runner - if (UrlUtil.IsHostedServer(gitHubUrlBuilder)) - { - githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}"; - } - else - { - githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}"; - } - } - else if (path.Length == 2) - { - // repo or enterprise runner. - if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase)) - { - return null; - } - - if (UrlUtil.IsHostedServer(gitHubUrlBuilder)) - { - githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}"; - } - else - { - githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}"; - } - } - else - { - throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise."); - } - + var githubApiUrl = $"{GetEntityUrl(githubUrl)}/runners?name={Uri.EscapeDataString(agentName)}"; var runnersList = await RetryRequest(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools"); - return runnersList.ToTaskAgents(); } public async Task> GetRunnerGroupsAsync(string githubUrl, string githubToken) { - var githubApiUrl = ""; - var gitHubUrlBuilder = new UriBuilder(githubUrl); - var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries); - if (path.Length == 1) - { - // org runner - if (UrlUtil.IsHostedServer(gitHubUrlBuilder)) - { - githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups"; - } - else - { - githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups"; - } - } - else if (path.Length == 2) - { - // repo or enterprise runner. 
- if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase)) - { - return null; - } - - if (UrlUtil.IsHostedServer(gitHubUrlBuilder)) - { - githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups"; - } - else - { - githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups"; - } - } - else - { - throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise."); - } - + var githubApiUrl = $"{GetEntityUrl(githubUrl)}/runner-groups"; var agentPools = await RetryRequest(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools"); - return agentPools?.ToAgentPoolList(); } @@ -176,6 +103,12 @@ namespace GitHub.Runner.Common return await RetryRequest(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body); } + public async Task DeleteRunnerAsync(string githubUrl, string githubToken, ulong runnerId) + { + var githubApiUrl = $"{GetEntityUrl(githubUrl)}/runners/{runnerId}"; + await RetryRequest(githubApiUrl, githubToken, RequestType.Delete, 3, "Failed to delete agent"); + } + private async Task RetryRequest(string githubApiUrl, string githubToken, RequestType requestType, int maxRetryAttemptsCount = 5, string errorMessage = null, StringContent body = null) { int retry = 0; @@ -192,13 +125,22 @@ namespace GitHub.Runner.Common try { HttpResponseMessage response = null; - if (requestType == RequestType.Get) + switch (requestType) { - response = await httpClient.GetAsync(githubApiUrl); - } - else - { - response = await httpClient.PostAsync(githubApiUrl, body); + case RequestType.Get: + response = await httpClient.GetAsync(githubApiUrl); + break; + case RequestType.Post: + response = await httpClient.PostAsync(githubApiUrl, body); + break; + case RequestType.Patch: + response = await httpClient.PatchAsync(githubApiUrl, body); + break; + case RequestType.Delete: + response = await httpClient.DeleteAsync(githubApiUrl); + break; + default: + throw new ArgumentOutOfRangeException(nameof(requestType), requestType, null); } if (response != null) @@ -233,5 +175,61 @@ namespace GitHub.Runner.Common await Task.Delay(backOff); } } + + private string GetEntityUrl(string githubUrl) + { + var githubApiUrl = ""; + var gitHubUrlBuilder = new UriBuilder(githubUrl); + var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries); + var isOrgRunner = path.Length == 1; + var isRepoOrEnterpriseRunner = path.Length == 2; + var isRepoRunner = isRepoOrEnterpriseRunner && !string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase); + + if (isOrgRunner) + { + // org runner + if (UrlUtil.IsHostedServer(gitHubUrlBuilder)) + { + githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions"; + } + else + { + githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions"; + } + } + else if (isRepoOrEnterpriseRunner) + { + // Repository Runner + if (isRepoRunner) + { + if (UrlUtil.IsHostedServer(gitHubUrlBuilder)) + { + githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/repos/{path[0]}/{path[1]}/actions"; + } + else + { + githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/repos/{path[0]}/{path[1]}/actions"; + } + } + else + { + // Enterprise Runner + if (UrlUtil.IsHostedServer(gitHubUrlBuilder)) + { + githubApiUrl = 
$"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions"; + } + else + { + githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions"; + } + } + } + else + { + throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise."); + } + + return githubApiUrl; + } } } diff --git a/src/Runner.Common/RunnerServer.cs b/src/Runner.Common/RunnerServer.cs index 139ac684f..b2e4e498a 100644 --- a/src/Runner.Common/RunnerServer.cs +++ b/src/Runner.Common/RunnerServer.cs @@ -1,11 +1,11 @@ -using GitHub.DistributedTask.WebApi; -using System; +using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; -using GitHub.Services.WebApi; -using GitHub.Services.Common; +using GitHub.DistributedTask.WebApi; using GitHub.Runner.Sdk; +using GitHub.Services.Common; +using GitHub.Services.WebApi; namespace GitHub.Runner.Common { @@ -50,7 +50,10 @@ namespace GitHub.Runner.Common Task GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken); // agent update - Task UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace); + Task UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace, CancellationToken cancellationToken = default); + + // runner config refresh + Task RefreshRunnerConfigAsync(int agentId, string configType, string encodedRunnerConfig, CancellationToken cancellationToken); } public sealed class RunnerServer : RunnerService, IRunnerServer @@ -315,10 +318,17 @@ namespace GitHub.Runner.Common return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken); } - public Task UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace) + public Task UpdateAgentUpdateStateAsync(int agentPoolId, ulong agentId, string currentState, string trace, CancellationToken cancellationToken = default) { CheckConnection(RunnerConnectionType.Generic); - return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace); + return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace, cancellationToken: cancellationToken); + } + + // runner config refresh + public Task RefreshRunnerConfigAsync(int agentId, string configType, string encodedRunnerConfig, CancellationToken cancellationToken) + { + CheckConnection(RunnerConnectionType.Generic); + return _genericTaskAgentClient.RefreshRunnerConfigAsync(agentId, configType, encodedRunnerConfig, cancellationToken: cancellationToken); } } } diff --git a/src/Runner.Common/RunnerService.cs b/src/Runner.Common/RunnerService.cs index a18ff9674..ccaa83f69 100644 --- a/src/Runner.Common/RunnerService.cs +++ b/src/Runner.Common/RunnerService.cs @@ -70,7 +70,8 @@ namespace GitHub.Runner.Common protected async Task RetryRequest(Func func, CancellationToken cancellationToken, - int maxRetryAttemptsCount = 5 + int maxAttempts = 5, + Func shouldRetry = null ) { async Task wrappedFunc() @@ -78,31 +79,31 @@ namespace GitHub.Runner.Common await func(); return Unit.Value; } - await RetryRequest(wrappedFunc, cancellationToken, maxRetryAttemptsCount); + await RetryRequest(wrappedFunc, cancellationToken, maxAttempts, shouldRetry); } protected async Task RetryRequest(Func> func, CancellationToken cancellationToken, - int maxRetryAttemptsCount = 5, + int 
maxAttempts = 5, Func shouldRetry = null ) { - var retryCount = 0; + var attempt = 0; while (true) { - retryCount++; + attempt++; cancellationToken.ThrowIfCancellationRequested(); try { return await func(); } // TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122 - catch (Exception ex) when (retryCount < maxRetryAttemptsCount && (shouldRetry == null || shouldRetry(ex))) + catch (Exception ex) when (attempt < maxAttempts && (shouldRetry == null || shouldRetry(ex))) { Trace.Error("Catch exception during request"); Trace.Error(ex); var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15)); - Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxRetryAttemptsCount - retryCount} attempt left."); + Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxAttempts - attempt} attempt left."); await Task.Delay(backOff, cancellationToken); } } diff --git a/src/Runner.Common/Util/NodeUtil.cs b/src/Runner.Common/Util/NodeUtil.cs index f2c01d7d3..ff1a7a0af 100644 --- a/src/Runner.Common/Util/NodeUtil.cs +++ b/src/Runner.Common/Util/NodeUtil.cs @@ -1,12 +1,35 @@ using System; +using System.Collections.Generic; using System.Collections.ObjectModel; +using GitHub.Runner.Sdk; namespace GitHub.Runner.Common.Util { public static class NodeUtil { - private const string _defaultNodeVersion = "node16"; - public static readonly ReadOnlyCollection BuiltInNodeVersions = new(new[] { "node16", "node20" }); + /// + /// Represents details about an environment variable, including its value and source + /// + private class EnvironmentVariableInfo + { + /// + /// Gets or sets whether the value evaluates to true + /// + public bool IsTrue { get; set; } + + /// + /// Gets or sets whether the value came from the workflow environment + /// + public bool FromWorkflow { get; set; } + + /// + /// Gets or sets whether the value came from the system environment + /// + public bool FromSystem { get; set; } + } + + private const string _defaultNodeVersion = "node20"; + public static readonly ReadOnlyCollection BuiltInNodeVersions = new(new[] { "node20" }); public static string GetInternalNodeVersion() { var forcedInternalNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion); @@ -18,5 +41,122 @@ namespace GitHub.Runner.Common.Util } return _defaultNodeVersion; } + /// + /// Determines the appropriate Node version for Actions to use + /// + /// Optional dictionary containing workflow-level environment variables + /// Feature flag indicating if Node 24 should be the default + /// Feature flag indicating if Node 24 is required + /// The Node version to use (node20 or node24) and warning message if both env vars are set + public static (string nodeVersion, string warningMessage) DetermineActionsNodeVersion( + IDictionary workflowEnvironment = null, + bool useNode24ByDefault = false, + bool requireNode24 = false) + { + // Phase 3: Always use Node 24 regardless of environment variables + if (requireNode24) + { + return (Constants.Runner.NodeMigration.Node24, null); + } + + // Get environment variable details with source information + var forceNode24Details = GetEnvironmentVariableDetails( + Constants.Runner.NodeMigration.ForceNode24Variable, workflowEnvironment); + + var allowUnsecureNodeDetails = GetEnvironmentVariableDetails( + Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable, workflowEnvironment); + + bool forceNode24 = 
forceNode24Details.IsTrue; + bool allowUnsecureNode = allowUnsecureNodeDetails.IsTrue; + string warningMessage = null; + + // Check if both flags are set from the same source + bool bothFromWorkflow = forceNode24Details.IsTrue && allowUnsecureNodeDetails.IsTrue && + forceNode24Details.FromWorkflow && allowUnsecureNodeDetails.FromWorkflow; + + bool bothFromSystem = forceNode24Details.IsTrue && allowUnsecureNodeDetails.IsTrue && + forceNode24Details.FromSystem && allowUnsecureNodeDetails.FromSystem; + + // Handle the case when both are set in the same source + if (bothFromWorkflow || bothFromSystem) + { + string source = bothFromWorkflow ? "workflow" : "system"; + string defaultVersion = useNode24ByDefault ? Constants.Runner.NodeMigration.Node24 : Constants.Runner.NodeMigration.Node20; + warningMessage = $"Both {Constants.Runner.NodeMigration.ForceNode24Variable} and {Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable} environment variables are set to true in the {source} environment. This is likely a configuration error. Using the default Node version: {defaultVersion}."; + return (defaultVersion, warningMessage); + } + + // Phase 2: Node 24 is the default + if (useNode24ByDefault) + { + if (allowUnsecureNode) + { + return (Constants.Runner.NodeMigration.Node20, null); + } + + return (Constants.Runner.NodeMigration.Node24, null); + } + + // Phase 1: Node 20 is the default + if (forceNode24) + { + return (Constants.Runner.NodeMigration.Node24, null); + } + + return (Constants.Runner.NodeMigration.Node20, null); + } + + /// + /// Checks if Node24 is requested but running on ARM32 Linux, and determines if fallback is needed. + /// + /// The preferred Node version + /// A tuple containing the adjusted node version and an optional warning message + public static (string nodeVersion, string warningMessage) CheckNodeVersionForLinuxArm32(string preferredVersion) + { + if (string.Equals(preferredVersion, Constants.Runner.NodeMigration.Node24, StringComparison.OrdinalIgnoreCase) && + Constants.Runner.PlatformArchitecture.Equals(Constants.Architecture.Arm) && + Constants.Runner.Platform.Equals(Constants.OSPlatform.Linux)) + { + return (Constants.Runner.NodeMigration.Node20, "Node 24 is not supported on Linux ARM32 platforms. 
Falling back to Node 20."); + } + + return (preferredVersion, null); + } + + /// + /// Gets detailed information about an environment variable from both workflow and system environments + /// + /// The name of the environment variable + /// Optional dictionary containing workflow-level environment variables + /// An EnvironmentVariableInfo object containing details about the variable from both sources + private static EnvironmentVariableInfo GetEnvironmentVariableDetails(string variableName, IDictionary workflowEnvironment) + { + var info = new EnvironmentVariableInfo(); + + // Check workflow environment + bool foundInWorkflow = false; + string workflowValue = null; + + if (workflowEnvironment != null && workflowEnvironment.TryGetValue(variableName, out workflowValue)) + { + foundInWorkflow = true; + info.FromWorkflow = true; + info.IsTrue = StringUtil.ConvertToBoolean(workflowValue); // Workflow value takes precedence for the boolean value + } + + // Also check system environment + string systemValue = Environment.GetEnvironmentVariable(variableName); + bool foundInSystem = !string.IsNullOrEmpty(systemValue); + + info.FromSystem = foundInSystem; + + // If not found in workflow, use system values + if (!foundInWorkflow) + { + info.IsTrue = StringUtil.ConvertToBoolean(systemValue); + } + + return info; + } } } diff --git a/src/Runner.Listener/BrokerMessageListener.cs b/src/Runner.Listener/BrokerMessageListener.cs index 3781855b7..7c9ca401c 100644 --- a/src/Runner.Listener/BrokerMessageListener.cs +++ b/src/Runner.Listener/BrokerMessageListener.cs @@ -9,11 +9,12 @@ using System.Threading; using System.Threading.Tasks; using GitHub.DistributedTask.WebApi; using GitHub.Runner.Common; +using GitHub.Runner.Common.Util; using GitHub.Runner.Listener.Configuration; using GitHub.Runner.Sdk; using GitHub.Services.Common; -using GitHub.Runner.Common.Util; using GitHub.Services.OAuth; +using GitHub.Services.WebApi; namespace GitHub.Runner.Listener { @@ -22,34 +23,67 @@ namespace GitHub.Runner.Listener private RunnerSettings _settings; private ITerminal _term; private TimeSpan _getNextMessageRetryInterval; - private TaskAgentStatus runnerStatus = TaskAgentStatus.Online; + private TaskAgentStatus _runnerStatus = TaskAgentStatus.Online; private CancellationTokenSource _getMessagesTokenSource; private VssCredentials _creds; + private VssCredentials _credsV2; private TaskAgentSession _session; + private IRunnerServer _runnerServer; private IBrokerServer _brokerServer; + private ICredentialManager _credMgr; private readonly Dictionary _sessionCreationExceptionTracker = new(); private bool _accessTokenRevoked = false; private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30); private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4); private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30); + private bool _needRefreshCredsV2 = false; + private bool _handlerInitialized = false; + private bool _isMigratedSettings = false; + private const int _maxMigratedSettingsRetries = 3; + private int _migratedSettingsRetryCount = 0; + public BrokerMessageListener() + { + } + + public BrokerMessageListener(RunnerSettings settings, bool isMigratedSettings = false) + { + _settings = settings; + _isMigratedSettings = isMigratedSettings; + } public override void Initialize(IHostContext hostContext) { base.Initialize(hostContext); _term = HostContext.GetService(); + _runnerServer = HostContext.GetService(); _brokerServer = HostContext.GetService(); + _credMgr = 
HostContext.GetService(); } - public async Task CreateSessionAsync(CancellationToken token) + public async Task CreateSessionAsync(CancellationToken token) { Trace.Entering(); - // Settings - var configManager = HostContext.GetService(); - _settings = configManager.LoadSettings(); - var serverUrl = _settings.ServerUrlV2; + // Load settings if not provided through constructor + if (_settings == null) + { + var configManager = HostContext.GetService(); + _settings = configManager.LoadSettings(); + Trace.Info("Settings loaded from config manager"); + } + else + { + Trace.Info("Using provided settings"); + if (_isMigratedSettings) + { + Trace.Info("Using migrated settings from .runner_migrated"); + } + } + + var serverUrlV2 = _settings.ServerUrlV2; + var serverUrl = _settings.ServerUrl; Trace.Info(_settings); if (string.IsNullOrEmpty(_settings.ServerUrlV2)) @@ -59,8 +93,7 @@ namespace GitHub.Runner.Listener // Create connection. Trace.Info("Loading Credentials"); - var credMgr = HostContext.GetService(); - _creds = credMgr.LoadCredentials(); + _creds = _credMgr.LoadCredentials(allowAuthUrlV2: false); var agent = new TaskAgentReference { @@ -69,7 +102,8 @@ namespace GitHub.Runner.Listener Version = BuildConstants.RunnerPackage.Version, OSDescription = RuntimeInformation.OSDescription, }; - string sessionName = $"{Environment.MachineName ?? "RUNNER"}"; + var currentProcess = Process.GetCurrentProcess(); + string sessionName = $"{Environment.MachineName ?? "RUNNER"} (PID: {currentProcess.Id})"; var taskAgentSession = new TaskAgentSession(sessionName, agent); string errorMessage = string.Empty; @@ -82,9 +116,18 @@ namespace GitHub.Runner.Listener try { Trace.Info("Connecting to the Broker Server..."); - await _brokerServer.ConnectAsync(new Uri(serverUrl), _creds); + _credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true); + await _brokerServer.ConnectAsync(new Uri(serverUrlV2), _credsV2); Trace.Info("VssConnection created"); + if (!string.IsNullOrEmpty(serverUrl) && + !string.Equals(serverUrl, serverUrlV2, StringComparison.OrdinalIgnoreCase)) + { + Trace.Info("Connecting to the Runner server..."); + await _runnerServer.ConnectAsync(new Uri(serverUrl), _creds); + Trace.Info("VssConnection created"); + } + _term.WriteLine(); _term.WriteSuccessMessage("Connected to GitHub"); _term.WriteLine(); @@ -99,7 +142,14 @@ namespace GitHub.Runner.Listener encounteringError = false; } - return true; + if (!_handlerInitialized) + { + // Register event handler for auth migration state change + HostContext.AuthMigrationChanged += HandleAuthMigrationChanged; + _handlerInitialized = true; + } + + return CreateSessionResult.Success; } catch (OperationCanceledException) when (token.IsCancellationRequested) { @@ -117,31 +167,57 @@ namespace GitHub.Runner.Listener Trace.Error("Catch exception during create session."); Trace.Error(ex); - if (ex is VssOAuthTokenRequestException vssOAuthEx && _creds.Federated is VssOAuthCredential vssOAuthCred) + // If using migrated settings, limit the number of retries before returning failure + if (_isMigratedSettings) + { + _migratedSettingsRetryCount++; + Trace.Warning($"Migrated settings retry {_migratedSettingsRetryCount} of {_maxMigratedSettingsRetries}"); + + if (_migratedSettingsRetryCount >= _maxMigratedSettingsRetries) + { + Trace.Warning("Reached maximum retry attempts for migrated settings. 
Returning failure to try default settings."); + return CreateSessionResult.Failure; + } + } + + if (!HostContext.AllowAuthMigration && + ex is VssOAuthTokenRequestException vssOAuthEx && + _credsV2.Federated is VssOAuthCredential vssOAuthCred) { // "invalid_client" means the runner registration has been deleted from the server. if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase)) { _term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently."); - return false; + return CreateSessionResult.Failure; } // Check whether we get 401 because the runner registration already removed by the service. // If the runner registration get deleted, we can't exchange oauth token. Trace.Error("Test oauth app registration."); - var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl)); + var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrlV2)); var authError = await oauthTokenProvider.ValidateCredentialAsync(token); if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase)) { _term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently."); - return false; + return CreateSessionResult.Failure; } } - if (!IsSessionCreationExceptionRetriable(ex)) + if (!HostContext.AllowAuthMigration && + !IsSessionCreationExceptionRetriable(ex)) { _term.WriteError($"Failed to create session. {ex.Message}"); - return false; + if (ex is TaskAgentSessionConflictException) + { + return CreateSessionResult.SessionConflict; + } + return CreateSessionResult.Failure; + } + + if (HostContext.AllowAuthMigration) + { + Trace.Info("Disable migration mode for 60 minutes."); + HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Session creation failed with exception: {ex}"); } if (!encounteringError) //print the message only on the first error @@ -160,6 +236,11 @@ namespace GitHub.Runner.Listener { if (_session != null && _session.SessionId != Guid.Empty) { + if (_handlerInitialized) + { + HostContext.AuthMigrationChanged -= HandleAuthMigrationChanged; + } + if (!_accessTokenRevoked) { using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30))) @@ -177,7 +258,7 @@ namespace GitHub.Runner.Listener public void OnJobStatus(object sender, JobStatusEventArgs e) { Trace.Info("Received job status event. JobState: {0}", e.Status); - runnerStatus = e.Status; + _runnerStatus = e.Status; try { _getMessagesTokenSource?.Cancel(); @@ -202,8 +283,15 @@ namespace GitHub.Runner.Listener _getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token); try { + if (_needRefreshCredsV2) + { + Trace.Info("Refreshing broker connection."); + await RefreshBrokerConnectionAsync(); + _needRefreshCredsV2 = false; + } + message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId, - runnerStatus, + _runnerStatus, BuildConstants.RunnerPackage.Version, VarUtil.OS, VarUtil.OSArchitecture, @@ -232,7 +320,16 @@ namespace GitHub.Runner.Listener Trace.Info("Runner OAuth token has been revoked. 
Unable to pull message."); throw; } - catch (AccessDeniedException e) when (e.ErrorCode == 1) + catch (HostedRunnerDeprovisionedException) + { + Trace.Info("Hosted runner has been deprovisioned."); + throw; + } + catch (AccessDeniedException e) when (e.ErrorCode == 1 && !HostContext.AllowAuthMigration) + { + throw; + } + catch (RunnerNotFoundException) when (!HostContext.AllowAuthMigration) { throw; } @@ -241,7 +338,8 @@ namespace GitHub.Runner.Listener Trace.Error("Catch exception during get next message."); Trace.Error(ex); - if (!IsGetNextMessageExceptionRetriable(ex)) + if (!HostContext.AllowAuthMigration && + !IsGetNextMessageExceptionRetriable(ex)) { throw new NonRetryableException("Get next message failed with non-retryable error.", ex); } @@ -272,8 +370,14 @@ namespace GitHub.Runner.Listener encounteringError = true; } + if (HostContext.AllowAuthMigration) + { + Trace.Info("Disable migration mode for 60 minutes."); + HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Get next message failed with exception: {ex}"); + } + // re-create VssConnection before next retry - await RefreshBrokerConnection(); + await RefreshBrokerConnectionAsync(); Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds); await HostContext.Delay(_getNextMessageRetryInterval, token); @@ -303,17 +407,38 @@ namespace GitHub.Runner.Listener } } + public async Task RefreshListenerTokenAsync() + { + await RefreshBrokerConnectionAsync(); + } + public async Task DeleteMessageAsync(TaskAgentMessage message) { await Task.CompletedTask; } + public async Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken) + { + using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // Short timeout + using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token); + Trace.Info($"Acknowledging runner request '{runnerRequestId}'."); + await _brokerServer.AcknowledgeRunnerRequestAsync( + runnerRequestId, + _session.SessionId, + _runnerStatus, + BuildConstants.RunnerPackage.Version, + VarUtil.OS, + VarUtil.OSArchitecture, + linkedCts.Token); + } + private bool IsGetNextMessageExceptionRetriable(Exception ex) { if (ex is TaskAgentNotFoundException || ex is TaskAgentPoolNotFoundException || ex is TaskAgentSessionExpiredException || ex is AccessDeniedException || + ex is RunnerNotFoundException || ex is VssUnauthorizedException) { Trace.Info($"Non-retriable exception: {ex.Message}"); @@ -398,19 +523,18 @@ namespace GitHub.Runner.Listener } } - private async Task RefreshBrokerConnection() + private async Task RefreshBrokerConnectionAsync() { - var configManager = HostContext.GetService(); - _settings = configManager.LoadSettings(); + Trace.Info("Reload credentials."); + _credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true); + await _brokerServer.ConnectAsync(new Uri(_settings.ServerUrlV2), _credsV2); + Trace.Info("Connection to Broker Server recreated."); + } - if (string.IsNullOrEmpty(_settings.ServerUrlV2)) - { - throw new InvalidOperationException("ServerUrlV2 is not set"); - } - - var credMgr = HostContext.GetService(); - VssCredentials creds = credMgr.LoadCredentials(); - await _brokerServer.ConnectAsync(new Uri(_settings.ServerUrlV2), creds); + private void HandleAuthMigrationChanged(object sender, EventArgs e) + { + Trace.Info($"Auth migration changed. 
Current allow auth migration state: {HostContext.AllowAuthMigration}"); + _needRefreshCredsV2 = true; } } } diff --git a/src/Runner.Listener/Configuration/ConfigurationManager.cs b/src/Runner.Listener/Configuration/ConfigurationManager.cs index 05b93cb58..c3da7f8e5 100644 --- a/src/Runner.Listener/Configuration/ConfigurationManager.cs +++ b/src/Runner.Listener/Configuration/ConfigurationManager.cs @@ -25,6 +25,7 @@ namespace GitHub.Runner.Listener.Configuration Task UnconfigureAsync(CommandSettings command); void DeleteLocalRunnerConfig(); RunnerSettings LoadSettings(); + RunnerSettings LoadMigratedSettings(); } public sealed class ConfigurationManager : RunnerService, IConfigurationManager @@ -66,6 +67,22 @@ namespace GitHub.Runner.Listener.Configuration return settings; } + public RunnerSettings LoadMigratedSettings() + { + Trace.Info(nameof(LoadMigratedSettings)); + + // Check if migrated settings file exists + if (!_store.IsMigratedConfigured()) + { + throw new NonRetryableException("No migrated configuration found."); + } + + RunnerSettings settings = _store.GetMigratedSettings(); + Trace.Info("Migrated Settings Loaded"); + + return settings; + } + public async Task ConfigureAsync(CommandSettings command) { _term.WriteLine(); @@ -127,7 +144,7 @@ namespace GitHub.Runner.Listener.Configuration runnerSettings.ServerUrl = inputUrl; // Get the credentials credProvider = GetCredentialProvider(command, runnerSettings.ServerUrl); - creds = credProvider.GetVssCredentials(HostContext); + creds = credProvider.GetVssCredentials(HostContext, allowAuthUrlV2: false); Trace.Info("legacy vss cred retrieved"); } else @@ -366,10 +383,18 @@ namespace GitHub.Runner.Listener.Configuration { { "clientId", agent.Authorization.ClientId.ToString("D") }, { "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri }, - { "requireFipsCryptography", agent.Properties.GetValue("RequireFipsCryptography", false).ToString() } + { "requireFipsCryptography", agent.Properties.GetValue("RequireFipsCryptography", true).ToString() } }, }; + if (agent.Properties.GetValue("EnableAuthMigrationByDefault", false) && + agent.Properties.TryGetValue("AuthorizationUrlV2", out var authUrlV2) && + !string.IsNullOrEmpty(authUrlV2)) + { + credentialData.Data["enableAuthMigrationByDefault"] = "true"; + credentialData.Data["authorizationUrlV2"] = authUrlV2; + } + // Save the negotiated OAuth credential data _store.SaveCredential(credentialData); } @@ -384,7 +409,7 @@ namespace GitHub.Runner.Listener.Configuration if (!runnerSettings.UseV2Flow) { var credMgr = HostContext.GetService(); - VssCredentials credential = credMgr.LoadCredentials(); + VssCredentials credential = credMgr.LoadCredentials(allowAuthUrlV2: false); try { await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential); @@ -404,6 +429,20 @@ namespace GitHub.Runner.Listener.Configuration } } + // allow the server to override the serverUrlV2 and useV2Flow + if (agent.Properties.TryGetValue("ServerUrlV2", out string serverUrlV2) && + !string.IsNullOrEmpty(serverUrlV2)) + { + Trace.Info($"Service enforced serverUrlV2: {serverUrlV2}"); + runnerSettings.ServerUrlV2 = serverUrlV2; + } + + if (agent.Properties.TryGetValue("UseV2Flow", out bool useV2Flow) && useV2Flow) + { + Trace.Info($"Service enforced useV2Flow: {useV2Flow}"); + runnerSettings.UseV2Flow = useV2Flow; + } + _term.WriteSection("Runner settings"); // We will Combine() what's stored with root. 
Defaults to string a relative path @@ -498,41 +537,50 @@ namespace GitHub.Runner.Listener.Configuration if (isConfigured && hasCredentials) { RunnerSettings settings = _store.GetSettings(); - var credentialManager = HostContext.GetService(); - // Get the credentials - VssCredentials creds = null; - if (string.IsNullOrEmpty(settings.GitHubUrl)) - { - var credProvider = GetCredentialProvider(command, settings.ServerUrl); - creds = credProvider.GetVssCredentials(HostContext); - Trace.Info("legacy vss cred retrieved"); - } - else + if (settings.UseV2Flow) { var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove"); - GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove); - creds = authResult.ToVssCredentials(); - Trace.Info("cred retrieved via GitHub auth"); - } - - // Determine the service deployment type based on connection data. (Hosted/OnPremises) - await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds); - - var agents = await _runnerServer.GetAgentsAsync(settings.AgentName); - Trace.Verbose("Returns {0} agents", agents.Count); - TaskAgent agent = agents.FirstOrDefault(); - if (agent == null) - { - _term.WriteLine("Does not exist. Skipping " + currentAction); + await _dotcomServer.DeleteRunnerAsync(settings.GitHubUrl, deletionToken, settings.AgentId); } else { - await _runnerServer.DeleteAgentAsync(settings.AgentId); + var credentialManager = HostContext.GetService(); - _term.WriteLine(); - _term.WriteSuccessMessage("Runner removed successfully"); + // Get the credentials + VssCredentials creds = null; + if (string.IsNullOrEmpty(settings.GitHubUrl)) + { + var credProvider = GetCredentialProvider(command, settings.ServerUrl); + creds = credProvider.GetVssCredentials(HostContext, allowAuthUrlV2: false); + Trace.Info("legacy vss cred retrieved"); + } + else + { + var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove"); + GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove); + creds = authResult.ToVssCredentials(); + Trace.Info("cred retrieved via GitHub auth"); + } + + // Determine the service deployment type based on connection data. (Hosted/OnPremises) + await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds); + + var agents = await _runnerServer.GetAgentsAsync(settings.AgentName); + Trace.Verbose("Returns {0} agents", agents.Count); + TaskAgent agent = agents.FirstOrDefault(); + if (agent == null) + { + _term.WriteLine("Does not exist. 
Skipping " + currentAction); + } + else + { + await _runnerServer.DeleteAgentAsync(settings.AgentId); + } } + + _term.WriteLine(); + _term.WriteSuccessMessage("Runner removed successfully"); } else { diff --git a/src/Runner.Listener/Configuration/CredentialManager.cs b/src/Runner.Listener/Configuration/CredentialManager.cs index f13fb1207..89e76a22d 100644 --- a/src/Runner.Listener/Configuration/CredentialManager.cs +++ b/src/Runner.Listener/Configuration/CredentialManager.cs @@ -13,7 +13,7 @@ namespace GitHub.Runner.Listener.Configuration public interface ICredentialManager : IRunnerService { ICredentialProvider GetCredentialProvider(string credType); - VssCredentials LoadCredentials(); + VssCredentials LoadCredentials(bool allowAuthUrlV2); } public class CredentialManager : RunnerService, ICredentialManager @@ -40,7 +40,7 @@ namespace GitHub.Runner.Listener.Configuration return creds; } - public VssCredentials LoadCredentials() + public VssCredentials LoadCredentials(bool allowAuthUrlV2) { IConfigurationStore store = HostContext.GetService(); @@ -51,21 +51,16 @@ namespace GitHub.Runner.Listener.Configuration CredentialData credData = store.GetCredentials(); var migratedCred = store.GetMigratedCredentials(); - if (migratedCred != null) + if (migratedCred != null && + migratedCred.Scheme == Constants.Configuration.OAuth) { credData = migratedCred; - - // Re-write .credentials with Token URL - store.SaveCredential(credData); - - // Delete .credentials_migrated - store.DeleteMigratedCredential(); } ICredentialProvider credProv = GetCredentialProvider(credData.Scheme); credProv.CredentialData = credData; - VssCredentials creds = credProv.GetVssCredentials(HostContext); + VssCredentials creds = credProv.GetVssCredentials(HostContext, allowAuthUrlV2); return creds; } diff --git a/src/Runner.Listener/Configuration/CredentialProvider.cs b/src/Runner.Listener/Configuration/CredentialProvider.cs index def579a0d..c6bac758d 100644 --- a/src/Runner.Listener/Configuration/CredentialProvider.cs +++ b/src/Runner.Listener/Configuration/CredentialProvider.cs @@ -1,7 +1,7 @@ using System; -using GitHub.Services.Common; using GitHub.Runner.Common; using GitHub.Runner.Sdk; +using GitHub.Services.Common; using GitHub.Services.OAuth; namespace GitHub.Runner.Listener.Configuration @@ -10,7 +10,7 @@ namespace GitHub.Runner.Listener.Configuration { Boolean RequireInteractive { get; } CredentialData CredentialData { get; set; } - VssCredentials GetVssCredentials(IHostContext context); + VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2); void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl); } @@ -25,7 +25,7 @@ namespace GitHub.Runner.Listener.Configuration public virtual Boolean RequireInteractive => false; public CredentialData CredentialData { get; set; } - public abstract VssCredentials GetVssCredentials(IHostContext context); + public abstract VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2); public abstract void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl); } @@ -33,7 +33,7 @@ namespace GitHub.Runner.Listener.Configuration { public OAuthAccessTokenCredential() : base(Constants.Configuration.OAuthAccessToken) { } - public override VssCredentials GetVssCredentials(IHostContext context) + public override VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2) { ArgUtil.NotNull(context, nameof(context)); Tracing trace = context.GetTrace(nameof(OAuthAccessTokenCredential)); 
diff --git a/src/Runner.Listener/Configuration/OAuthCredential.cs b/src/Runner.Listener/Configuration/OAuthCredential.cs index a0d2042b9..b09d67754 100644 --- a/src/Runner.Listener/Configuration/OAuthCredential.cs +++ b/src/Runner.Listener/Configuration/OAuthCredential.cs @@ -22,10 +22,18 @@ namespace GitHub.Runner.Listener.Configuration // Nothing to verify here } - public override VssCredentials GetVssCredentials(IHostContext context) + public override VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2) { var clientId = this.CredentialData.Data.GetValueOrDefault("clientId", null); var authorizationUrl = this.CredentialData.Data.GetValueOrDefault("authorizationUrl", null); + var authorizationUrlV2 = this.CredentialData.Data.GetValueOrDefault("authorizationUrlV2", null); + + if (allowAuthUrlV2 && + !string.IsNullOrEmpty(authorizationUrlV2) && + context.AllowAuthMigration) + { + authorizationUrl = authorizationUrlV2; + } // For back compat with .credential file that doesn't has 'oauthEndpointUrl' section var oauthEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl); diff --git a/src/Runner.Listener/Configuration/RSAEncryptedFileKeyManager.cs b/src/Runner.Listener/Configuration/RSAEncryptedFileKeyManager.cs index 15291be43..a404a674e 100644 --- a/src/Runner.Listener/Configuration/RSAEncryptedFileKeyManager.cs +++ b/src/Runner.Listener/Configuration/RSAEncryptedFileKeyManager.cs @@ -1,4 +1,5 @@ #if OS_WINDOWS +#pragma warning disable CA1416 using System.IO; using System.Security.Cryptography; using System.Text; @@ -84,4 +85,5 @@ namespace GitHub.Runner.Listener.Configuration } } } +#pragma warning restore CA1416 #endif diff --git a/src/Runner.Listener/ErrorThrottler.cs b/src/Runner.Listener/ErrorThrottler.cs new file mode 100644 index 000000000..8525c7285 --- /dev/null +++ b/src/Runner.Listener/ErrorThrottler.cs @@ -0,0 +1,44 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Runner.Common; +using GitHub.Services.Common; + +namespace GitHub.Runner.Listener +{ + [ServiceLocator(Default = typeof(ErrorThrottler))] + public interface IErrorThrottler : IRunnerService + { + void Reset(); + Task IncrementAndWaitAsync(CancellationToken token); + } + + public sealed class ErrorThrottler : RunnerService, IErrorThrottler + { + internal static readonly TimeSpan MinBackoff = TimeSpan.FromSeconds(1); + internal static readonly TimeSpan MaxBackoff = TimeSpan.FromMinutes(1); + internal static readonly TimeSpan BackoffCoefficient = TimeSpan.FromSeconds(1); + private int _count = 0; + + public void Reset() + { + _count = 0; + } + + public async Task IncrementAndWaitAsync(CancellationToken token) + { + if (++_count <= 1) + { + return; + } + + TimeSpan backoff = BackoffTimerHelper.GetExponentialBackoff( + attempt: _count - 2, // 0-based attempt + minBackoff: MinBackoff, + maxBackoff: MaxBackoff, + deltaBackoff: BackoffCoefficient); + Trace.Warning($"Back off {backoff.TotalSeconds} seconds before next attempt. Current consecutive error count: {_count}"); + await HostContext.Delay(backoff, token); + } + } +} diff --git a/src/Runner.Listener/JobDispatcher.cs b/src/Runner.Listener/JobDispatcher.cs index 0efdaba24..f98204b42 100644 --- a/src/Runner.Listener/JobDispatcher.cs +++ b/src/Runner.Listener/JobDispatcher.cs @@ -35,7 +35,7 @@ namespace GitHub.Runner.Listener // This implementation of IJobDispatcher is not thread safe. 
// It is based on the fact that the current design of the runner is a dequeue // and processes one message from the message queue at a time. - // In addition, it only executes one job every time, + // In addition, it only executes one job every time, // and the server will not send another job while this one is still running. public sealed class JobDispatcher : RunnerService, IJobDispatcher { @@ -110,7 +110,12 @@ namespace GitHub.Runner.Listener { var jwt = JsonWebToken.Create(accessToken); var claims = jwt.ExtractClaims(); - orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value; + orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orch_id", StringComparison.OrdinalIgnoreCase))?.Value; + if (string.IsNullOrEmpty(orchestrationId)) + { + orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value; + } + if (!string.IsNullOrEmpty(orchestrationId)) { Trace.Info($"Pull OrchestrationId {orchestrationId} from JWT claims"); @@ -545,14 +550,36 @@ namespace GitHub.Runner.Listener detailInfo = string.Join(Environment.NewLine, workerOutput); Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result."); - var jobServer = await InitializeJobServerAsync(systemConnection); - await LogWorkerProcessUnhandledException(jobServer, message, detailInfo); - - // Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space. - if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase)) + try { - Trace.Info($"Finish job with result 'Failed' due to IOException."); - await ForceFailJob(jobServer, message, detailInfo); + var jobServer = await InitializeJobServerAsync(systemConnection); + var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo }; + unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash; + switch (jobServer) + { + case IJobServer js: + { + await LogWorkerProcessUnhandledException(js, message, unhandledExceptionIssue); + // Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space. + if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase)) + { + Trace.Info($"Finish job with result 'Failed' due to IOException."); + await ForceFailJob(js, message); + } + + break; + } + case IRunServer rs: + await ForceFailJob(rs, message, unhandledExceptionIssue); + break; + default: + throw new NotSupportedException($"JobServer type '{jobServer.GetType().Name}' is not supported."); + } + } + catch (Exception ex) + { + Trace.Error($"Catch exception during log worker process unhandled exception."); + Trace.Error(ex); } } @@ -644,7 +671,7 @@ namespace GitHub.Runner.Listener } } - // wait worker to exit + // wait worker to exit // if worker doesn't exit within timeout, then kill worker. 
completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken)); @@ -1131,86 +1158,65 @@ namespace GitHub.Runner.Listener } // log an error issue to job level timeline record - private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo) + private async Task LogWorkerProcessUnhandledException(IJobServer jobServer, Pipelines.AgentJobRequestMessage message, Issue issue) { - if (server is IJobServer jobServer) + try { - try - { - var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None); - ArgUtil.NotNull(timeline, nameof(timeline)); + var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None); + ArgUtil.NotNull(timeline, nameof(timeline)); - TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job"); - ArgUtil.NotNull(jobRecord, nameof(jobRecord)); + TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job"); + ArgUtil.NotNull(jobRecord, nameof(jobRecord)); - var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo }; - unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash; - jobRecord.ErrorCount++; - jobRecord.Issues.Add(unhandledExceptionIssue); + jobRecord.ErrorCount++; + jobRecord.Issues.Add(issue); - if (message.Variables.TryGetValue("DistributedTask.MarkJobAsFailedOnWorkerCrash", out var markJobAsFailedOnWorkerCrash) && - StringUtil.ConvertToBoolean(markJobAsFailedOnWorkerCrash?.Value)) - { - Trace.Info("Mark the job as failed since the worker crashed"); - jobRecord.Result = TaskResult.Failed; - // mark the job as completed so service will pickup the result - jobRecord.State = TimelineRecordState.Completed; - } + Trace.Info("Mark the job as failed since the worker crashed"); + jobRecord.Result = TaskResult.Failed; + // mark the job as completed so service will pickup the result + jobRecord.State = TimelineRecordState.Completed; - await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None); - } - catch (Exception ex) - { - Trace.Error("Fail to report unhandled exception from Runner.Worker process"); - Trace.Error(ex); - } + await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None); } - else + catch (Exception ex) { - Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", detailInfo); - return; + Trace.Error("Fail to report unhandled exception from Runner.Worker process"); + Trace.Error(ex); } } // raise job completed event to fail the job. 
- private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo) + private async Task ForceFailJob(IJobServer jobServer, Pipelines.AgentJobRequestMessage message) { - if (server is IJobServer jobServer) + try { - try - { - var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed); - await jobServer.RaisePlanEventAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None); - } - catch (Exception ex) - { - Trace.Error("Fail to raise JobCompletedEvent back to service."); - Trace.Error(ex); - } + var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed); + await jobServer.RaisePlanEventAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None); } - else if (server is IRunServer runServer) + catch (Exception ex) { - try - { - var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo }; - var unhandledAnnotation = unhandledExceptionIssue.ToAnnotation(); - var jobAnnotations = new List(); - if (unhandledAnnotation.HasValue) - { - jobAnnotations.Add(unhandledAnnotation.Value); - } + Trace.Error("Fail to raise JobCompletedEvent back to service."); + Trace.Error(ex); + } + } - await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, CancellationToken.None); - } - catch (Exception ex) - { - Trace.Error("Fail to raise job completion back to service."); - Trace.Error(ex); - } - } - else + private async Task ForceFailJob(IRunServer runServer, Pipelines.AgentJobRequestMessage message, Issue issue) + { + try { - throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported."); + var annotation = issue.ToAnnotation(); + var jobAnnotations = new List(); + if (annotation.HasValue) + { + jobAnnotations.Add(annotation.Value); + } + + await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, environmentUrl: null, telemetry: null, billingOwnerId: message.BillingOwnerId, CancellationToken.None); + } + catch (Exception ex) + { + Trace.Error("Fail to raise job completion back to service."); + Trace.Error(ex); } } diff --git a/src/Runner.Listener/MessageListener.cs b/src/Runner.Listener/MessageListener.cs index 04e0e5727..ef06dd1af 100644 --- a/src/Runner.Listener/MessageListener.cs +++ b/src/Runner.Listener/MessageListener.cs @@ -18,13 +18,23 @@ using GitHub.Services.WebApi; namespace GitHub.Runner.Listener { + public enum CreateSessionResult + { + Success, + Failure, + SessionConflict + } + [ServiceLocator(Default = typeof(MessageListener))] public interface IMessageListener : IRunnerService { - Task CreateSessionAsync(CancellationToken token); + Task CreateSessionAsync(CancellationToken token); Task DeleteSessionAsync(); Task GetNextMessageAsync(CancellationToken token); Task DeleteMessageAsync(TaskAgentMessage message); + Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken); + + Task RefreshListenerTokenAsync(); void OnJobStatus(object sender, JobStatusEventArgs e); } @@ -35,6 +45,7 @@ namespace GitHub.Runner.Listener private ITerminal _term; private IRunnerServer _runnerServer; private IBrokerServer _brokerServer; + private ICredentialManager 
_credMgr; private TaskAgentSession _session; private TimeSpan _getNextMessageRetryInterval; private bool _accessTokenRevoked = false; @@ -42,11 +53,12 @@ namespace GitHub.Runner.Listener private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4); private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30); private readonly Dictionary _sessionCreationExceptionTracker = new(); - private TaskAgentStatus runnerStatus = TaskAgentStatus.Online; + private TaskAgentStatus _runnerStatus = TaskAgentStatus.Online; private CancellationTokenSource _getMessagesTokenSource; private VssCredentials _creds; - - private bool _isBrokerSession = false; + private VssCredentials _credsV2; + private bool _needRefreshCredsV2 = false; + private bool _handlerInitialized = false; public override void Initialize(IHostContext hostContext) { @@ -55,9 +67,10 @@ namespace GitHub.Runner.Listener _term = HostContext.GetService(); _runnerServer = HostContext.GetService(); _brokerServer = hostContext.GetService(); + _credMgr = hostContext.GetService(); } - public async Task CreateSessionAsync(CancellationToken token) + public async Task CreateSessionAsync(CancellationToken token) { Trace.Entering(); @@ -69,8 +82,7 @@ namespace GitHub.Runner.Listener // Create connection. Trace.Info("Loading Credentials"); - var credMgr = HostContext.GetService(); - _creds = credMgr.LoadCredentials(); + _creds = _credMgr.LoadCredentials(allowAuthUrlV2: false); var agent = new TaskAgentReference { @@ -79,7 +91,8 @@ namespace GitHub.Runner.Listener Version = BuildConstants.RunnerPackage.Version, OSDescription = RuntimeInformation.OSDescription, }; - string sessionName = $"{Environment.MachineName ?? "RUNNER"}"; + var currentProcess = Process.GetCurrentProcess(); + string sessionName = $"{Environment.MachineName ?? "RUNNER"} (PID: {currentProcess.Id})"; var taskAgentSession = new TaskAgentSession(sessionName, agent); string errorMessage = string.Empty; @@ -103,15 +116,6 @@ namespace GitHub.Runner.Listener _settings.PoolId, taskAgentSession, token); - - if (_session.BrokerMigrationMessage != null) - { - Trace.Info("Runner session is in migration mode: Creating Broker session with BrokerBaseUrl: {0}", _session.BrokerMigrationMessage.BrokerBaseUrl); - await _brokerServer.ConnectAsync(_session.BrokerMigrationMessage.BrokerBaseUrl, _creds); - _session = await _brokerServer.CreateSessionAsync(taskAgentSession, token); - _isBrokerSession = true; - } - Trace.Info($"Session created."); if (encounteringError) { @@ -120,7 +124,14 @@ namespace GitHub.Runner.Listener encounteringError = false; } - return true; + if (!_handlerInitialized) + { + Trace.Info("Registering AuthMigrationChanged event handler."); + HostContext.AuthMigrationChanged += HandleAuthMigrationChanged; + _handlerInitialized = true; + } + + return CreateSessionResult.Success; } catch (OperationCanceledException) when (token.IsCancellationRequested) { @@ -144,7 +155,7 @@ namespace GitHub.Runner.Listener if (string.Equals(vssOAuthEx.Error, "invalid_client", StringComparison.OrdinalIgnoreCase)) { _term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently."); - return false; + return CreateSessionResult.Failure; } // Check whether we get 401 because the runner registration already removed by the service. 
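
`CreateSessionAsync` now reports a `CreateSessionResult` instead of a bare `bool`, which lets callers distinguish a session conflict from other failures. A simplified sketch of the branching that `Runner.cs` performs later in this patch (assuming the repository's `IMessageListener` and `Constants.Runner.ReturnCode` definitions):

```csharp
// Caller-side sketch only; the real branching lives in Runner.RunAsync later in this patch.
private async Task<int> CreateSessionOrExitAsync(IMessageListener listener, CancellationToken token)
{
    CreateSessionResult result = await listener.CreateSessionAsync(token);
    switch (result)
    {
        case CreateSessionResult.Success:
            return Constants.Runner.ReturnCode.Success;         // proceed to the message loop
        case CreateSessionResult.SessionConflict:
            return Constants.Runner.ReturnCode.SessionConflict; // another listener already owns the session
        default:
            return Constants.Runner.ReturnCode.TerminatedError; // non-retriable failure
    }
}
```
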
@@ -155,14 +166,18 @@ namespace GitHub.Runner.Listener if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase)) { _term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure. Runner registrations are automatically deleted for runners that have not connected to the service recently."); - return false; + return CreateSessionResult.Failure; } } if (!IsSessionCreationExceptionRetriable(ex)) { _term.WriteError($"Failed to create session. {ex.Message}"); - return false; + if (ex is TaskAgentSessionConflictException) + { + return CreateSessionResult.SessionConflict; + } + return CreateSessionResult.Failure; } if (!encounteringError) //print the message only on the first error @@ -181,15 +196,15 @@ namespace GitHub.Runner.Listener { if (_session != null && _session.SessionId != Guid.Empty) { + if (_handlerInitialized) + { + HostContext.AuthMigrationChanged -= HandleAuthMigrationChanged; + } + if (!_accessTokenRevoked) { using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30))) { - if (_isBrokerSession) - { - await _brokerServer.DeleteSessionAsync(ts.Token); - return; - } await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token); } } @@ -202,19 +217,17 @@ namespace GitHub.Runner.Listener public void OnJobStatus(object sender, JobStatusEventArgs e) { - if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("USE_BROKER_FLOW"))) + Trace.Info("Received job status event. JobState: {0}", e.Status); + _runnerStatus = e.Status; + try { - Trace.Info("Received job status event. JobState: {0}", e.Status); - runnerStatus = e.Status; - try - { - _getMessagesTokenSource?.Cancel(); - } - catch (ObjectDisposedException) - { - Trace.Info("_getMessagesTokenSource is already disposed."); - } + _getMessagesTokenSource?.Cancel(); } + catch (ObjectDisposedException) + { + Trace.Info("_getMessagesTokenSource is already disposed."); + } + } public async Task GetNextMessageAsync(CancellationToken token) @@ -224,6 +237,7 @@ namespace GitHub.Runner.Listener ArgUtil.NotNull(_settings, nameof(_settings)); bool encounteringError = false; int continuousError = 0; + int continuousEmptyMessage = 0; string errorMessage = string.Empty; Stopwatch heartbeat = new(); heartbeat.Restart(); @@ -237,7 +251,7 @@ namespace GitHub.Runner.Listener message = await _runnerServer.GetAgentMessageAsync(_settings.PoolId, _session.SessionId, _lastMessageId, - runnerStatus, + _runnerStatus, BuildConstants.RunnerPackage.Version, VarUtil.OS, VarUtil.OSArchitecture, @@ -247,16 +261,21 @@ namespace GitHub.Runner.Listener // Decrypt the message body if the session is using encryption message = DecryptMessage(message); - if (message != null && message.MessageType == BrokerMigrationMessage.MessageType) { - Trace.Info("BrokerMigration message received. 
Polling Broker for messages..."); - var migrationMessage = JsonUtility.FromString(message.Body); - await _brokerServer.ConnectAsync(migrationMessage.BrokerBaseUrl, _creds); + _credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true); + await _brokerServer.UpdateConnectionIfNeeded(migrationMessage.BrokerBaseUrl, _credsV2); + if (_needRefreshCredsV2) + { + Trace.Info("Refreshing credentials for V2."); + await _brokerServer.ForceRefreshConnection(_credsV2); + _needRefreshCredsV2 = false; + } + message = await _brokerServer.GetRunnerMessageAsync(_session.SessionId, - runnerStatus, + _runnerStatus, BuildConstants.RunnerPackage.Version, VarUtil.OS, VarUtil.OSArchitecture, @@ -292,7 +311,16 @@ namespace GitHub.Runner.Listener _accessTokenRevoked = true; throw; } - catch (AccessDeniedException e) when (e.ErrorCode == 1) + catch (HostedRunnerDeprovisionedException) + { + Trace.Info("Hosted runner has been deprovisioned."); + throw; + } + catch (AccessDeniedException e) when (e.ErrorCode == 1 && !HostContext.AllowAuthMigration) + { + throw; + } + catch (RunnerNotFoundException) when (!HostContext.AllowAuthMigration) { throw; } @@ -301,12 +329,19 @@ namespace GitHub.Runner.Listener Trace.Error("Catch exception during get next message."); Trace.Error(ex); + // clear out potential message for broker migration, + // in case the exception is thrown from get message from broker-listener. + message = null; + // don't retry if SkipSessionRecover = true, DT service will delete agent session to stop agent from taking more jobs. - if (ex is TaskAgentSessionExpiredException && !_settings.SkipSessionRecover && await CreateSessionAsync(token)) + if (!HostContext.AllowAuthMigration && + ex is TaskAgentSessionExpiredException && + !_settings.SkipSessionRecover && (await CreateSessionAsync(token) == CreateSessionResult.Success)) { Trace.Info($"{nameof(TaskAgentSessionExpiredException)} received, recovered by recreate session."); } - else if (!IsGetNextMessageExceptionRetriable(ex)) + else if (!HostContext.AllowAuthMigration && + !IsGetNextMessageExceptionRetriable(ex)) { throw; } @@ -333,6 +368,12 @@ namespace GitHub.Runner.Listener encounteringError = true; } + if (HostContext.AllowAuthMigration) + { + Trace.Info("Disable migration mode for 60 minutes."); + HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Get next message failed with exception: {ex}"); + } + // re-create VssConnection before next retry await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60)); @@ -347,16 +388,27 @@ namespace GitHub.Runner.Listener if (message == null) { + continuousEmptyMessage++; if (heartbeat.Elapsed > TimeSpan.FromMinutes(30)) { Trace.Info($"No message retrieved from session '{_session.SessionId}' within last 30 minutes."); heartbeat.Restart(); + continuousEmptyMessage = 0; } else { Trace.Verbose($"No message retrieved from session '{_session.SessionId}'."); } + if (continuousEmptyMessage > 50) + { + // retried more than 50 times in less than 30mins and still getting empty message + // something is not right on the service side, backoff for 15-30s before retry + _getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30), _getNextMessageRetryInterval); + Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds); + await HostContext.Delay(_getNextMessageRetryInterval, token); + } + continue; } @@ -379,6 +431,28 @@ namespace GitHub.Runner.Listener } } + public async Task 
RefreshListenerTokenAsync() + { + await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60)); + _credsV2 = _credMgr.LoadCredentials(allowAuthUrlV2: true); + await _brokerServer.ForceRefreshConnection(_credsV2); + } + + public async Task AcknowledgeMessageAsync(string runnerRequestId, CancellationToken cancellationToken) + { + using var timeoutCts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // Short timeout + using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token); + Trace.Info($"Acknowledging runner request '{runnerRequestId}'."); + await _brokerServer.AcknowledgeRunnerRequestAsync( + runnerRequestId, + _session.SessionId, + _runnerStatus, + BuildConstants.RunnerPackage.Version, + VarUtil.OS, + VarUtil.OSArchitecture, + linkedCts.Token); + } + private TaskAgentMessage DecryptMessage(TaskAgentMessage message) { if (_session.EncryptionKey == null || @@ -428,6 +502,7 @@ namespace GitHub.Runner.Listener ex is TaskAgentPoolNotFoundException || ex is TaskAgentSessionExpiredException || ex is AccessDeniedException || + ex is RunnerNotFoundException || ex is VssUnauthorizedException) { Trace.Info($"Non-retriable exception: {ex.Message}"); @@ -494,7 +569,8 @@ namespace GitHub.Runner.Listener } else if (ex is TaskAgentPoolNotFoundException || ex is AccessDeniedException || - ex is VssUnauthorizedException) + ex is VssUnauthorizedException || + (ex is VssOAuthTokenRequestException oauthEx && oauthEx.Error != "server_error")) { Trace.Info($"Non-retriable exception: {ex.Message}"); return false; @@ -505,5 +581,11 @@ namespace GitHub.Runner.Listener return true; } } + + private void HandleAuthMigrationChanged(object sender, EventArgs e) + { + Trace.Info($"Auth migration changed. 
Current allow auth migration state: {HostContext.AllowAuthMigration}"); + _needRefreshCredsV2 = true; + } } } diff --git a/src/Runner.Listener/Program.cs b/src/Runner.Listener/Program.cs index a6bdce62c..80852d32c 100644 --- a/src/Runner.Listener/Program.cs +++ b/src/Runner.Listener/Program.cs @@ -7,6 +7,7 @@ using System.Reflection; using System.Runtime.InteropServices; using System.Threading.Tasks; using GitHub.DistributedTask.WebApi; +using GitHub.Services.WebApi; namespace GitHub.Runner.Listener { @@ -144,6 +145,12 @@ namespace GitHub.Runner.Listener trace.Error(e); return Constants.Runner.ReturnCode.TerminatedError; } + catch (RunnerNotFoundException e) + { + terminal.WriteError($"An error occurred: {e.Message}"); + trace.Error(e); + return Constants.Runner.ReturnCode.TerminatedError; + } catch (Exception e) { terminal.WriteError($"An error occurred: {e.Message}"); diff --git a/src/Runner.Listener/Runner.Listener.csproj b/src/Runner.Listener/Runner.Listener.csproj index 3cd72ec61..68df8fbbc 100644 --- a/src/Runner.Listener/Runner.Listener.csproj +++ b/src/Runner.Listener/Runner.Listener.csproj @@ -1,11 +1,12 @@ - net6.0 + net8.0 Exe win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64 + true true - NU1701;NU1603 + NU1701;NU1603;SYSLIB0050;SYSLIB0051 $(Version) false true @@ -18,11 +19,11 @@ - + - - - + + + diff --git a/src/Runner.Listener/Runner.cs b/src/Runner.Listener/Runner.cs index 263da5fef..8262d31f1 100644 --- a/src/Runner.Listener/Runner.cs +++ b/src/Runner.Listener/Runner.cs @@ -1,10 +1,12 @@ using System; +using System.Collections.Concurrent; using System.Collections.Generic; using System.IO; using System.Linq; using System.Reflection; using System.Runtime.CompilerServices; using System.Security.Cryptography; +using System.Security.Claims; using System.Text; using System.Threading; using System.Threading.Tasks; @@ -14,7 +16,9 @@ using GitHub.Runner.Common.Util; using GitHub.Runner.Listener.Check; using GitHub.Runner.Listener.Configuration; using GitHub.Runner.Sdk; +using GitHub.Services.OAuth; using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Jwt; using Pipelines = GitHub.DistributedTask.Pipelines; namespace GitHub.Runner.Listener @@ -31,11 +35,35 @@ namespace GitHub.Runner.Listener private ITerminal _term; private bool _inConfigStage; private ManualResetEvent _completedCommand = new(false); + private readonly ConcurrentQueue _authMigrationTelemetries = new(); + private Task _authMigrationTelemetryTask; + private readonly object _authMigrationTelemetryLock = new(); + private Task _authMigrationClaimsCheckTask; + private readonly object _authMigrationClaimsCheckLock = new(); + private IRunnerServer _runnerServer; + private CancellationTokenSource _authMigrationTelemetryTokenSource = new(); + private CancellationTokenSource _authMigrationClaimsCheckTokenSource = new(); + + // + // Helps avoid excessive calls to Run Service when encountering non-retriable errors from /acquirejob. + // Normally we rely on the HTTP clients to back off between retry attempts. However, acquiring a job + // involves calls to both Run Serivce and Broker. And Run Service and Broker communicate with each other + // in an async fashion. + // + // When Run Service encounters a non-retriable error, it sends an async message to Broker. The runner will, + // however, immediately call Broker to get the next message. If the async event from Run Service to Broker + // has not yet been processed, the next message from Broker may be the same job message. 
+ // + // The error throttler helps us back off when encountering successive, non-retriable errors from /acquirejob. + // + private IErrorThrottler _acquireJobThrottler; public override void Initialize(IHostContext hostContext) { base.Initialize(hostContext); _term = HostContext.GetService(); + _acquireJobThrottler = HostContext.CreateService(); + _runnerServer = HostContext.GetService(); } public async Task ExecuteCommand(CommandSettings command) @@ -51,6 +79,8 @@ namespace GitHub.Runner.Listener //register a SIGTERM handler HostContext.Unloading += Runner_Unloading; + HostContext.AuthMigrationChanged += HandleAuthMigrationChanged; + // TODO Unit test to cover this logic Trace.Info(nameof(ExecuteCommand)); var configManager = HostContext.GetService(); @@ -213,15 +243,21 @@ namespace GitHub.Runner.Listener var configFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), config.Key); var configContent = Convert.FromBase64String(config.Value); #if OS_WINDOWS +#pragma warning disable CA1416 if (configFile == HostContext.GetConfigFile(WellKnownConfigFile.RSACredentials)) { configContent = ProtectedData.Protect(configContent, null, DataProtectionScope.LocalMachine); } +#pragma warning restore CA1416 #endif File.WriteAllBytes(configFile, configContent); File.SetAttributes(configFile, File.GetAttributes(configFile) | FileAttributes.Hidden); Trace.Info($"Saved {configContent.Length} bytes to '{configFile}'."); } + + // make sure we have the right user agent data added from the jitconfig + HostContext.LoadDefaultUserAgents(); + VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy); } catch (Exception ex) { @@ -279,8 +315,17 @@ namespace GitHub.Runner.Listener _term.WriteLine("https://docs.github.com/en/actions/hosting-your-own-runners/autoscaling-with-self-hosted-runners#using-ephemeral-runners-for-autoscaling", ConsoleColor.Yellow); } + var cred = store.GetCredentials(); + if (cred != null && + cred.Scheme == Constants.Configuration.OAuth && + cred.Data.ContainsKey("EnableAuthMigrationByDefault")) + { + Trace.Info("Enable auth migration by default."); + HostContext.EnableAuthMigration("EnableAuthMigrationByDefault"); + } + // Run the runner interactively or as service - return await RunAsync(settings, command.RunOnce || settings.Ephemeral); + return await ExecuteRunnerAsync(settings, command.RunOnce || settings.Ephemeral); } else { @@ -290,6 +335,9 @@ namespace GitHub.Runner.Listener } finally { + _authMigrationClaimsCheckTokenSource?.Cancel(); + _authMigrationTelemetryTokenSource?.Cancel(); + HostContext.AuthMigrationChanged -= HandleAuthMigrationChanged; _term.CancelKeyPress -= CtrlCHandler; HostContext.Unloading -= Runner_Unloading; _completedCommand.Set(); @@ -339,12 +387,12 @@ namespace GitHub.Runner.Listener } } - private IMessageListener GetMesageListener(RunnerSettings settings) + private IMessageListener GetMessageListener(RunnerSettings settings, bool isMigratedSettings = false) { if (settings.UseV2Flow) { Trace.Info($"Using BrokerMessageListener"); - var brokerListener = new BrokerMessageListener(); + var brokerListener = new BrokerMessageListener(settings, isMigratedSettings); brokerListener.Initialize(HostContext); return brokerListener; } @@ -358,10 +406,65 @@ namespace GitHub.Runner.Listener try { Trace.Info(nameof(RunAsync)); - _listener = GetMesageListener(settings); - if (!await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken)) + + // First try using migrated settings if available + var configManager = 
HostContext.GetService(); + RunnerSettings migratedSettings = null; + + try { - return Constants.Runner.ReturnCode.TerminatedError; + migratedSettings = configManager.LoadMigratedSettings(); + Trace.Info("Loaded migrated settings from .runner_migrated file"); + Trace.Info(migratedSettings); + } + catch (Exception ex) + { + // If migrated settings file doesn't exist or can't be loaded, we'll use the provided settings + Trace.Info($"Failed to load migrated settings: {ex.Message}"); + } + + bool usedMigratedSettings = false; + + if (migratedSettings != null) + { + // Try to create session with migrated settings first + Trace.Info("Attempting to create session using migrated settings"); + _listener = GetMessageListener(migratedSettings, isMigratedSettings: true); + + try + { + CreateSessionResult createSessionResult = await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken); + if (createSessionResult == CreateSessionResult.Success) + { + Trace.Info("Successfully created session with migrated settings"); + settings = migratedSettings; // Use migrated settings for the rest of the process + usedMigratedSettings = true; + } + else + { + Trace.Warning($"Failed to create session with migrated settings: {createSessionResult}"); + } + } + catch (Exception ex) + { + Trace.Error($"Exception when creating session with migrated settings: {ex}"); + } + } + + // If migrated settings weren't used or session creation failed, use original settings + if (!usedMigratedSettings) + { + Trace.Info("Falling back to original .runner settings"); + _listener = GetMessageListener(settings); + CreateSessionResult createSessionResult = await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken); + if (createSessionResult == CreateSessionResult.SessionConflict) + { + return Constants.Runner.ReturnCode.SessionConflict; + } + else if (createSessionResult == CreateSessionResult.Failure) + { + return Constants.Runner.ReturnCode.TerminatedError; + } } HostContext.WritePerfCounter("SessionCreated"); @@ -375,6 +478,8 @@ namespace GitHub.Runner.Listener // Should we try to cleanup ephemeral runners bool runOnceJobCompleted = false; bool skipSessionDeletion = false; + bool restartSession = false; // Flag to indicate session restart + bool restartSessionPending = false; try { var notification = HostContext.GetService(); @@ -390,6 +495,15 @@ namespace GitHub.Runner.Listener while (!HostContext.RunnerShutdownToken.IsCancellationRequested) { + // Check if we need to restart the session and can do so (job dispatcher not busy) + if (restartSessionPending && !jobDispatcher.Busy) + { + Trace.Info("Pending session restart detected and job dispatcher is not busy. 
Restarting session now."); + messageQueueLoopTokenSource.Cancel(); + restartSession = true; + break; + } + TaskAgentMessage message = null; bool skipMessageDeletion = false; try @@ -540,36 +654,74 @@ namespace GitHub.Runner.Listener else { var messageRef = StringUtil.ConvertFromJson(message.Body); + + // Acknowledge (best-effort) + if (messageRef.ShouldAcknowledge) // Temporary feature flag + { + try + { + await _listener.AcknowledgeMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token); + } + catch (Exception ex) + { + Trace.Error($"Best-effort acknowledge failed for request '{messageRef.RunnerRequestId}'"); + Trace.Error(ex); + } + } + Pipelines.AgentJobRequestMessage jobRequestMessage = null; - - // Create connection - var credMgr = HostContext.GetService(); - var creds = credMgr.LoadCredentials(); - if (string.IsNullOrEmpty(messageRef.RunServiceUrl)) { + // Connect + var credMgr = HostContext.GetService(); + var creds = credMgr.LoadCredentials(allowAuthUrlV2: false); var actionsRunServer = HostContext.CreateService(); await actionsRunServer.ConnectAsync(new Uri(settings.ServerUrl), creds); + + // Get job message jobRequestMessage = await actionsRunServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token); } else { + // Connect + var credMgr = HostContext.GetService(); + var credsV2 = credMgr.LoadCredentials(allowAuthUrlV2: true); var runServer = HostContext.CreateService(); - await runServer.ConnectAsync(new Uri(messageRef.RunServiceUrl), creds); + await runServer.ConnectAsync(new Uri(messageRef.RunServiceUrl), credsV2); + + // Get job message try { - jobRequestMessage = - await runServer.GetJobMessageAsync(messageRef.RunnerRequestId, - messageQueueLoopTokenSource.Token); + jobRequestMessage = await runServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageRef.BillingOwnerId, messageQueueLoopTokenSource.Token); + _acquireJobThrottler.Reset(); } - catch (TaskOrchestrationJobAlreadyAcquiredException) + catch (Exception ex) when ( + ex is TaskOrchestrationJobNotFoundException || // HTTP status 404 + ex is TaskOrchestrationJobAlreadyAcquiredException || // HTTP status 409 + ex is TaskOrchestrationJobUnprocessableException) // HTTP status 422 { - Trace.Info("Job is already acquired, skip this message."); + Trace.Info($"Skipping message Job. {ex.Message}"); + await _acquireJobThrottler.IncrementAndWaitAsync(messageQueueLoopTokenSource.Token); + continue; + } + catch (Exception ex) + { + Trace.Error($"Caught exception from acquiring job message: {ex}"); + + if (HostContext.AllowAuthMigration) + { + Trace.Info("Disable migration mode for 60 minutes."); + HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Acquire job failed with exception: {ex}"); + } + continue; } } + // Dispatch jobDispatcher.Run(jobRequestMessage, runOnce); + + // Run once? if (runOnce) { Trace.Info("One time used runner received job message."); @@ -596,6 +748,33 @@ namespace GitHub.Runner.Listener Trace.Info($"Service requests the hosted runner to shutdown. 
Reason: '{HostedRunnerShutdownMessage.Reason}'."); return Constants.Runner.ReturnCode.Success; } + else if (string.Equals(message.MessageType, TaskAgentMessageTypes.ForceTokenRefresh)) + { + Trace.Info("Received ForceTokenRefreshMessage"); + await _listener.RefreshListenerTokenAsync(); + } + else if (string.Equals(message.MessageType, RunnerRefreshConfigMessage.MessageType)) + { + var runnerRefreshConfigMessage = JsonUtility.FromString(message.Body); + Trace.Info($"Received RunnerRefreshConfigMessage for '{runnerRefreshConfigMessage.ConfigType}' config file"); + var configUpdater = HostContext.GetService(); + await configUpdater.UpdateRunnerConfigAsync( + runnerQualifiedId: runnerRefreshConfigMessage.RunnerQualifiedId, + configType: runnerRefreshConfigMessage.ConfigType, + serviceType: runnerRefreshConfigMessage.ServiceType, + configRefreshUrl: runnerRefreshConfigMessage.ConfigRefreshUrl); + + // Set flag to schedule session restart if ConfigType is "runner" + if (string.Equals(runnerRefreshConfigMessage.ConfigType, "runner", StringComparison.OrdinalIgnoreCase)) + { + Trace.Info("Runner configuration was updated. Session restart has been scheduled"); + restartSessionPending = true; + } + else + { + Trace.Info($"No session restart needed for config type: {runnerRefreshConfigMessage.ConfigType}"); + } + } else { Trace.Error($"Received message {message.MessageId} with unsupported message type {message.MessageType}."); @@ -649,19 +828,243 @@ namespace GitHub.Runner.Listener if (settings.Ephemeral && runOnceJobCompleted) { - var configManager = HostContext.GetService(); configManager.DeleteLocalRunnerConfig(); } } + + // After cleanup, check if we need to restart the session + if (restartSession) + { + Trace.Info("Restarting runner session after config update..."); + return Constants.Runner.ReturnCode.RunnerConfigurationRefreshed; + } } catch (TaskAgentAccessTokenExpiredException) { Trace.Info("Runner OAuth token has been revoked. Shutting down."); } + catch (HostedRunnerDeprovisionedException) + { + Trace.Info("Hosted runner has been deprovisioned. 
Shutting down."); + } return Constants.Runner.ReturnCode.Success; } + private async Task ExecuteRunnerAsync(RunnerSettings settings, bool runOnce) + { + int returnCode = Constants.Runner.ReturnCode.Success; + bool restart = false; + do + { + restart = false; + returnCode = await RunAsync(settings, runOnce); + + if (returnCode == Constants.Runner.ReturnCode.RunnerConfigurationRefreshed) + { + Trace.Info("Runner configuration was refreshed, restarting session..."); + // Reload settings in case they changed + var configManager = HostContext.GetService(); + settings = configManager.LoadSettings(); + restart = true; + } + } while (restart); + + return returnCode; + } + + private void HandleAuthMigrationChanged(object sender, AuthMigrationEventArgs e) + { + Trace.Verbose("Handle AuthMigrationChanged in Runner"); + _authMigrationTelemetries.Enqueue($"{DateTime.UtcNow.ToString("O")}: {e.Trace}"); + + // only start the telemetry reporting task once auth migration is changed (enabled or disabled) + lock (_authMigrationTelemetryLock) + { + if (_authMigrationTelemetryTask == null) + { + _authMigrationTelemetryTask = ReportAuthMigrationTelemetryAsync(_authMigrationTelemetryTokenSource.Token); + } + } + + // only start the claims check task once auth migration is changed (enabled or disabled) + lock (_authMigrationClaimsCheckLock) + { + if (_authMigrationClaimsCheckTask == null) + { + _authMigrationClaimsCheckTask = CheckOAuthTokenClaimsAsync(_authMigrationClaimsCheckTokenSource.Token); + } + } + } + + private async Task CheckOAuthTokenClaimsAsync(CancellationToken token) + { + string[] expectedClaims = + [ + "owner_id", + "runner_id", + "runner_group_id", + "scale_set_id", + "is_ephemeral", + "labels" + ]; + + try + { + var credMgr = HostContext.GetService(); + while (!token.IsCancellationRequested) + { + try + { + await HostContext.Delay(TimeSpan.FromMinutes(100), token); + } + catch (TaskCanceledException) + { + // Ignore cancellation + } + + if (token.IsCancellationRequested) + { + break; + } + + if (!HostContext.AllowAuthMigration) + { + Trace.Info("Skip checking oauth token claims since auth migration is disabled."); + continue; + } + + var baselineCred = credMgr.LoadCredentials(allowAuthUrlV2: false); + var authV2Cred = credMgr.LoadCredentials(allowAuthUrlV2: true); + + if (!(baselineCred.Federated is VssOAuthCredential baselineVssOAuthCred) || + !(authV2Cred.Federated is VssOAuthCredential vssOAuthCredV2) || + baselineVssOAuthCred == null || + vssOAuthCredV2 == null) + { + Trace.Info("Skip checking oauth token claims for non-oauth credentials"); + continue; + } + + if (string.Equals(baselineVssOAuthCred.AuthorizationUrl.AbsoluteUri, vssOAuthCredV2.AuthorizationUrl.AbsoluteUri, StringComparison.OrdinalIgnoreCase)) + { + Trace.Info("Skip checking oauth token claims for same authorization url"); + continue; + } + + var baselineProvider = baselineVssOAuthCred.GetTokenProvider(baselineVssOAuthCred.AuthorizationUrl); + var v2Provider = vssOAuthCredV2.GetTokenProvider(vssOAuthCredV2.AuthorizationUrl); + try + { + using (var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30))) + using (var requestTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token, timeoutTokenSource.Token)) + { + var baselineToken = await baselineProvider.GetTokenAsync(null, requestTokenSource.Token); + var v2Token = await v2Provider.GetTokenAsync(null, requestTokenSource.Token); + if (baselineToken is VssOAuthAccessToken baselineAccessToken && + v2Token is VssOAuthAccessToken v2AccessToken && + 
!string.IsNullOrEmpty(baselineAccessToken.Value) && + !string.IsNullOrEmpty(v2AccessToken.Value)) + { + var baselineJwt = JsonWebToken.Create(baselineAccessToken.Value); + var baselineClaims = baselineJwt.ExtractClaims(); + var v2Jwt = JsonWebToken.Create(v2AccessToken.Value); + var v2Claims = v2Jwt.ExtractClaims(); + + // Log extracted claims for debugging + Trace.Verbose($"Baseline token expected claims: {string.Join(", ", baselineClaims + .Where(c => expectedClaims.Contains(c.Type.ToLowerInvariant())) + .Select(c => $"{c.Type}:{c.Value}"))}"); + Trace.Verbose($"V2 token expected claims: {string.Join(", ", v2Claims + .Where(c => expectedClaims.Contains(c.Type.ToLowerInvariant())) + .Select(c => $"{c.Type}:{c.Value}"))}"); + + foreach (var claim in expectedClaims) + { + // if baseline has the claim, v2 should have it too with exactly same value. + if (baselineClaims.FirstOrDefault(c => c.Type.ToLowerInvariant() == claim) is Claim baselineClaim && + !string.IsNullOrEmpty(baselineClaim?.Value)) + { + var v2Claim = v2Claims.FirstOrDefault(c => c.Type.ToLowerInvariant() == claim); + if (v2Claim?.Value != baselineClaim.Value) + { + Trace.Info($"Token Claim mismatch between two issuers. Expected: {baselineClaim.Type}:{baselineClaim.Value}. Actual: {v2Claim?.Type ?? "Empty"}:{v2Claim?.Value ?? "Empty"}"); + HostContext.DeferAuthMigration(TimeSpan.FromMinutes(60), $"Expected claim {baselineClaim.Type}:{baselineClaim.Value} does not match {v2Claim?.Type ?? "Empty"}:{v2Claim?.Value ?? "Empty"}"); + break; + } + } + } + + Trace.Info("OAuth token claims check passed."); + } + } + } + catch (Exception ex) + { + Trace.Error("Failed to fetch and check OAuth token claims."); + Trace.Error(ex); + } + } + } + catch (Exception ex) + { + Trace.Error("Failed to check OAuth token claims in background."); + Trace.Error(ex); + } + } + + private async Task ReportAuthMigrationTelemetryAsync(CancellationToken token) + { + var configManager = HostContext.GetService(); + var runnerSettings = configManager.LoadSettings(); + + while (!token.IsCancellationRequested) + { + try + { + await HostContext.Delay(TimeSpan.FromSeconds(60), token); + } + catch (TaskCanceledException) + { + // Ignore cancellation + } + + Trace.Verbose("Checking for auth migration telemetry to report"); + while (_authMigrationTelemetries.TryDequeue(out var telemetry)) + { + Trace.Verbose($"Reporting auth migration telemetry: {telemetry}"); + if (runnerSettings != null) + { + try + { + using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30))) + { + await _runnerServer.UpdateAgentUpdateStateAsync(runnerSettings.PoolId, runnerSettings.AgentId, "RefreshConfig", telemetry, tokenSource.Token); + } + } + catch (Exception ex) + { + Trace.Error("Failed to report auth migration telemetry."); + Trace.Error(ex); + _authMigrationTelemetries.Enqueue(telemetry); + } + } + + if (!token.IsCancellationRequested) + { + try + { + await HostContext.Delay(TimeSpan.FromSeconds(10), token); + } + catch (TaskCanceledException) + { + // Ignore cancellation + } + } + } + } + } + private void PrintUsage(CommandSettings command) { string separator; diff --git a/src/Runner.Listener/RunnerConfigUpdater.cs b/src/Runner.Listener/RunnerConfigUpdater.cs new file mode 100644 index 000000000..c188ad731 --- /dev/null +++ b/src/Runner.Listener/RunnerConfigUpdater.cs @@ -0,0 +1,287 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Runner.Common; +using 
GitHub.Runner.Sdk; +using GitHub.Services.Common; + +namespace GitHub.Runner.Listener +{ + [ServiceLocator(Default = typeof(RunnerConfigUpdater))] + public interface IRunnerConfigUpdater : IRunnerService + { + Task UpdateRunnerConfigAsync(string runnerQualifiedId, string configType, string serviceType, string configRefreshUrl); + } + + public sealed class RunnerConfigUpdater : RunnerService, IRunnerConfigUpdater + { + private RunnerSettings _settings; + private CredentialData _credData; + private IRunnerServer _runnerServer; + private IConfigurationStore _store; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + _store = hostContext.GetService(); + _settings = _store.GetSettings(); + _credData = _store.GetCredentials(); + _runnerServer = HostContext.GetService(); + } + + public async Task UpdateRunnerConfigAsync(string runnerQualifiedId, string configType, string serviceType, string configRefreshUrl) + { + Trace.Entering(); + try + { + ArgUtil.NotNullOrEmpty(runnerQualifiedId, nameof(runnerQualifiedId)); + ArgUtil.NotNullOrEmpty(configType, nameof(configType)); + ArgUtil.NotNullOrEmpty(serviceType, nameof(serviceType)); + ArgUtil.NotNullOrEmpty(configRefreshUrl, nameof(configRefreshUrl)); + + // make sure the runner qualified id matches the current runner + if (!await VerifyRunnerQualifiedId(runnerQualifiedId)) + { + return; + } + + // keep the timeout short to avoid blocking the main thread + using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30))) + { + switch (configType.ToLowerInvariant()) + { + case "runner": + await UpdateRunnerSettingsAsync(serviceType, configRefreshUrl, tokenSource.Token); + break; + case "credentials": + await UpdateRunnerCredentialsAsync(serviceType, configRefreshUrl, tokenSource.Token); + break; + default: + Trace.Error($"Invalid config type '{configType}'."); + await ReportTelemetryAsync($"Invalid config type '{configType}'."); + return; + } + } + } + catch (Exception ex) + { + Trace.Error($"Failed to update runner '{configType}' config."); + Trace.Error(ex); + await ReportTelemetryAsync($"Failed to update runner '{configType}' config: {ex}"); + } + } + + private async Task UpdateRunnerSettingsAsync(string serviceType, string configRefreshUrl, CancellationToken token) + { + Trace.Entering(); + // read the current runner settings and encode with base64 + var runnerConfig = HostContext.GetConfigFile(WellKnownConfigFile.Runner); + string runnerConfigContent = File.ReadAllText(runnerConfig, Encoding.UTF8); + var encodedConfig = Convert.ToBase64String(Encoding.UTF8.GetBytes(runnerConfigContent)); + if (string.IsNullOrEmpty(encodedConfig)) + { + await ReportTelemetryAsync("Failed to get encoded runner settings."); + return; + } + + // exchange the encoded runner settings with the service + string refreshedEncodedConfig = await RefreshRunnerConfigAsync(encodedConfig, serviceType, "runner", configRefreshUrl, token); + if (string.IsNullOrEmpty(refreshedEncodedConfig)) + { + // service will return empty string if there is no change in the config + return; + } + + var decodedConfig = Encoding.UTF8.GetString(Convert.FromBase64String(refreshedEncodedConfig)); + RunnerSettings refreshedRunnerConfig; + try + { + refreshedRunnerConfig = StringUtil.ConvertFromJson(decodedConfig); + } + catch (Exception ex) + { + Trace.Error($"Failed to convert runner config from json '{decodedConfig}'."); + Trace.Error(ex); + await ReportTelemetryAsync($"Failed to convert runner config '{decodedConfig}' from json: {ex}"); + 
return; + } + + // make sure the runner id and name in the refreshed config match the current runner + if (refreshedRunnerConfig?.AgentId != _settings.AgentId) + { + Trace.Error($"Runner id in refreshed config '{refreshedRunnerConfig?.AgentId.ToString() ?? "Empty"}' does not match the current runner '{_settings.AgentId}'."); + await ReportTelemetryAsync($"Runner id in refreshed config '{refreshedRunnerConfig?.AgentId.ToString() ?? "Empty"}' does not match the current runner '{_settings.AgentId}'."); + return; + } + + if (refreshedRunnerConfig?.AgentName != _settings.AgentName) + { + Trace.Error($"Runner name in refreshed config '{refreshedRunnerConfig?.AgentName ?? "Empty"}' does not match the current runner '{_settings.AgentName}'."); + await ReportTelemetryAsync($"Runner name in refreshed config '{refreshedRunnerConfig?.AgentName ?? "Empty"}' does not match the current runner '{_settings.AgentName}'."); + return; + } + + // save the refreshed runner settings as a separate file + _store.SaveMigratedSettings(refreshedRunnerConfig); + await ReportTelemetryAsync("Runner settings updated successfully."); + } + + private async Task UpdateRunnerCredentialsAsync(string serviceType, string configRefreshUrl, CancellationToken token) + { + Trace.Entering(); + // read the current runner credentials and encode with base64 + var credConfig = HostContext.GetConfigFile(WellKnownConfigFile.Credentials); + string credConfigContent = File.ReadAllText(credConfig, Encoding.UTF8); + var encodedConfig = Convert.ToBase64String(Encoding.UTF8.GetBytes(credConfigContent)); + if (string.IsNullOrEmpty(encodedConfig)) + { + await ReportTelemetryAsync("Failed to get encoded credentials."); + return; + } + + CredentialData currentCred = _store.GetCredentials(); + if (currentCred == null) + { + await ReportTelemetryAsync("Failed to get current credentials."); + return; + } + + // we only support refreshing OAuth credentials which is used by self-hosted runners. + if (currentCred.Scheme != Constants.Configuration.OAuth) + { + await ReportTelemetryAsync($"Not supported credential scheme '{currentCred.Scheme}'."); + return; + } + + // exchange the encoded runner credentials with the service + string refreshedEncodedConfig = await RefreshRunnerConfigAsync(encodedConfig, serviceType, "credentials", configRefreshUrl, token); + if (string.IsNullOrEmpty(refreshedEncodedConfig)) + { + // service will return empty string if there is no change in the config + return; + } + + var decodedConfig = Encoding.UTF8.GetString(Convert.FromBase64String(refreshedEncodedConfig)); + CredentialData refreshedCredConfig; + try + { + refreshedCredConfig = StringUtil.ConvertFromJson(decodedConfig); + } + catch (Exception ex) + { + Trace.Error($"Failed to convert credentials config from json '{decodedConfig}'."); + Trace.Error(ex); + await ReportTelemetryAsync($"Failed to convert credentials config '{decodedConfig}' from json: {ex}"); + return; + } + + // make sure the credential scheme in the refreshed config match the current credential scheme + if (refreshedCredConfig?.Scheme != _credData.Scheme) + { + Trace.Error($"Credential scheme in refreshed config '{refreshedCredConfig?.Scheme ?? "Empty"}' does not match the current credential scheme '{_credData.Scheme}'."); + await ReportTelemetryAsync($"Credential scheme in refreshed config '{refreshedCredConfig?.Scheme ?? 
"Empty"}' does not match the current credential scheme '{_credData.Scheme}'."); + return; + } + + if (_credData.Scheme == Constants.Configuration.OAuth) + { + // make sure the credential clientId in the refreshed config match the current credential clientId for OAuth auth scheme + var clientId = _credData.Data.GetValueOrDefault("clientId", null); + var refreshedClientId = refreshedCredConfig.Data.GetValueOrDefault("clientId", null); + if (clientId != refreshedClientId) + { + Trace.Error($"Credential clientId in refreshed config '{refreshedClientId ?? "Empty"}' does not match the current credential clientId '{clientId}'."); + await ReportTelemetryAsync($"Credential clientId in refreshed config '{refreshedClientId ?? "Empty"}' does not match the current credential clientId '{clientId}'."); + return; + } + + // make sure the credential authorizationUrl in the refreshed config match the current credential authorizationUrl for OAuth auth scheme + var authorizationUrl = _credData.Data.GetValueOrDefault("authorizationUrl", null); + var refreshedAuthorizationUrl = refreshedCredConfig.Data.GetValueOrDefault("authorizationUrl", null); + if (authorizationUrl != refreshedAuthorizationUrl) + { + Trace.Error($"Credential authorizationUrl in refreshed config '{refreshedAuthorizationUrl ?? "Empty"}' does not match the current credential authorizationUrl '{authorizationUrl}'."); + await ReportTelemetryAsync($"Credential authorizationUrl in refreshed config '{refreshedAuthorizationUrl ?? "Empty"}' does not match the current credential authorizationUrl '{authorizationUrl}'."); + return; + } + } + + // save the refreshed runner credentials as a separate file + _store.SaveMigratedCredential(refreshedCredConfig); + + if (refreshedCredConfig.Data.ContainsKey("authorizationUrlV2")) + { + HostContext.EnableAuthMigration("Credential file updated"); + await ReportTelemetryAsync("Runner credentials updated successfully. Auth migration is enabled."); + } + else + { + HostContext.DeferAuthMigration(TimeSpan.FromDays(365), "Credential file does not contain authorizationUrlV2"); + await ReportTelemetryAsync("Runner credentials updated successfully. 
Auth migration is disabled."); + } + } + + private async Task VerifyRunnerQualifiedId(string runnerQualifiedId) + { + Trace.Entering(); + Trace.Info($"Verifying runner qualified id: {runnerQualifiedId}"); + var idParts = runnerQualifiedId.Split("/", StringSplitOptions.RemoveEmptyEntries); + if (idParts.Length != 4 || idParts[3] != _settings.AgentId.ToString()) + { + Trace.Error($"Runner qualified id '{runnerQualifiedId}' does not match the current runner '{_settings.AgentId}'."); + await ReportTelemetryAsync($"Runner qualified id '{runnerQualifiedId}' does not match the current runner '{_settings.AgentId}'."); + return false; + } + return true; + } + + private async Task RefreshRunnerConfigAsync(string encodedConfig, string serviceType, string configType, string configRefreshUrl, CancellationToken token) + { + string refreshedEncodedConfig; + switch (serviceType.ToLowerInvariant()) + { + case "pipelines": + try + { + refreshedEncodedConfig = await _runnerServer.RefreshRunnerConfigAsync((int)_settings.AgentId, configType, encodedConfig, token); + } + catch (Exception ex) + { + Trace.Error($"Failed to refresh runner {configType} config with service."); + Trace.Error(ex); + await ReportTelemetryAsync($"Failed to refresh {configType} config: {ex}"); + return null; + } + break; + case "runner-admin": + throw new NotSupportedException("Runner admin service is not supported."); + default: + Trace.Error($"Invalid service type '{serviceType}'."); + await ReportTelemetryAsync($"Invalid service type '{serviceType}'."); + return null; + } + + return refreshedEncodedConfig; + } + + private async Task ReportTelemetryAsync(string telemetry) + { + Trace.Entering(); + try + { + using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30))) + { + await _runnerServer.UpdateAgentUpdateStateAsync(_settings.PoolId, _settings.AgentId, "RefreshConfig", telemetry, tokenSource.Token); + } + } + catch (Exception ex) + { + Trace.Error("Failed to report telemetry."); + Trace.Error(ex); + } + } + } +} diff --git a/src/Runner.Listener/RunnerJobRequestRef.cs b/src/Runner.Listener/RunnerJobRequestRef.cs index df8a4d793..331dbb21d 100644 --- a/src/Runner.Listener/RunnerJobRequestRef.cs +++ b/src/Runner.Listener/RunnerJobRequestRef.cs @@ -7,9 +7,17 @@ namespace GitHub.Runner.Listener { [DataMember(Name = "id")] public string Id { get; set; } + [DataMember(Name = "runner_request_id")] public string RunnerRequestId { get; set; } + + [DataMember(Name = "should_acknowledge")] + public bool ShouldAcknowledge { get; set; } + [DataMember(Name = "run_service_url")] public string RunServiceUrl { get; set; } + + [DataMember(Name = "billing_owner_id")] + public string BillingOwnerId { get; set; } } } diff --git a/src/Runner.PluginHost/Runner.PluginHost.csproj b/src/Runner.PluginHost/Runner.PluginHost.csproj index df30f3450..81a8d2e43 100644 --- a/src/Runner.PluginHost/Runner.PluginHost.csproj +++ b/src/Runner.PluginHost/Runner.PluginHost.csproj @@ -1,11 +1,12 @@  - net6.0 + net8.0 Exe win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64 + true true - NU1701;NU1603 + NU1701;NU1603;SYSLIB0050;SYSLIB0051 $(Version) false true diff --git a/src/Runner.Plugins/Runner.Plugins.csproj b/src/Runner.Plugins/Runner.Plugins.csproj index 39245a3f7..a786cf1cd 100644 --- a/src/Runner.Plugins/Runner.Plugins.csproj +++ b/src/Runner.Plugins/Runner.Plugins.csproj @@ -1,11 +1,12 @@  - net6.0 + net8.0 Library win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64 + true true - NU1701;NU1603 + 
NU1701;NU1603;SYSLIB0050;SYSLIB0051 $(Version) diff --git a/src/Runner.Sdk/Runner.Sdk.csproj b/src/Runner.Sdk/Runner.Sdk.csproj index 202e8669a..55dbf1262 100644 --- a/src/Runner.Sdk/Runner.Sdk.csproj +++ b/src/Runner.Sdk/Runner.Sdk.csproj @@ -1,11 +1,12 @@  - net6.0 + net8.0 Library win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64 + true true - NU1701;NU1603 + NU1701;NU1603;SYSLIB0050;SYSLIB0051 $(Version) @@ -14,9 +15,9 @@ - - - + + + diff --git a/src/Runner.Sdk/Util/IOUtil.cs b/src/Runner.Sdk/Util/IOUtil.cs index da4a8a09b..e0b5b3394 100644 --- a/src/Runner.Sdk/Util/IOUtil.cs +++ b/src/Runner.Sdk/Util/IOUtil.cs @@ -459,6 +459,34 @@ namespace GitHub.Runner.Sdk File.WriteAllText(path, null); } + /// + /// Replaces invalid file name characters with '_' + /// + public static string ReplaceInvalidFileNameChars(string fileName) + { + var result = new StringBuilder(); + var invalidChars = Path.GetInvalidFileNameChars(); + + var current = 0; // Current index + while (current < fileName?.Length) + { + var next = fileName.IndexOfAny(invalidChars, current); + if (next >= 0) + { + result.Append(fileName.Substring(current, next - current)); + result.Append('_'); + current = next + 1; + } + else + { + result.Append(fileName.Substring(current)); + break; + } + } + + return result.ToString(); + } + /// /// Recursively enumerates a directory without following directory reparse points. /// diff --git a/src/Runner.Sdk/Util/UrlUtil.cs b/src/Runner.Sdk/Util/UrlUtil.cs index 01658da05..52ce3a0cb 100644 --- a/src/Runner.Sdk/Util/UrlUtil.cs +++ b/src/Runner.Sdk/Util/UrlUtil.cs @@ -60,5 +60,15 @@ namespace GitHub.Runner.Sdk } return string.Empty; } + + public static string GetVssRequestId(HttpResponseHeaders headers) + { + if (headers != null && + headers.TryGetValues("x-vss-e2eid", out var headerValues)) + { + return headerValues.FirstOrDefault(); + } + return string.Empty; + } } } diff --git a/src/Runner.Sdk/Util/VssUtil.cs b/src/Runner.Sdk/Util/VssUtil.cs index 894e29864..012d27f73 100644 --- a/src/Runner.Sdk/Util/VssUtil.cs +++ b/src/Runner.Sdk/Util/VssUtil.cs @@ -38,6 +38,7 @@ namespace GitHub.Runner.Sdk if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY"))) { VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator; + RawClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator; } var rawHeaderValues = new List(); @@ -85,11 +86,6 @@ namespace GitHub.Runner.Sdk settings.SendTimeout = TimeSpan.FromSeconds(Math.Min(Math.Max(httpRequestTimeoutSeconds, 100), 1200)); } - if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("USE_BROKER_FLOW"))) - { - settings.AllowAutoRedirectForBroker = true; - } - // Remove Invariant from the list of accepted languages. 
// // The constructor of VssHttpRequestSettings (base class of VssClientHttpRequestSettings) adds the current diff --git a/src/Runner.Sdk/Util/WhichUtil.cs b/src/Runner.Sdk/Util/WhichUtil.cs index fde4fa2f6..ef7683a2d 100644 --- a/src/Runner.Sdk/Util/WhichUtil.cs +++ b/src/Runner.Sdk/Util/WhichUtil.cs @@ -7,129 +7,6 @@ namespace GitHub.Runner.Sdk public static class WhichUtil { public static string Which(string command, bool require = false, ITraceWriter trace = null, string prependPath = null) - { - ArgUtil.NotNullOrEmpty(command, nameof(command)); - trace?.Info($"Which: '{command}'"); - if (Path.IsPathFullyQualified(command) && File.Exists(command)) - { - trace?.Info($"Fully qualified path: '{command}'"); - return command; - } - string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable); - if (string.IsNullOrEmpty(path)) - { - trace?.Info("PATH environment variable not defined."); - path = path ?? string.Empty; - } - if (!string.IsNullOrEmpty(prependPath)) - { - path = PathUtil.PrependPath(prependPath, path); - } - - string[] pathSegments = path.Split(new Char[] { Path.PathSeparator }, StringSplitOptions.RemoveEmptyEntries); - for (int i = 0; i < pathSegments.Length; i++) - { - pathSegments[i] = Environment.ExpandEnvironmentVariables(pathSegments[i]); - } - - foreach (string pathSegment in pathSegments) - { - if (!string.IsNullOrEmpty(pathSegment) && Directory.Exists(pathSegment)) - { - string[] matches = null; -#if OS_WINDOWS - string pathExt = Environment.GetEnvironmentVariable("PATHEXT"); - if (string.IsNullOrEmpty(pathExt)) - { - // XP's system default value for PATHEXT system variable - pathExt = ".com;.exe;.bat;.cmd;.vbs;.vbe;.js;.jse;.wsf;.wsh"; - } - - string[] pathExtSegments = pathExt.Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries); - - // if command already has an extension. - if (pathExtSegments.Any(ext => command.EndsWith(ext, StringComparison.OrdinalIgnoreCase))) - { - try - { - matches = Directory.GetFiles(pathSegment, command); - } - catch (UnauthorizedAccessException ex) - { - trace?.Info("Ignore UnauthorizedAccess exception during Which."); - trace?.Verbose(ex.ToString()); - } - - if (matches != null && matches.Length > 0 && IsPathValid(matches.First(), trace)) - { - trace?.Info($"Location: '{matches.First()}'"); - return matches.First(); - } - } - else - { - string searchPattern; - searchPattern = StringUtil.Format($"{command}.*"); - try - { - matches = Directory.GetFiles(pathSegment, searchPattern); - } - catch (UnauthorizedAccessException ex) - { - trace?.Info("Ignore UnauthorizedAccess exception during Which."); - trace?.Verbose(ex.ToString()); - } - - if (matches != null && matches.Length > 0) - { - // add extension. - for (int i = 0; i < pathExtSegments.Length; i++) - { - string fullPath = Path.Combine(pathSegment, $"{command}{pathExtSegments[i]}"); - if (matches.Any(p => p.Equals(fullPath, StringComparison.OrdinalIgnoreCase)) && IsPathValid(fullPath, trace)) - { - trace?.Info($"Location: '{fullPath}'"); - return fullPath; - } - } - } - } -#else - try - { - matches = Directory.GetFiles(pathSegment, command); - } - catch (UnauthorizedAccessException ex) - { - trace?.Info("Ignore UnauthorizedAccess exception during Which."); - trace?.Verbose(ex.ToString()); - } - - if (matches != null && matches.Length > 0 && IsPathValid(matches.First(), trace)) - { - trace?.Info($"Location: '{matches.First()}'"); - return matches.First(); - } -#endif - } - } - -#if OS_WINDOWS - trace?.Info($"{command}: command not found. 
Make sure '{command}' is installed and its location included in the 'Path' environment variable."); -#else - trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'PATH' environment variable."); -#endif - if (require) - { - throw new FileNotFoundException( - message: $"{command}: command not found", - fileName: command); - } - - return null; - } - - public static string Which2(string command, bool require = false, ITraceWriter trace = null, string prependPath = null) { ArgUtil.NotNullOrEmpty(command, nameof(command)); trace?.Info($"Which2: '{command}'"); diff --git a/src/Runner.Service/Windows/App.config b/src/Runner.Service/Windows/App.config index 486e3ea09..e1798d31b 100644 --- a/src/Runner.Service/Windows/App.config +++ b/src/Runner.Service/Windows/App.config @@ -1,6 +1,6 @@  - + diff --git a/src/Runner.Service/Windows/RunnerService.csproj b/src/Runner.Service/Windows/RunnerService.csproj index 238e67aac..ed43de7b0 100644 --- a/src/Runner.Service/Windows/RunnerService.csproj +++ b/src/Runner.Service/Windows/RunnerService.csproj @@ -18,7 +18,7 @@ v4.8 - v4.5 + v4.7 AnyCPU diff --git a/src/Runner.Worker/ActionManager.cs b/src/Runner.Worker/ActionManager.cs index bf7838c1b..9a21aeb4c 100644 --- a/src/Runner.Worker/ActionManager.cs +++ b/src/Runner.Worker/ActionManager.cs @@ -483,10 +483,6 @@ namespace GitHub.Runner.Worker { // Load stored Ids for later load actions compositeAction.Steps[i].Id = _cachedEmbeddedStepIds[action.Id][i]; - if (string.IsNullOrEmpty(executionContext.Global.Variables.Get("DistributedTask.EnableCompositeActions")) && compositeAction.Steps[i].Reference.Type != Pipelines.ActionSourceType.Script) - { - throw new Exception("`uses:` keyword is not currently supported."); - } } } else @@ -692,7 +688,8 @@ namespace GitHub.Runner.Worker { if (MessageUtil.IsRunServiceJob(executionContext.Global.Variables.Get(Constants.Variables.System.JobRequestType))) { - actionDownloadInfos = await launchServer.ResolveActionsDownloadInfoAsync(executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken); + var displayHelpfulActionsDownloadErrors = executionContext.Global.Variables.GetBoolean(Constants.Runner.Features.DisplayHelpfulActionsDownloadErrors) ?? false; + actionDownloadInfos = await launchServer.ResolveActionsDownloadInfoAsync(executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken, displayHelpfulActionsDownloadErrors); } else { @@ -703,11 +700,12 @@ namespace GitHub.Runner.Worker catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is cancelled. { // UnresolvableActionDownloadInfoException is a 422 client error, don't retry + // NonRetryableActionDownloadInfoException is a non-retryable exception from Actions // Some possible cases are: // * Repo is rate limited // * Repo or tag doesn't exist, or isn't public // * Policy validation failed - if (attempt < 3 && !(ex is WebApi.UnresolvableActionDownloadInfoException)) + if (attempt < 3 && !(ex is WebApi.UnresolvableActionDownloadInfoException) && !(ex is WebApi.NonRetryableActionDownloadInfoException)) { executionContext.Output($"Failed to resolve action download info. 
Error: {ex.Message}"); executionContext.Debug(ex.ToString()); @@ -778,7 +776,19 @@ namespace GitHub.Runner.Worker // make sure we get a clean folder ready to use. IOUtil.DeleteDirectory(destDirectory, executionContext.CancellationToken); Directory.CreateDirectory(destDirectory); - executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' (SHA:{downloadInfo.ResolvedSha})"); + + if (downloadInfo.PackageDetails != null) + { + executionContext.Output($"##[group]Download immutable action package '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}'"); + executionContext.Output($"Version: {downloadInfo.PackageDetails.Version}"); + executionContext.Output($"Digest: {downloadInfo.PackageDetails.ManifestDigest}"); + executionContext.Output($"Source commit SHA: {downloadInfo.ResolvedSha}"); + executionContext.Output("##[endgroup]"); + } + else + { + executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' (SHA:{downloadInfo.ResolvedSha})"); + } } //download and extract action in a temp folder and rename it on success @@ -796,43 +806,40 @@ namespace GitHub.Runner.Worker try { var useActionArchiveCache = false; - if (executionContext.Global.Variables.GetBoolean("DistributedTask.UseActionArchiveCache") == true) + var hasActionArchiveCache = false; + var actionArchiveCacheDir = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ActionArchiveCacheDirectory); + if (!string.IsNullOrEmpty(actionArchiveCacheDir) && + Directory.Exists(actionArchiveCacheDir)) { - var hasActionArchiveCache = false; - var actionArchiveCacheDir = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ActionArchiveCacheDirectory); - if (!string.IsNullOrEmpty(actionArchiveCacheDir) && - Directory.Exists(actionArchiveCacheDir)) - { - hasActionArchiveCache = true; - Trace.Info($"Check if action archive '{downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha}' already exists in cache directory '{actionArchiveCacheDir}'"); + hasActionArchiveCache = true; + Trace.Info($"Check if action archive '{downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha}' already exists in cache directory '{actionArchiveCacheDir}'"); #if OS_WINDOWS - var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.zip"); + var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.zip"); #else - var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.tar.gz"); + var cacheArchiveFile = Path.Combine(actionArchiveCacheDir, downloadInfo.ResolvedNameWithOwner.Replace(Path.DirectorySeparatorChar, '_').Replace(Path.AltDirectorySeparatorChar, '_'), $"{downloadInfo.ResolvedSha}.tar.gz"); #endif - if (File.Exists(cacheArchiveFile)) + if (File.Exists(cacheArchiveFile)) + { + try { - try - { - Trace.Info($"Found action archive '{cacheArchiveFile}' in cache directory '{actionArchiveCacheDir}'"); - File.Copy(cacheArchiveFile, archiveFile); - useActionArchiveCache = true; - executionContext.Debug($"Copied action archive '{cacheArchiveFile}' to '{archiveFile}'"); - } - catch (Exception ex) - { - 
Trace.Error($"Failed to copy action archive '{cacheArchiveFile}' to '{archiveFile}'. Error: {ex}"); - } + Trace.Info($"Found action archive '{cacheArchiveFile}' in cache directory '{actionArchiveCacheDir}'"); + File.Copy(cacheArchiveFile, archiveFile); + useActionArchiveCache = true; + executionContext.Debug($"Copied action archive '{cacheArchiveFile}' to '{archiveFile}'"); + } + catch (Exception ex) + { + Trace.Error($"Failed to copy action archive '{cacheArchiveFile}' to '{archiveFile}'. Error: {ex}"); } } - - executionContext.Global.JobTelemetry.Add(new JobTelemetry() - { - Type = JobTelemetryType.General, - Message = $"Action archive cache usage: {downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha} use cache {useActionArchiveCache} has cache {hasActionArchiveCache}" - }); } + executionContext.Global.JobTelemetry.Add(new JobTelemetry() + { + Type = JobTelemetryType.General, + Message = $"Action archive cache usage: {downloadInfo.ResolvedNameWithOwner}@{downloadInfo.ResolvedSha} use cache {useActionArchiveCache} has cache {hasActionArchiveCache}" + }); + if (!useActionArchiveCache) { await DownloadRepositoryArchive(executionContext, link, downloadInfo.Authentication?.Token, archiveFile); @@ -878,16 +885,9 @@ namespace GitHub.Runner.Worker int exitCode = await processInvoker.ExecuteAsync(stagingDirectory, tar, $"-xzf \"{archiveFile}\"", null, executionContext.CancellationToken); if (exitCode != 0) { - if (executionContext.Global.Variables.GetBoolean("DistributedTask.DetailUntarFailure") == true) - { - var fileInfo = new FileInfo(archiveFile); - var sha256hash = await IOUtil.GetFileContentSha256HashAsync(archiveFile); - throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile} (SHA256 '{sha256hash}', size '{fileInfo.Length}' bytes, tar outputs '{string.Join(' ', tarOutputs)}'). Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}."); - } - else - { - throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}."); - } + var fileInfo = new FileInfo(archiveFile); + var sha256hash = await IOUtil.GetFileContentSha256HashAsync(archiveFile); + throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile} (SHA256 '{sha256hash}', size '{fileInfo.Length}' bytes, tar outputs '{string.Join(' ', tarOutputs)}'). Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. 
return code: {exitCode}."); } } #endif @@ -1031,13 +1031,6 @@ namespace GitHub.Runner.Worker } } - foreach (var step in compositeAction.Steps) - { - if (string.IsNullOrEmpty(executionContext.Global.Variables.Get("DistributedTask.EnableCompositeActions")) && step.Reference.Type != Pipelines.ActionSourceType.Script) - { - throw new Exception("`uses:` keyword is not currently supported."); - } - } return setupInfo; } else @@ -1122,6 +1115,7 @@ namespace GitHub.Runner.Worker int timeoutSeconds = 20 * 60; while (retryCount < 3) { + string requestId = string.Empty; using (var actionDownloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds))) using (var actionDownloadCancellation = CancellationTokenSource.CreateLinkedTokenSource(actionDownloadTimeout.Token, executionContext.CancellationToken)) { @@ -1137,7 +1131,7 @@ namespace GitHub.Runner.Worker httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents); using (var response = await httpClient.GetAsync(downloadUrl)) { - var requestId = UrlUtil.GetGitHubRequestId(response.Headers); + requestId = UrlUtil.GetGitHubRequestId(response.Headers); if (!string.IsNullOrEmpty(requestId)) { Trace.Info($"Request URL: {downloadUrl} X-GitHub-Request-Id: {requestId} Http Status: {response.StatusCode}"); @@ -1175,7 +1169,7 @@ namespace GitHub.Runner.Worker catch (OperationCanceledException ex) when (!executionContext.CancellationToken.IsCancellationRequested && retryCount >= 2) { Trace.Info($"Action download final retry timeout after {timeoutSeconds} seconds."); - throw new TimeoutException($"Action '{downloadUrl}' download has timed out. Error: {ex.Message}"); + throw new TimeoutException($"Action '{downloadUrl}' download has timed out. Error: {ex.Message} {requestId}"); } catch (ActionNotFoundException) { @@ -1190,11 +1184,11 @@ namespace GitHub.Runner.Worker if (actionDownloadTimeout.Token.IsCancellationRequested) { // action download didn't finish within timeout - executionContext.Warning($"Action '{downloadUrl}' didn't finish download within {timeoutSeconds} seconds."); + executionContext.Warning($"Action '{downloadUrl}' didn't finish download within {timeoutSeconds} seconds. {requestId}"); } else { - executionContext.Warning($"Failed to download action '{downloadUrl}'. Error: {ex.Message}"); + executionContext.Warning($"Failed to download action '{downloadUrl}'. 
Error: {ex.Message} {requestId}"); } } } diff --git a/src/Runner.Worker/ActionManifestManager.cs b/src/Runner.Worker/ActionManifestManager.cs index 6b46e6a4e..c731b3d5d 100644 --- a/src/Runner.Worker/ActionManifestManager.cs +++ b/src/Runner.Worker/ActionManifestManager.cs @@ -144,7 +144,7 @@ namespace GitHub.Runner.Worker executionContext.Error(error.Message); } - throw new ArgumentException($"Fail to load {fileRelativePath}"); + throw new ArgumentException($"Failed to load {fileRelativePath}"); } if (actionDefinition.Execution == null) @@ -450,7 +450,8 @@ namespace GitHub.Runner.Worker } else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase) || string.Equals(usingToken.Value, "node16", StringComparison.OrdinalIgnoreCase) || - string.Equals(usingToken.Value, "node20", StringComparison.OrdinalIgnoreCase)) + string.Equals(usingToken.Value, "node20", StringComparison.OrdinalIgnoreCase) || + string.Equals(usingToken.Value, "node24", StringComparison.OrdinalIgnoreCase)) { if (string.IsNullOrEmpty(mainToken?.Value)) { @@ -490,7 +491,7 @@ namespace GitHub.Runner.Worker } else { - throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker', 'node12', 'node16' or 'node20' instead."); + throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker', 'node12', 'node16', 'node20' or 'node24' instead."); } } else if (pluginToken != null) @@ -501,7 +502,7 @@ namespace GitHub.Runner.Worker }; } - throw new NotSupportedException("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16' or 'node20'."); + throw new NotSupportedException("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16', 'node20' or 'node24'."); } private void ConvertInputs( diff --git a/src/Runner.Worker/ContainerOperationProvider.cs b/src/Runner.Worker/ContainerOperationProvider.cs index 6e5b12047..c5cccb77e 100644 --- a/src/Runner.Worker/ContainerOperationProvider.cs +++ b/src/Runner.Worker/ContainerOperationProvider.cs @@ -466,17 +466,39 @@ namespace GitHub.Runner.Worker { throw new InvalidOperationException($"Failed to create directory to store registry client credentials: {e.Message}"); } - var loginExitCode = await _dockerManager.DockerLogin( - executionContext, - configLocation, - container.RegistryServer, - container.RegistryAuthUsername, - container.RegistryAuthPassword); - if (loginExitCode != 0) + // Login docker with retry up to 3 times + int retryCount = 0; + int loginExitCode = 0; + while (retryCount < 3) + { + loginExitCode = await _dockerManager.DockerLogin( + executionContext, + configLocation, + container.RegistryServer, + container.RegistryAuthUsername, + container.RegistryAuthPassword); + if (loginExitCode == 0) + { + break; + } + else + { + retryCount++; + if (retryCount < 3) + { + var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10)); + executionContext.Warning($"Docker login for '{container.RegistryServer}' failed with exit code {loginExitCode}, back off {backOff.TotalSeconds} seconds before retry."); + await Task.Delay(backOff); + } + } + } + + if (retryCount == 3 && loginExitCode != 0) { throw new InvalidOperationException($"Docker login for '{container.RegistryServer}' failed with exit code {loginExitCode}"); } + return configLocation; } diff --git a/src/Runner.Worker/DiagnosticLogManager.cs b/src/Runner.Worker/DiagnosticLogManager.cs index 261689b5f..afc811b15 100644 --- 
a/src/Runner.Worker/DiagnosticLogManager.cs +++ b/src/Runner.Worker/DiagnosticLogManager.cs @@ -91,13 +91,13 @@ namespace GitHub.Runner.Worker string phaseName = executionContext.Global.Variables.System_PhaseDisplayName ?? "UnknownPhaseName"; // zip the files - string diagnosticsZipFileName = $"{buildName}-{phaseName}.zip"; + string diagnosticsZipFileName = $"{buildName}-{IOUtil.ReplaceInvalidFileNameChars(phaseName)}.zip"; string diagnosticsZipFilePath = Path.Combine(supportRootFolder, diagnosticsZipFileName); ZipFile.CreateFromDirectory(supportFilesFolder, diagnosticsZipFilePath); // upload the json metadata file executionContext.Debug("Uploading diagnostic metadata file."); - string metadataFileName = $"diagnostics-{buildName}-{phaseName}.json"; + string metadataFileName = $"diagnostics-{buildName}-{IOUtil.ReplaceInvalidFileNameChars(phaseName)}.json"; string metadataFilePath = Path.Combine(supportFilesFolder, metadataFileName); string phaseResult = GetTaskResultAsString(executionContext.Result); diff --git a/src/Runner.Worker/ExecutionContext.cs b/src/Runner.Worker/ExecutionContext.cs index 77c145d1d..e64c6e24a 100644 --- a/src/Runner.Worker/ExecutionContext.cs +++ b/src/Runner.Worker/ExecutionContext.cs @@ -83,7 +83,7 @@ namespace GitHub.Runner.Worker // Initialize void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token); void CancelToken(); - IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, ActionRunStage stage, Dictionary intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null, TimeSpan? timeout = null); + IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, ActionRunStage stage, Dictionary intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, List embeddedIssueCollector = null, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null, TimeSpan? timeout = null); IExecutionContext CreateEmbeddedChild(string scopeName, string contextName, Guid embeddedId, ActionRunStage stage, Dictionary intraActionState = null, string siblingScopeName = null); // logging @@ -135,7 +135,6 @@ namespace GitHub.Runner.Worker private readonly TimelineRecord _record = new(); private readonly Dictionary _detailRecords = new(); - private readonly List _embeddedIssueCollector; private readonly object _loggerLock = new(); private readonly object _matchersLock = new(); private readonly ExecutionContext _parentExecutionContext; @@ -154,6 +153,7 @@ namespace GitHub.Runner.Worker private CancellationTokenSource _cancellationTokenSource; private TaskCompletionSource _forceCompleted = new(); private bool _throttlingReported = false; + private List _embeddedIssueCollector; // only job level ExecutionContext will track throttling delay. private long _totalThrottlingDelayInMilliseconds = 0; @@ -356,6 +356,7 @@ namespace GitHub.Runner.Worker int? 
recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, + List embeddedIssueCollector = null, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null, @@ -365,6 +366,10 @@ namespace GitHub.Runner.Worker var child = new ExecutionContext(this, isEmbedded); child.Initialize(HostContext); + if ((Global.Variables.GetBoolean("RunService.FixEmbeddedIssues") ?? false) && embeddedIssueCollector != null) + { + child._embeddedIssueCollector = embeddedIssueCollector; + } child.Global = Global; child.ScopeName = scopeName; child.ContextName = contextName; @@ -433,7 +438,7 @@ namespace GitHub.Runner.Worker Dictionary intraActionState = null, string siblingScopeName = null) { - return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: null, intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName, timeout: GetRemainingTimeout(), recordOrder: _record.Order); + return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, embeddedIssueCollector: _embeddedIssueCollector, cancellationTokenSource: null, intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName, timeout: GetRemainingTimeout(), recordOrder: _record.Order); } public void Start(string currentOperation = null) @@ -503,6 +508,9 @@ namespace GitHub.Runner.Worker Status = _record.State, Number = _record.Order, Name = _record.Name, + ActionName = StepTelemetry?.Action, + Ref = StepTelemetry?.Ref, + Type = StepTelemetry?.Type, StartedAt = _record.StartTime, CompletedAt = _record.FinishTime, Annotations = new List() @@ -520,7 +528,6 @@ namespace GitHub.Runner.Worker Global.StepsResult.Add(stepResult); } - if (Root != this) { // only dispose TokenSource for step level ExecutionContext @@ -808,11 +815,6 @@ namespace GitHub.Runner.Worker Global.Variables = new Variables(HostContext, variables); - if (Global.Variables.GetBoolean("DistributedTask.ForceInternalNodeVersionOnRunnerTo16") ?? false) - { - Environment.SetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion, "node16"); - } - // Environment variables shared across all actions Global.EnvironmentVariables = new Dictionary(VarUtil.EnvironmentVariableKeyComparer); @@ -837,7 +839,6 @@ namespace GitHub.Runner.Worker // Actions environment ActionsEnvironment = message.ActionsEnvironment; - // Service container info Global.ServiceContainers = new List(); @@ -861,7 +862,21 @@ namespace GitHub.Runner.Worker ExpressionValues["secrets"] = Global.Variables.ToSecretsContext(); ExpressionValues["runner"] = new RunnerContext(); - ExpressionValues["job"] = new JobContext(); + + Trace.Info("Initializing Job context"); + var jobContext = new JobContext(); + if (Global.Variables.GetBoolean(Constants.Runner.Features.AddCheckRunIdToJobContext) ?? 
false) + { + ExpressionValues.TryGetValue("job", out var jobDictionary); + if (jobDictionary != null) + { + foreach (var pair in jobDictionary.AssertDictionary("job")) + { + jobContext[pair.Key] = pair.Value; + } + } + } + ExpressionValues["job"] = jobContext; Trace.Info("Initialize GitHub context"); var githubAccessToken = new StringContextData(Global.Variables.Get("system.github.token")); @@ -1418,7 +1433,7 @@ namespace GitHub.Runner.Worker { if (key == PipelineTemplateConstants.HostWorkspace) { - // The HostWorkspace context var is excluded so that there is a var that always points to the host path. + // The HostWorkspace context var is excluded so that there is a var that always points to the host path. // This var can be used to translate back from container paths, e.g. in HashFilesFunction, which always runs on the host machine continue; } diff --git a/src/Runner.Worker/FileCommandManager.cs b/src/Runner.Worker/FileCommandManager.cs index b03c31890..0021aa527 100644 --- a/src/Runner.Worker/FileCommandManager.cs +++ b/src/Runner.Worker/FileCommandManager.cs @@ -244,7 +244,7 @@ namespace GitHub.Runner.Worker if (resultsReceiverEndpoint != null) { Trace.Info($"Queueing results file ({filePath}) for attachment upload ({attachmentName})"); - var stepId = context.Id; + var stepId = context.IsEmbedded ? context.EmbeddedId : context.Id; // Attachments must be added to the parent context (job), not the current context (step) context.Root.QueueSummaryFile(attachmentName, scrubbedFilePath, stepId); } diff --git a/src/Runner.Worker/Handlers/ContainerActionHandler.cs b/src/Runner.Worker/Handlers/ContainerActionHandler.cs index eb75bb59c..775ce2f04 100644 --- a/src/Runner.Worker/Handlers/ContainerActionHandler.cs +++ b/src/Runner.Worker/Handlers/ContainerActionHandler.cs @@ -223,6 +223,10 @@ namespace GitHub.Runner.Worker.Handlers { Environment["ACTIONS_CACHE_URL"] = cacheUrl; } + if (systemConnection.Data.TryGetValue("PipelinesServiceUrl", out var pipelinesServiceUrl) && !string.IsNullOrEmpty(pipelinesServiceUrl)) + { + Environment["ACTIONS_RUNTIME_URL"] = pipelinesServiceUrl; + } if (systemConnection.Data.TryGetValue("GenerateIdTokenUrl", out var generateIdTokenUrl) && !string.IsNullOrEmpty(generateIdTokenUrl)) { Environment["ACTIONS_ID_TOKEN_REQUEST_URL"] = generateIdTokenUrl; diff --git a/src/Runner.Worker/Handlers/HandlerFactory.cs b/src/Runner.Worker/Handlers/HandlerFactory.cs index 5f1fce0cf..ee022ec9d 100644 --- a/src/Runner.Worker/Handlers/HandlerFactory.cs +++ b/src/Runner.Worker/Handlers/HandlerFactory.cs @@ -57,33 +57,44 @@ namespace GitHub.Runner.Worker.Handlers handler = HostContext.CreateService(); var nodeData = data as NodeJSActionExecutionData; - // With node12 EoL in 04/2022, we want to be able to uniformly upgrade all JS actions to node16 from the server - if (string.Equals(nodeData.NodeVersion, "node12", StringComparison.InvariantCultureIgnoreCase)) + // With node12 EoL in 04/2022 and node16 EoL in 09/23, we want to execute all JS actions using node20 + // With node20 EoL approaching, we're preparing to migrate to node24 + if (string.Equals(nodeData.NodeVersion, "node12", StringComparison.InvariantCultureIgnoreCase) || + string.Equals(nodeData.NodeVersion, "node16", StringComparison.InvariantCultureIgnoreCase)) { - var repoAction = action as Pipelines.RepositoryPathReference; - if (repoAction != null) - { - var warningActions = new HashSet(); - if (executionContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode12DetectedAfterEndOfLifeEnvVariable, out var 
node16ForceWarnings)) - { - warningActions = StringUtil.ConvertFromJson>(node16ForceWarnings); - } - - string repoActionFullName; - if (string.IsNullOrEmpty(repoAction.Name)) - { - repoActionFullName = repoAction.Path; // local actions don't have a 'Name' - } - else - { - repoActionFullName = $"{repoAction.Name}/{repoAction.Path ?? string.Empty}".TrimEnd('/') + $"@{repoAction.Ref}"; - } - - warningActions.Add(repoActionFullName); - executionContext.Global.Variables.Set("Node16ForceActionsWarnings", StringUtil.ConvertToJson(warningActions)); - } - nodeData.NodeVersion = "node16"; + nodeData.NodeVersion = Common.Constants.Runner.NodeMigration.Node20; } + + // Check if node20 was explicitly specified in the action + // We don't modify if node24 was explicitly specified + if (string.Equals(nodeData.NodeVersion, Constants.Runner.NodeMigration.Node20, StringComparison.InvariantCultureIgnoreCase)) + { + bool useNode24ByDefault = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.UseNode24ByDefaultFlag) ?? false; + bool requireNode24 = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.RequireNode24Flag) ?? false; + + var (nodeVersion, configWarningMessage) = NodeUtil.DetermineActionsNodeVersion(environment, useNode24ByDefault, requireNode24); + var (finalNodeVersion, platformWarningMessage) = NodeUtil.CheckNodeVersionForLinuxArm32(nodeVersion); + nodeData.NodeVersion = finalNodeVersion; + + if (!string.IsNullOrEmpty(configWarningMessage)) + { + executionContext.Warning(configWarningMessage); + } + + if (!string.IsNullOrEmpty(platformWarningMessage)) + { + executionContext.Warning(platformWarningMessage); + } + + // Show information about Node 24 migration in Phase 2 + if (useNode24ByDefault && !requireNode24 && string.Equals(finalNodeVersion, Constants.Runner.NodeMigration.Node24, StringComparison.OrdinalIgnoreCase)) + { + string infoMessage = "Node 20 is being deprecated. This workflow is running with Node 24 by default. " + + "If you need to temporarily use Node 20, you can set the ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION=true environment variable."; + executionContext.Output(infoMessage); + } + } + (handler as INodeScriptActionHandler).Data = nodeData; } else if (data.ExecutionType == ActionExecutionType.Script) diff --git a/src/Runner.Worker/Handlers/NodeScriptActionHandler.cs b/src/Runner.Worker/Handlers/NodeScriptActionHandler.cs index 32d4eb084..a399f13d1 100644 --- a/src/Runner.Worker/Handlers/NodeScriptActionHandler.cs +++ b/src/Runner.Worker/Handlers/NodeScriptActionHandler.cs @@ -58,6 +58,10 @@ namespace GitHub.Runner.Worker.Handlers { Environment["ACTIONS_CACHE_URL"] = cacheUrl; } + if (systemConnection.Data.TryGetValue("PipelinesServiceUrl", out var pipelinesServiceUrl) && !string.IsNullOrEmpty(pipelinesServiceUrl)) + { + Environment["ACTIONS_RUNTIME_URL"] = pipelinesServiceUrl; + } if (systemConnection.Data.TryGetValue("GenerateIdTokenUrl", out var generateIdTokenUrl) && !string.IsNullOrEmpty(generateIdTokenUrl)) { Environment["ACTIONS_ID_TOKEN_REQUEST_URL"] = generateIdTokenUrl; @@ -68,6 +72,11 @@ namespace GitHub.Runner.Worker.Handlers Environment["ACTIONS_RESULTS_URL"] = resultsUrl; } + if (ExecutionContext.Global.Variables.GetBoolean("actions_uses_cache_service_v2") ?? false) + { + Environment["ACTIONS_CACHE_SERVICE_V2"] = bool.TrueString; + } + // Resolve the target script. 
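
The HandlerFactory change above routes any node12/node16 action to node20 and then lets two feature flags (UseNode24ByDefaultFlag, RequireNode24Flag) decide between node20 and node24, with NodeUtil.CheckNodeVersionForLinuxArm32 apparently downgrading node24 on 32-bit ARM Linux. NodeUtil's real implementation is not part of this diff; the sketch below is only a plausible reading of the decision order, inferred from the call site and from the ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION opt-out mentioned in the info message. The `environment` dictionary parameter and the warning text are assumptions.

```csharp
// Illustrative sketch only; not the actual NodeUtil source.
using System;
using System.Collections.Generic;

static class NodeVersionSketch
{
    public static (string NodeVersion, string Warning) Determine(
        IDictionary<string, string> environment,   // step environment (assumption)
        bool useNode24ByDefault,                   // phase 2 flag
        bool requireNode24)                        // phase 3 flag
    {
        const string Node20 = "node20";
        const string Node24 = "node24";

        if (requireNode24)
        {
            // Phase 3: node24 is enforced, no opt-out.
            return (Node24, null);
        }

        if (useNode24ByDefault)
        {
            // Phase 2: default to node24 unless the step opts back to node20.
            if (environment != null &&
                environment.TryGetValue("ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION", out var optOut) &&
                string.Equals(optOut, "true", StringComparison.OrdinalIgnoreCase))
            {
                return (Node20, "Node 20 is deprecated; this step opted out of Node 24.");
            }

            return (Node24, null);
        }

        // Phase 1: keep node20 as the default runtime.
        return (Node20, null);
    }
}
```
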
string target = null; if (stage == ActionRunStage.Main) @@ -89,7 +98,6 @@ namespace GitHub.Runner.Worker.Handlers ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre; ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost; } - ExecutionContext.StepTelemetry.Type = Data.NodeVersion; ArgUtil.NotNullOrEmpty(target, nameof(target)); target = Path.Combine(ActionDirectory, target); @@ -102,19 +110,8 @@ namespace GitHub.Runner.Worker.Handlers workingDirectory = HostContext.GetDirectory(WellKnownDirectory.Work); } - if (string.Equals(Data.NodeVersion, "node12", StringComparison.OrdinalIgnoreCase) && - Constants.Runner.PlatformArchitecture.Equals(Constants.Architecture.Arm64)) - { - ExecutionContext.Output($"The node12 is not supported. Use node16 instead."); - Data.NodeVersion = "node16"; - } - - string forcedNodeVersion = System.Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedActionsNodeVersion); - if (forcedNodeVersion == "node16" && Data.NodeVersion != "node16") - { - Data.NodeVersion = "node16"; - } var nodeRuntimeVersion = await StepHost.DetermineNodeRuntimeVersion(ExecutionContext, Data.NodeVersion); + ExecutionContext.StepTelemetry.Type = nodeRuntimeVersion; string file = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), nodeRuntimeVersion, "bin", $"node{IOUtil.ExeExtension}"); // Format the arguments passed to node. @@ -134,28 +131,6 @@ namespace GitHub.Runner.Worker.Handlers // Remove environment variable that may cause conflicts with the node within the runner. Environment.Remove("NODE_ICU_DATA"); // https://github.com/actions/runner/issues/795 - if (string.Equals(Data.NodeVersion, Constants.Runner.DeprecatedNodeVersion, StringComparison.OrdinalIgnoreCase) && (ExecutionContext.Global.Variables.GetBoolean(Constants.Runner.Features.Node16Warning) ?? false)) - { - var repoAction = Action as RepositoryPathReference; - var warningActions = new HashSet(); - if (ExecutionContext.Global.Variables.TryGetValue(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, out var deprecatedNodeWarnings)) - { - warningActions = StringUtil.ConvertFromJson>(deprecatedNodeWarnings); - } - - if (string.IsNullOrEmpty(repoAction.Name)) - { - // local actions don't have a 'Name' - warningActions.Add(repoAction.Path); - } - else - { - warningActions.Add($"{repoAction.Name}/{repoAction.Path ?? 
string.Empty}".TrimEnd('/') + $"@{repoAction.Ref}"); - } - - ExecutionContext.Global.Variables.Set(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, StringUtil.ConvertToJson(warningActions)); - } - using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager)) using (var stderrManager = new OutputManager(ExecutionContext, ActionCommandManager)) { diff --git a/src/Runner.Worker/Handlers/ScriptHandler.cs b/src/Runner.Worker/Handlers/ScriptHandler.cs index 30114f27c..e6fa90a0a 100644 --- a/src/Runner.Worker/Handlers/ScriptHandler.cs +++ b/src/Runner.Worker/Handlers/ScriptHandler.cs @@ -83,40 +83,19 @@ namespace GitHub.Runner.Worker.Handlers shellCommand = "pwsh"; if (validateShellOnHost) { - if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) - { - shellCommandPath = WhichUtil.Which2(shellCommand, require: false, Trace, prependPath); - } - else - { - shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath); - } + shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath); if (string.IsNullOrEmpty(shellCommandPath)) { shellCommand = "powershell"; - Trace.Info($"Defaulting to {shellCommand}"); - if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) - { - shellCommandPath = WhichUtil.Which2(shellCommand, require: true, Trace, prependPath); - } - else - { - shellCommandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath); - } + Trace.Info($"Defaulting to {shellCommand}"); + shellCommandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath); } } #else shellCommand = "sh"; if (validateShellOnHost) { - if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) - { - shellCommandPath = WhichUtil.Which2("bash", false, Trace, prependPath) ?? WhichUtil.Which2("sh", true, Trace, prependPath); - } - else - { - shellCommandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath); - } + shellCommandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? 
WhichUtil.Which("sh", true, Trace, prependPath); } #endif argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand); @@ -127,14 +106,7 @@ namespace GitHub.Runner.Worker.Handlers shellCommand = parsed.shellCommand; if (validateShellOnHost) { - if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) - { - shellCommandPath = WhichUtil.Which2(parsed.shellCommand, true, Trace, prependPath); - } - else - { - shellCommandPath = WhichUtil.Which(parsed.shellCommand, true, Trace, prependPath); - } + shellCommandPath = WhichUtil.Which(parsed.shellCommand, true, Trace, prependPath); } argFormat = $"{parsed.shellArgs}".TrimStart(); @@ -216,38 +188,17 @@ namespace GitHub.Runner.Worker.Handlers { #if OS_WINDOWS shellCommand = "pwsh"; - if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) - { - commandPath = WhichUtil.Which2(shellCommand, require: false, Trace, prependPath); - } - else - { - commandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath); - } + commandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath); if (string.IsNullOrEmpty(commandPath)) { shellCommand = "powershell"; Trace.Info($"Defaulting to {shellCommand}"); - if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) - { - commandPath = WhichUtil.Which2(shellCommand, require: true, Trace, prependPath); - } - else - { - commandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath); - } + commandPath = WhichUtil.Which(shellCommand, require: true, Trace, prependPath); } ArgUtil.NotNullOrEmpty(commandPath, "Default Shell"); #else shellCommand = "sh"; - if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) - { - commandPath = WhichUtil.Which2("bash", false, Trace, prependPath) ?? WhichUtil.Which2("sh", true, Trace, prependPath); - } - else - { - commandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? WhichUtil.Which("sh", true, Trace, prependPath); - } + commandPath = WhichUtil.Which("bash", false, Trace, prependPath) ?? 
WhichUtil.Which("sh", true, Trace, prependPath); #endif argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand); } @@ -258,14 +209,7 @@ namespace GitHub.Runner.Worker.Handlers if (!IsActionStep && systemShells.Contains(shell)) { shellCommand = shell; - if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) - { - commandPath = WhichUtil.Which2(shell, !isContainerStepHost, Trace, prependPath); - } - else - { - commandPath = WhichUtil.Which(shell, !isContainerStepHost, Trace, prependPath); - } + commandPath = WhichUtil.Which(shell, !isContainerStepHost, Trace, prependPath); if (shell == "bash") { argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat("sh"); @@ -280,14 +224,7 @@ namespace GitHub.Runner.Worker.Handlers var parsed = ScriptHandlerHelpers.ParseShellOptionString(shell); shellCommand = parsed.shellCommand; // For non-ContainerStepHost, the command must be located on the host by Which - if (ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseWhich2") == true) - { - commandPath = WhichUtil.Which2(parsed.shellCommand, !isContainerStepHost, Trace, prependPath); - } - else - { - commandPath = WhichUtil.Which(parsed.shellCommand, !isContainerStepHost, Trace, prependPath); - } + commandPath = WhichUtil.Which(parsed.shellCommand, !isContainerStepHost, Trace, prependPath); argFormat = $"{parsed.shellArgs}".TrimStart(); if (string.IsNullOrEmpty(argFormat)) { diff --git a/src/Runner.Worker/Handlers/StepHost.cs b/src/Runner.Worker/Handlers/StepHost.cs index 1270dd90e..211009658 100644 --- a/src/Runner.Worker/Handlers/StepHost.cs +++ b/src/Runner.Worker/Handlers/StepHost.cs @@ -1,6 +1,5 @@ using System; using System.Collections.Generic; -using GitHub.DistributedTask.Pipelines.ContextData; using System.Text; using System.Threading; using System.Threading.Tasks; @@ -9,7 +8,6 @@ using GitHub.Runner.Common; using GitHub.Runner.Sdk; using System.Linq; using GitHub.Runner.Worker.Container.ContainerHooks; -using System.IO; using System.Threading.Channels; namespace GitHub.Runner.Worker.Handlers @@ -60,7 +58,14 @@ namespace GitHub.Runner.Worker.Handlers public Task DetermineNodeRuntimeVersion(IExecutionContext executionContext, string preferredVersion) { - return Task.FromResult(preferredVersion); + // Use NodeUtil to check if Node24 is requested but we're on ARM32 Linux + var (nodeVersion, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion); + if (!string.IsNullOrEmpty(warningMessage)) + { + executionContext.Warning(warningMessage); + } + + return Task.FromResult(nodeVersion); } public async Task ExecuteAsync(IExecutionContext context, @@ -137,8 +142,12 @@ namespace GitHub.Runner.Worker.Handlers public async Task DetermineNodeRuntimeVersion(IExecutionContext executionContext, string preferredVersion) { - // Optimistically use the default - string nodeExternal = preferredVersion; + // Use NodeUtil to check if Node24 is requested but we're on ARM32 Linux + var (nodeExternal, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion); + if (!string.IsNullOrEmpty(warningMessage)) + { + executionContext.Warning(warningMessage); + } if (FeatureManager.IsContainerHooksEnabled(executionContext.Global.Variables)) { @@ -264,7 +273,14 @@ namespace GitHub.Runner.Worker.Handlers private string CheckPlatformForAlpineContainer(IExecutionContext executionContext, string preferredVersion) { - string nodeExternal = preferredVersion; + // Use NodeUtil to check if Node24 is requested but we're on 
ARM32 Linux + var (nodeExternal, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion); + if (!string.IsNullOrEmpty(warningMessage)) + { + executionContext.Warning(warningMessage); + } + + // Check for Alpine container compatibility if (!Constants.Runner.PlatformArchitecture.Equals(Constants.Architecture.X64)) { var os = Constants.Runner.Platform.ToString(); diff --git a/src/Runner.Worker/IssueMatcher.cs b/src/Runner.Worker/IssueMatcher.cs index 35c1f881c..4089d93da 100644 --- a/src/Runner.Worker/IssueMatcher.cs +++ b/src/Runner.Worker/IssueMatcher.cs @@ -21,6 +21,7 @@ namespace GitHub.Runner.Worker public sealed class IssueMatcher { private string _defaultSeverity; + private string _defaultFromPath; private string _owner; private IssuePattern[] _patterns; private IssueMatch[] _state; @@ -29,6 +30,7 @@ namespace GitHub.Runner.Worker { _owner = config.Owner; _defaultSeverity = config.Severity; + _defaultFromPath = config.FromPath; _patterns = config.Patterns.Select(x => new IssuePattern(x, timeout)).ToArray(); Reset(); } @@ -59,6 +61,19 @@ namespace GitHub.Runner.Worker } } + public string DefaultFromPath + { + get + { + if (_defaultFromPath == null) + { + _defaultFromPath = string.Empty; + } + + return _defaultFromPath; + } + } + public IssueMatch Match(string line) { // Single pattern @@ -69,7 +84,7 @@ namespace GitHub.Runner.Worker if (regexMatch.Success) { - return new IssueMatch(null, pattern, regexMatch.Groups, DefaultSeverity); + return new IssueMatch(null, pattern, regexMatch.Groups, DefaultSeverity, DefaultFromPath); } return null; @@ -110,7 +125,7 @@ namespace GitHub.Runner.Worker } // Return - return new IssueMatch(runningMatch, pattern, regexMatch.Groups, DefaultSeverity); + return new IssueMatch(runningMatch, pattern, regexMatch.Groups, DefaultSeverity, DefaultFromPath); } // Not the last pattern else @@ -184,7 +199,7 @@ namespace GitHub.Runner.Worker public sealed class IssueMatch { - public IssueMatch(IssueMatch runningMatch, IssuePattern pattern, GroupCollection groups, string defaultSeverity = null) + public IssueMatch(IssueMatch runningMatch, IssuePattern pattern, GroupCollection groups, string defaultSeverity = null, string defaultFromPath = null) { File = runningMatch?.File ?? GetValue(groups, pattern.File); Line = runningMatch?.Line ?? GetValue(groups, pattern.Line); @@ -198,6 +213,11 @@ namespace GitHub.Runner.Worker { Severity = defaultSeverity; } + + if (string.IsNullOrEmpty(FromPath) && !string.IsNullOrEmpty(defaultFromPath)) + { + FromPath = defaultFromPath; + } } public string File { get; } @@ -282,6 +302,9 @@ namespace GitHub.Runner.Worker [DataMember(Name = "pattern")] private IssuePatternConfig[] _patterns; + [DataMember(Name = "fromPath")] + private string _fromPath; + public string Owner { get @@ -318,6 +341,24 @@ namespace GitHub.Runner.Worker } } + public string FromPath + { + get + { + if (_fromPath == null) + { + _fromPath = string.Empty; + } + + return _fromPath; + } + + set + { + _fromPath = value; + } + } + public IssuePatternConfig[] Patterns { get diff --git a/src/Runner.Worker/JobContext.cs b/src/Runner.Worker/JobContext.cs index e3760560f..09f3296de 100644 --- a/src/Runner.Worker/JobContext.cs +++ b/src/Runner.Worker/JobContext.cs @@ -1,4 +1,4 @@ -using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.Pipelines.ContextData; using GitHub.Runner.Common.Util; using GitHub.Runner.Common; @@ -56,5 +56,31 @@ namespace GitHub.Runner.Worker } } } + + public double? 
CheckRunId + { + get + { + if (this.TryGetValue("check_run_id", out var value) && value is NumberContextData number) + { + return number.Value; + } + else + { + return null; + } + } + set + { + if (value.HasValue) + { + this["check_run_id"] = new NumberContextData(value.Value); + } + else + { + this["check_run_id"] = null; + } + } + } } } diff --git a/src/Runner.Worker/JobExtension.cs b/src/Runner.Worker/JobExtension.cs index c010fa908..58e8929b4 100644 --- a/src/Runner.Worker/JobExtension.cs +++ b/src/Runner.Worker/JobExtension.cs @@ -17,6 +17,7 @@ using GitHub.Runner.Common; using GitHub.Runner.Common.Util; using GitHub.Runner.Sdk; using GitHub.Services.Common; +using Newtonsoft.Json; using Pipelines = GitHub.DistributedTask.Pipelines; namespace GitHub.Runner.Worker @@ -42,11 +43,13 @@ namespace GitHub.Runner.Worker public sealed class JobExtension : RunnerService, IJobExtension { private readonly HashSet _existingProcesses = new(StringComparer.OrdinalIgnoreCase); - private readonly List> _connectivityCheckTasks = new(); + private readonly List> _connectivityCheckTasks = new(); private bool _processCleanup; private string _processLookupId = $"github_{Guid.NewGuid()}"; private CancellationTokenSource _diskSpaceCheckToken = new(); private Task _diskSpaceCheckTask = null; + private CancellationTokenSource _serviceConnectivityCheckToken = new(); + private Task _serviceConnectivityCheckTask = null; // Download all required actions. // Make sure all condition inputs are valid. @@ -392,6 +395,18 @@ namespace GitHub.Runner.Worker } } + // Register custom image creation post-job step if the "snapshot" token is present in the message. + var snapshotRequest = templateEvaluator.EvaluateJobSnapshotRequest(message.Snapshot, jobContext.ExpressionValues, jobContext.ExpressionFunctions); + if (snapshotRequest != null) + { + var snapshotOperationProvider = HostContext.GetService(); + jobContext.RegisterPostJobStep(new JobExtensionRunner( + runAsync: (executionContext, _) => snapshotOperationProvider.CreateSnapshotRequestAsync(executionContext, snapshotRequest), + condition: snapshotRequest.Condition, + displayName: $"Create custom image", + data: null)); + } + // Register Job Completed hook if the variable is set var completedHookPath = Environment.GetEnvironmentVariable("ACTIONS_RUNNER_HOOK_JOB_COMPLETED"); if (!string.IsNullOrEmpty(completedHookPath)) @@ -442,11 +457,14 @@ namespace GitHub.Runner.Worker { foreach (var checkUrl in checkUrls) { - _connectivityCheckTasks.Add(CheckConnectivity(checkUrl)); + _connectivityCheckTasks.Add(CheckConnectivity(checkUrl, accessToken: string.Empty, timeoutInSeconds: 5, token: CancellationToken.None)); } } } + Trace.Info($"Start checking service connectivity in background."); + _serviceConnectivityCheckTask = CheckServiceConnectivityAsync(context, _serviceConnectivityCheckToken.Token); + return steps; } catch (OperationCanceledException ex) when (jobContext.CancellationToken.IsCancellationRequested) @@ -680,7 +698,7 @@ namespace GitHub.Runner.Worker { var result = await check; Trace.Info($"Connectivity check result: {result}"); - context.Global.JobTelemetry.Add(new JobTelemetry() { Type = JobTelemetryType.ConnectivityCheck, Message = result }); + context.Global.JobTelemetry.Add(new JobTelemetry() { Type = JobTelemetryType.ConnectivityCheck, Message = $"{result.EndpointUrl}: {result.StatusCode}" }); } } catch (Exception ex) @@ -690,6 +708,22 @@ namespace GitHub.Runner.Worker context.Global.JobTelemetry.Add(new JobTelemetry() { Type = JobTelemetryType.ConnectivityCheck, 
Message = $"Fail to check server connectivity. {ex.Message}" }); } } + + // Collect service connectivity check result + if (_serviceConnectivityCheckTask != null) + { + _serviceConnectivityCheckToken.Cancel(); + try + { + await _serviceConnectivityCheckTask; + } + catch (Exception ex) + { + Trace.Error($"Fail to check service connectivity."); + Trace.Error(ex); + context.Global.JobTelemetry.Add(new JobTelemetry() { Type = JobTelemetryType.ConnectivityCheck, Message = $"Fail to check service connectivity. {ex.Message}" }); + } + } } catch (Exception ex) { @@ -705,11 +739,13 @@ namespace GitHub.Runner.Worker } } - private async Task CheckConnectivity(string endpointUrl) + private async Task CheckConnectivity(string endpointUrl, string accessToken, int timeoutInSeconds, CancellationToken token) { Trace.Info($"Check server connectivity for {endpointUrl}."); - string result = string.Empty; - using (var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(5))) + CheckResult result = new CheckResult() { EndpointUrl = endpointUrl }; + var stopwatch = Stopwatch.StartNew(); + using (var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutInSeconds))) + using (var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token, timeoutTokenSource.Token)) { try { @@ -717,21 +753,44 @@ namespace GitHub.Runner.Worker using (var httpClient = new HttpClient(httpClientHandler)) { httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents); - var response = await httpClient.GetAsync(endpointUrl, timeoutTokenSource.Token); - result = $"{endpointUrl}: {response.StatusCode}"; + if (!string.IsNullOrEmpty(accessToken)) + { + httpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {accessToken}"); + } + + var response = await httpClient.GetAsync(endpointUrl, linkedTokenSource.Token); + result.StatusCode = $"{response.StatusCode}"; + + var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers); + var vssRequestId = UrlUtil.GetVssRequestId(response.Headers); + if (!string.IsNullOrEmpty(githubRequestId)) + { + result.RequestId = githubRequestId; + } + else if (!string.IsNullOrEmpty(vssRequestId)) + { + result.RequestId = vssRequestId; + } } } + catch (Exception ex) when (ex is OperationCanceledException && token.IsCancellationRequested) + { + Trace.Error($"Request canceled during connectivity check: {ex}"); + result.StatusCode = "canceled"; + } catch (Exception ex) when (ex is OperationCanceledException && timeoutTokenSource.IsCancellationRequested) { Trace.Error($"Request timeout during connectivity check: {ex}"); - result = $"{endpointUrl}: timeout"; + result.StatusCode = "timeout"; } catch (Exception ex) { Trace.Error($"Catch exception during connectivity check: {ex}"); - result = $"{endpointUrl}: {ex.Message}"; + result.StatusCode = $"{ex.Message}"; } } + stopwatch.Stop(); + result.DurationInMs = (int)stopwatch.ElapsedMilliseconds; return result; } @@ -769,6 +828,84 @@ namespace GitHub.Runner.Worker } } + private async Task CheckServiceConnectivityAsync(IExecutionContext context, CancellationToken token) + { + var connectionTest = context.Global.Variables.Get(WellKnownDistributedTaskVariables.RunnerServiceConnectivityTest); + if (string.IsNullOrEmpty(connectionTest)) + { + return; + } + + ServiceConnectivityCheckInput checkConnectivityInfo; + try + { + checkConnectivityInfo = StringUtil.ConvertFromJson(connectionTest); + } + catch (Exception ex) + { + context.Global.JobTelemetry.Add(new JobTelemetry() { Type = 
JobTelemetryType.General, Message = $"Fail to parse JSON. {ex.Message}" }); + return; + } + + if (checkConnectivityInfo == null) + { + return; + } + + // make sure interval is at least 10 seconds + checkConnectivityInfo.IntervalInSecond = Math.Max(10, checkConnectivityInfo.IntervalInSecond); + + var systemConnection = context.Global.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); + var accessToken = systemConnection.Authorization.Parameters[EndpointAuthorizationParameters.AccessToken]; + + var testResult = new ServiceConnectivityCheckResult(); + while (!token.IsCancellationRequested) + { + foreach (var endpoint in checkConnectivityInfo.Endpoints) + { + if (string.IsNullOrEmpty(endpoint.Key) || string.IsNullOrEmpty(endpoint.Value)) + { + continue; + } + + if (!testResult.EndpointsResult.ContainsKey(endpoint.Key)) + { + testResult.EndpointsResult[endpoint.Key] = new List(); + } + + try + { + var result = await CheckConnectivity(endpoint.Value, accessToken: accessToken, timeoutInSeconds: checkConnectivityInfo.RequestTimeoutInSecond, token); + testResult.EndpointsResult[endpoint.Key].Add($"{result.StartTime:s}: {result.StatusCode} - {result.RequestId} - {result.DurationInMs}ms"); + if (!testResult.HasFailure && + result.StatusCode != "OK" && + result.StatusCode != "canceled") + { + // track if any endpoint is not reachable + testResult.HasFailure = true; + } + } + catch (Exception ex) + { + testResult.EndpointsResult[endpoint.Key].Add($"{DateTime.UtcNow:s}: {ex.Message}"); + } + } + + try + { + await Task.Delay(TimeSpan.FromSeconds(checkConnectivityInfo.IntervalInSecond), token); + } + catch (TaskCanceledException) + { + // ignore + } + } + + var telemetryData = StringUtil.ConvertToJson(testResult, Formatting.None); + Trace.Verbose($"Connectivity check result: {telemetryData}"); + context.Global.JobTelemetry.Add(new JobTelemetry() { Type = JobTelemetryType.ConnectivityCheck, Message = telemetryData }); + } + private Dictionary SnapshotProcesses() { Dictionary snapshot = new(); @@ -800,5 +937,23 @@ namespace GitHub.Runner.Worker throw new ArgumentException("Jobs without a job container are forbidden on this runner, please add a 'container:' to your job or contact your self-hosted runner administrator."); } } + + private class CheckResult + { + public CheckResult() + { + StartTime = DateTime.UtcNow; + } + + public string EndpointUrl { get; set; } + + public DateTime StartTime { get; set; } + + public string StatusCode { get; set; } + + public string RequestId { get; set; } + + public int DurationInMs { get; set; } + } } } diff --git a/src/Runner.Worker/JobRunner.cs b/src/Runner.Worker/JobRunner.cs index 6db477214..1390af13b 100644 --- a/src/Runner.Worker/JobRunner.cs +++ b/src/Runner.Worker/JobRunner.cs @@ -15,6 +15,7 @@ using GitHub.Runner.Common.Util; using GitHub.Runner.Sdk; using GitHub.Services.Common; using GitHub.Services.WebApi; +using Sdk.RSWebApi.Contracts; using Pipelines = GitHub.DistributedTask.Pipelines; namespace GitHub.Runner.Worker @@ -42,25 +43,23 @@ namespace GitHub.Runner.Worker Trace.Info("Job ID {0}", message.JobId); DateTime jobStartTimeUtc = DateTime.UtcNow; + _runnerSettings = HostContext.GetService().GetSettings(); IRunnerService server = null; // add orchestration id to useragent for better correlation. 
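
The background service connectivity check added to JobExtension above deserializes a JSON payload from the RunnerServiceConnectivityTest variable into ServiceConnectivityCheckInput and accumulates results in ServiceConnectivityCheckResult. Neither contract is defined in this diff; the shapes below are inferred from how the fields are used (Endpoints iterated as name/URL pairs, IntervalInSecond clamped to a 10-second minimum, RequestTimeoutInSecond passed to CheckConnectivity, EndpointsResult keyed by endpoint name) and should be read as a sketch, not the real definitions.

```csharp
// Plausible contract shapes, inferred from CheckServiceConnectivityAsync above.
using System.Collections.Generic;

public class ServiceConnectivityCheckInput
{
    // endpoint name -> URL to probe
    public Dictionary<string, string> Endpoints { get; set; } = new();

    // clamped to a minimum of 10 seconds by the runner
    public int IntervalInSecond { get; set; }

    // per-request timeout handed to CheckConnectivity
    public int RequestTimeoutInSecond { get; set; }
}

public class ServiceConnectivityCheckResult
{
    // endpoint name -> "<start time>: <status> - <request id> - <duration>ms" entries
    public Dictionary<string, List<string>> EndpointsResult { get; set; } = new();

    public bool HasFailure { get; set; }
}
```
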
if (message.Variables.TryGetValue(Constants.Variables.System.OrchestrationId, out VariableValue orchestrationId) && !string.IsNullOrEmpty(orchestrationId.Value)) { - HostContext.UserAgents.Add(new ProductInfoHeaderValue("OrchestrationId", orchestrationId.Value)); + if (!HostContext.UserAgents.Any(x => string.Equals(x.Product?.Name, "OrchestrationId", StringComparison.OrdinalIgnoreCase))) + { + // make the orchestration id the first item in the user-agent header to avoid get truncated in server log. + HostContext.UserAgents.Insert(0, new ProductInfoHeaderValue("OrchestrationId", orchestrationId.Value)); + } // make sure orchestration id is in the user-agent header. VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy); } - var jobServerQueueTelemetry = false; - if (message.Variables.TryGetValue("DistributedTask.EnableJobServerQueueTelemetry", out VariableValue enableJobServerQueueTelemetry) && - !string.IsNullOrEmpty(enableJobServerQueueTelemetry?.Value)) - { - jobServerQueueTelemetry = StringUtil.ConvertToBoolean(enableJobServerQueueTelemetry.Value); - } - ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); if (MessageUtil.IsRunServiceJob(message.MessageType)) { @@ -82,7 +81,7 @@ namespace GitHub.Runner.Worker launchServer.InitializeLaunchClient(new Uri(launchReceiverEndpoint), accessToken); } _jobServerQueue = HostContext.GetService(); - _jobServerQueue.Start(message, resultsServiceOnly: true, enableTelemetry: jobServerQueueTelemetry); + _jobServerQueue.Start(message, resultsServiceOnly: true); } else { @@ -104,7 +103,7 @@ namespace GitHub.Runner.Worker VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, delegatingHandlers); await jobServer.ConnectAsync(jobConnection); - _jobServerQueue.Start(message, enableTelemetry: jobServerQueueTelemetry); + _jobServerQueue.Start(message); server = jobServer; } @@ -164,8 +163,6 @@ namespace GitHub.Runner.Worker jobContext.SetRunnerContext("os", VarUtil.OS); jobContext.SetRunnerContext("arch", VarUtil.OSArchitecture); - - _runnerSettings = HostContext.GetService().GetSettings(); jobContext.SetRunnerContext("name", _runnerSettings.AgentName); if (jobContext.Global.Variables.TryGetValue(WellKnownDistributedTaskVariables.RunnerEnvironment, out var runnerEnvironment)) @@ -286,20 +283,14 @@ namespace GitHub.Runner.Worker { jobContext.Debug($"Finishing: {message.JobDisplayName}"); TaskResult result = jobContext.Complete(taskResult); - if (jobContext.Global.Variables.TryGetValue(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, out var deprecatedNodeWarnings)) - { - var actions = string.Join(", ", StringUtil.ConvertFromJson>(deprecatedNodeWarnings)); - jobContext.Warning(string.Format(Constants.Runner.DetectedNodeAfterEndOfLifeMessage, actions)); - } - if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode12DetectedAfterEndOfLifeEnvVariable, out var node16ForceWarnings)) + var jobQueueTelemetry = await ShutdownQueue(throwOnFailure: false); + // include any job telemetry from the background upload process. 
+ if (jobQueueTelemetry?.Count > 0) { - var actions = string.Join(", ", StringUtil.ConvertFromJson>(node16ForceWarnings)); - jobContext.Warning(string.Format(Constants.Runner.EnforcedNode12DetectedAfterEndOfLife, actions)); + jobContext.Global.JobTelemetry.AddRange(jobQueueTelemetry); } - await ShutdownQueue(throwOnFailure: false); - // Make sure to clean temp after file upload since they may be pending fileupload still use the TEMP dir. _tempDirectoryManager?.CleanupTempDirectory(); @@ -316,6 +307,13 @@ namespace GitHub.Runner.Worker environmentUrl = urlStringToken.Value; } + // Get telemetry + IList telemetry = null; + if (jobContext.Global.JobTelemetry.Count > 0) + { + telemetry = jobContext.Global.JobTelemetry.Select(x => new Telemetry { Type = x.Type.ToString(), Message = x.Message, }).ToList(); + } + Trace.Info($"Raising job completed against run service"); var completeJobRetryLimit = 5; var exceptions = new List(); @@ -323,9 +321,23 @@ namespace GitHub.Runner.Worker { try { - await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, jobContext.JobOutputs, jobContext.Global.StepsResult, jobContext.Global.JobAnnotations, environmentUrl, default); + await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, jobContext.JobOutputs, jobContext.Global.StepsResult, jobContext.Global.JobAnnotations, environmentUrl, telemetry, billingOwnerId: message.BillingOwnerId, default); return result; } + catch (VssUnauthorizedException ex) + { + Trace.Error($"Catch exception while attempting to complete job {message.JobId}, job request {message.RequestId}."); + Trace.Error(ex); + exceptions.Add(ex); + break; + } + catch (TaskOrchestrationJobNotFoundException ex) + { + Trace.Error($"Catch exception while attempting to complete job {message.JobId}, job request {message.RequestId}."); + Trace.Error(ex); + exceptions.Add(ex); + break; + } catch (Exception ex) { Trace.Error($"Catch exception while attempting to complete job {message.JobId}, job request {message.RequestId}."); @@ -348,68 +360,14 @@ namespace GitHub.Runner.Worker if (_runnerSettings.DisableUpdate == true) { - try - { - var currentVersion = new PackageVersion(BuildConstants.RunnerPackage.Version); - ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); - VssCredentials serverCredential = VssUtil.GetVssCredential(systemConnection); - - var runnerServer = HostContext.GetService(); - await runnerServer.ConnectAsync(systemConnection.Url, serverCredential); - var serverPackages = await runnerServer.GetPackagesAsync("agent", BuildConstants.RunnerPackage.PackageName, 5, includeToken: false, cancellationToken: CancellationToken.None); - if (serverPackages.Count > 0) - { - serverPackages = serverPackages.OrderByDescending(x => x.Version).ToList(); - Trace.Info($"Newer packages {StringUtil.ConvertToJson(serverPackages.Select(x => x.Version.ToString()))}"); - - var warnOnFailedJob = false; // any minor/patch version behind. 
- var warnOnOldRunnerVersion = false; // >= 2 minor version behind - if (serverPackages.Any(x => x.Version.CompareTo(currentVersion) > 0)) - { - Trace.Info($"Current runner version {currentVersion} is behind the latest runner version {serverPackages[0].Version}."); - warnOnFailedJob = true; - } - - if (serverPackages.Where(x => x.Version.Major == currentVersion.Major && x.Version.Minor > currentVersion.Minor).Count() > 1) - { - Trace.Info($"Current runner version {currentVersion} is way behind the latest runner version {serverPackages[0].Version}."); - warnOnOldRunnerVersion = true; - } - - if (result == TaskResult.Failed && warnOnFailedJob) - { - jobContext.Warning($"This job failure may be caused by using an out of date self-hosted runner. You are currently using runner version {currentVersion}. Please update to the latest version {serverPackages[0].Version}"); - } - else if (warnOnOldRunnerVersion) - { - jobContext.Warning($"This self-hosted runner is currently using runner version {currentVersion}. This version is out of date. Please update to the latest version {serverPackages[0].Version}"); - } - } - } - catch (Exception ex) - { - // Ignore any error since suggest runner update is best effort. - Trace.Error($"Caught exception during runner version check: {ex}"); - } - } - - if (jobContext.Global.Variables.TryGetValue(Constants.Runner.DeprecatedNodeDetectedAfterEndOfLifeActions, out var deprecatedNodeWarnings)) - { - var actions = string.Join(", ", StringUtil.ConvertFromJson>(deprecatedNodeWarnings)); - jobContext.Warning(string.Format(Constants.Runner.DetectedNodeAfterEndOfLifeMessage, actions)); - } - - if (jobContext.Global.Variables.TryGetValue(Constants.Runner.EnforcedNode12DetectedAfterEndOfLifeEnvVariable, out var node16ForceWarnings)) - { - var actions = string.Join(", ", StringUtil.ConvertFromJson>(node16ForceWarnings)); - jobContext.Warning(string.Format(Constants.Runner.EnforcedNode12DetectedAfterEndOfLife, actions)); + await WarningOutdatedRunnerAsync(jobContext, message, result); } try { var jobQueueTelemetry = await ShutdownQueue(throwOnFailure: true); // include any job telemetry from the background upload process. - if (jobQueueTelemetry.Count > 0) + if (jobQueueTelemetry?.Count > 0) { jobContext.Global.JobTelemetry.AddRange(jobQueueTelemetry); } @@ -537,5 +495,52 @@ namespace GitHub.Runner.Worker return Array.Empty(); } + + private async Task WarningOutdatedRunnerAsync(IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, TaskResult result) + { + try + { + var currentVersion = new PackageVersion(BuildConstants.RunnerPackage.Version); + ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); + VssCredentials serverCredential = VssUtil.GetVssCredential(systemConnection); + + var runnerServer = HostContext.GetService(); + await runnerServer.ConnectAsync(systemConnection.Url, serverCredential); + var serverPackages = await runnerServer.GetPackagesAsync("agent", BuildConstants.RunnerPackage.PackageName, 5, includeToken: false, cancellationToken: CancellationToken.None); + if (serverPackages.Count > 0) + { + serverPackages = serverPackages.OrderByDescending(x => x.Version).ToList(); + Trace.Info($"Newer packages {StringUtil.ConvertToJson(serverPackages.Select(x => x.Version.ToString()))}"); + + var warnOnFailedJob = false; // any minor/patch version behind. 
+ var warnOnOldRunnerVersion = false; // >= 2 minor version behind + if (serverPackages.Any(x => x.Version.CompareTo(currentVersion) > 0)) + { + Trace.Info($"Current runner version {currentVersion} is behind the latest runner version {serverPackages[0].Version}."); + warnOnFailedJob = true; + } + + if (serverPackages.Where(x => x.Version.Major == currentVersion.Major && x.Version.Minor > currentVersion.Minor).Count() > 1) + { + Trace.Info($"Current runner version {currentVersion} is way behind the latest runner version {serverPackages[0].Version}."); + warnOnOldRunnerVersion = true; + } + + if (result == TaskResult.Failed && warnOnFailedJob) + { + jobContext.Warning($"This job failure may be caused by using an out of date version of GitHub runner on your self-hosted runner. You are currently using GitHub runner version {currentVersion}. Please update to the latest version {serverPackages[0].Version}"); + } + else if (warnOnOldRunnerVersion) + { + jobContext.Warning($"This self-hosted runner is currently using runner version {currentVersion}. This version is out of date. Please update to the latest version {serverPackages[0].Version}"); + } + } + } + catch (Exception ex) + { + // Ignore any error since suggest runner update is best effort. + Trace.Error($"Caught exception during runner version check: {ex}"); + } + } } } diff --git a/src/Runner.Worker/Runner.Worker.csproj b/src/Runner.Worker/Runner.Worker.csproj index eee59b872..4470920e1 100644 --- a/src/Runner.Worker/Runner.Worker.csproj +++ b/src/Runner.Worker/Runner.Worker.csproj @@ -1,11 +1,12 @@ - net6.0 + net8.0 Exe win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64 + true true - NU1701;NU1603 + NU1701;NU1603;SYSLIB0050;SYSLIB0051 $(Version) false true @@ -18,9 +19,9 @@ - - - + + + diff --git a/src/Runner.Worker/SnapshotOperationProvider.cs b/src/Runner.Worker/SnapshotOperationProvider.cs new file mode 100644 index 000000000..73630d498 --- /dev/null +++ b/src/Runner.Worker/SnapshotOperationProvider.cs @@ -0,0 +1,32 @@ +#nullable enable +using System.IO; +using System.Threading.Tasks; +using GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +namespace GitHub.Runner.Worker; + +[ServiceLocator(Default = typeof(SnapshotOperationProvider))] +public interface ISnapshotOperationProvider : IRunnerService +{ + Task CreateSnapshotRequestAsync(IExecutionContext executionContext, Snapshot snapshotRequest); +} + +public class SnapshotOperationProvider : RunnerService, ISnapshotOperationProvider +{ + public Task CreateSnapshotRequestAsync(IExecutionContext executionContext, Snapshot snapshotRequest) + { + var snapshotRequestFilePath = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), ".snapshot", "request.json"); + var snapshotRequestDirectoryPath = Path.GetDirectoryName(snapshotRequestFilePath); + if (snapshotRequestDirectoryPath != null) + { + Directory.CreateDirectory(snapshotRequestDirectoryPath); + } + + IOUtil.SaveObject(snapshotRequest, snapshotRequestFilePath); + executionContext.Output($"Request written to: {snapshotRequestFilePath}"); + executionContext.Output("This request will be processed after the job completes. 
You will not receive any feedback on the snapshot process within the workflow logs of this job."); + executionContext.Output("If the snapshot process is successful, you should see a new image with the requested name in the list of available custom images when creating a new GitHub-hosted Runner."); + return Task.CompletedTask; + } +} diff --git a/src/Runner.Worker/StepsRunner.cs b/src/Runner.Worker/StepsRunner.cs index 4c88726da..83ce87f64 100644 --- a/src/Runner.Worker/StepsRunner.cs +++ b/src/Runner.Worker/StepsRunner.cs @@ -295,7 +295,7 @@ namespace GitHub.Runner.Worker !jobCancellationToken.IsCancellationRequested) { Trace.Error($"Caught timeout exception from step: {ex.Message}"); - step.ExecutionContext.Error("The action has timed out."); + step.ExecutionContext.Error($"The action '{step.DisplayName}' has timed out after {timeoutMinutes} minutes."); step.ExecutionContext.Result = TaskResult.Failed; } else diff --git a/src/Sdk/Common/Common/Diagnostics/VssHttpEventSource.cs b/src/Sdk/Common/Common/Diagnostics/VssHttpEventSource.cs index 6d75e381f..4d232c9e1 100644 --- a/src/Sdk/Common/Common/Diagnostics/VssHttpEventSource.cs +++ b/src/Sdk/Common/Common/Diagnostics/VssHttpEventSource.cs @@ -2,6 +2,7 @@ using System.Collections.Generic; using System.Diagnostics.Tracing; using System.Globalization; +using System.Linq; using System.Net; using System.Net.Http; using System.Net.Sockets; @@ -335,7 +336,25 @@ namespace GitHub.Services.Common.Diagnostics if (IsEnabled()) { SetActivityId(activity); - HttpRequestStop(response.RequestMessage.GetHttpMethod(), response.RequestMessage.RequestUri.AbsoluteUri, (Int32)response.StatusCode); + var requestId = "NoExpectedHeader"; + if (response.Headers != null) + { + if (response.Headers.TryGetValues("x-github-request-id", out var headerValues) && headerValues != null) + { + requestId = headerValues.FirstOrDefault(); + } + else if (response.Headers.TryGetValues("x-vss-e2eid", out headerValues) && headerValues != null) + { + requestId = headerValues.FirstOrDefault(); + } + + if (string.IsNullOrEmpty(requestId)) + { + requestId = "NoExpectedHeader"; + } + } + + HttpRequestStop(response.RequestMessage.GetHttpMethod(), response.RequestMessage.RequestUri.AbsoluteUri, (Int32)response.StatusCode, requestId); } } @@ -747,15 +766,16 @@ namespace GitHub.Services.Common.Diagnostics } } - [Event(24, Level = EventLevel.Verbose, Task = Tasks.HttpRequest, Opcode = EventOpcode.Stop, Message = "Finished {0} request to {1} with status code {2}")] + [Event(24, Level = EventLevel.Verbose, Task = Tasks.HttpRequest, Opcode = EventOpcode.Stop, Message = "Finished {0} request to {1} with status code {2} ({3})")] private void HttpRequestStop( VssHttpMethod method, String url, - Int32 statusCode) + Int32 statusCode, + String requestId) { if (IsEnabled()) { - WriteEvent(24, (Int32)method, url, statusCode); + WriteEvent(24, (Int32)method, url, statusCode, requestId); } } diff --git a/src/Sdk/Common/Common/Exceptions/PropertyExceptions.cs b/src/Sdk/Common/Common/Exceptions/PropertyExceptions.cs index 34c97b73a..72367654b 100644 --- a/src/Sdk/Common/Common/Exceptions/PropertyExceptions.cs +++ b/src/Sdk/Common/Common/Exceptions/PropertyExceptions.cs @@ -34,6 +34,7 @@ namespace GitHub.Services.Common public String PropertyName { get; set; } + [Obsolete] [SecurityCritical] public override void GetObjectData(SerializationInfo info, StreamingContext context) { diff --git a/src/Sdk/Common/Common/RawHttpMessageHandler.cs b/src/Sdk/Common/Common/RawHttpMessageHandler.cs index 
316bcd576..e80e6a747 100644 --- a/src/Sdk/Common/Common/RawHttpMessageHandler.cs +++ b/src/Sdk/Common/Common/RawHttpMessageHandler.cs @@ -106,6 +106,18 @@ namespace GitHub.Services.Common { VssTraceActivity traceActivity = VssTraceActivity.Current; + if (!m_appliedServerCertificateValidationCallbackToTransportHandler && + request.RequestUri.Scheme == "https") + { + HttpClientHandler httpClientHandler = m_transportHandler as HttpClientHandler; + if (httpClientHandler != null && + this.Settings.ServerCertificateValidationCallback != null) + { + httpClientHandler.ServerCertificateCustomValidationCallback = this.Settings.ServerCertificateValidationCallback; + } + m_appliedServerCertificateValidationCallbackToTransportHandler = true; + } + lock (m_thisLock) { // Ensure that we attempt to use the most appropriate authentication mechanism by default. @@ -291,6 +303,7 @@ namespace GitHub.Services.Common } } + private bool m_appliedServerCertificateValidationCallbackToTransportHandler; private readonly HttpMessageHandler m_transportHandler; private HttpMessageInvoker m_messageInvoker; private CredentialWrapper m_credentialWrapper; diff --git a/src/Sdk/Common/Common/VssException.cs b/src/Sdk/Common/Common/VssException.cs index 7cead7865..5f8fb8c25 100644 --- a/src/Sdk/Common/Common/VssException.cs +++ b/src/Sdk/Common/Common/VssException.cs @@ -127,6 +127,7 @@ namespace GitHub.Services.Common EventId = (int)info.GetValue("m_eventId", typeof(int)); } + [Obsolete] [SecurityCritical] public override void GetObjectData(SerializationInfo info, StreamingContext context) { diff --git a/src/Sdk/Common/Common/VssHttpMessageHandler.cs b/src/Sdk/Common/Common/VssHttpMessageHandler.cs index 04a318d63..f48eec41a 100644 --- a/src/Sdk/Common/Common/VssHttpMessageHandler.cs +++ b/src/Sdk/Common/Common/VssHttpMessageHandler.cs @@ -214,25 +214,7 @@ namespace GitHub.Services.Common // ConfigureAwait(false) enables the continuation to be run outside any captured // SyncronizationContext (such as ASP.NET's) which keeps things from deadlocking... 
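
The RawHttpMessageHandler change above copies Settings.ServerCertificateValidationCallback onto the transport HttpClientHandler's ServerCertificateCustomValidationCallback for HTTPS requests, applying it only once. The delegate shape HttpClientHandler expects is Func<HttpRequestMessage, X509Certificate2, X509Chain, SslPolicyErrors, bool>; whether the settings property uses exactly this type is an assumption based on that assignment. A minimal illustrative callback (the pinned thumbprint is a hypothetical value) might look like this:

```csharp
// Illustrative only: a callback of the shape HttpClientHandler accepts.
using System;
using System.Net.Http;
using System.Net.Security;
using System.Security.Cryptography.X509Certificates;

static class CertificateValidationSketch
{
    // Accept normally-valid certificates, plus one pinned self-signed certificate.
    public static bool Validate(
        HttpRequestMessage request,
        X509Certificate2 certificate,
        X509Chain chain,
        SslPolicyErrors errors)
    {
        const string pinnedThumbprint = "0123456789ABCDEF0123456789ABCDEF01234567"; // hypothetical

        if (errors == SslPolicyErrors.None)
        {
            return true;
        }

        return string.Equals(certificate?.Thumbprint, pinnedThumbprint,
            StringComparison.OrdinalIgnoreCase);
    }
}
```
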
- var tmpResponse = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false); - if (Settings.AllowAutoRedirectForBroker && tmpResponse.StatusCode == HttpStatusCode.Redirect) - { - //Dispose of the previous response - tmpResponse?.Dispose(); - - var location = tmpResponse.Headers.Location; - request = new HttpRequestMessage(HttpMethod.Get, location); - - // Reapply the token to new redirected request - ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth); - - // Resend the request - response = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false); - } - else - { - response = tmpResponse; - } + response = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false); traceInfo?.TraceRequestSendTime(); diff --git a/src/Sdk/Common/Common/VssHttpRequestSettings.cs b/src/Sdk/Common/Common/VssHttpRequestSettings.cs index 6d67f9274..7279d2809 100644 --- a/src/Sdk/Common/Common/VssHttpRequestSettings.cs +++ b/src/Sdk/Common/Common/VssHttpRequestSettings.cs @@ -110,16 +110,6 @@ namespace GitHub.Services.Common set; } - /// - /// Gets or sets a value indicating whether or not HttpClientHandler should follow redirect on outgoing broker requests - /// This is special since this also sends token in the request, where as default AllowAutoRedirect does not - /// - public Boolean AllowAutoRedirectForBroker - { - get; - set; - } - /// /// Gets or sets a value indicating whether or not compression should be used on outgoing requests. /// The default value is true. diff --git a/src/Sdk/DTGenerated/Generated/TaskAgentHttpClientBase.cs b/src/Sdk/DTGenerated/Generated/TaskAgentHttpClientBase.cs index a084664b3..3ca676594 100644 --- a/src/Sdk/DTGenerated/Generated/TaskAgentHttpClientBase.cs +++ b/src/Sdk/DTGenerated/Generated/TaskAgentHttpClientBase.cs @@ -23,8 +23,8 @@ using System.IO; using System.IO.Compression; using System.Linq; using System.Net.Http; -using System.Net.Http.Headers; using System.Net.Http.Formatting; +using System.Net.Http.Headers; using System.Threading; using System.Threading.Tasks; using GitHub.Services.Common; @@ -827,5 +827,36 @@ namespace GitHub.DistributedTask.WebApi userState: userState, cancellationToken: cancellationToken); } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task RefreshRunnerConfigAsync( + int agentId, + string configType, + string encodedRunnerConfig, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("13b5d709-74aa-470b-a8e9-bf9f3ded3f18"); + object routeValues = new { agentId = agentId, configType = configType }; + HttpContent content = new ObjectContent(encodedRunnerConfig, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(6.0, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } } } diff --git a/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessage.cs b/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessage.cs index 3f93e75e5..e6ecbf450 100644 --- a/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessage.cs +++ b/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessage.cs @@ -43,6 +43,7 @@ namespace GitHub.DistributedTask.Pipelines TemplateToken jobOutputs, IList defaults, ActionsEnvironmentReference actionsEnvironment, + TemplateToken snapshot, String messageType = JobRequestMessageTypes.PipelineAgentJobRequest) { this.MessageType = messageType; @@ -57,6 +58,7 @@ namespace GitHub.DistributedTask.Pipelines this.Workspace = workspaceOptions; this.JobOutputs = jobOutputs; this.ActionsEnvironment = actionsEnvironment; + this.Snapshot = snapshot; m_variables = new Dictionary(variables, StringComparer.OrdinalIgnoreCase); m_maskHints = new List(maskHints); m_steps = new List(steps); @@ -237,6 +239,20 @@ namespace GitHub.DistributedTask.Pipelines set; } + [DataMember(EmitDefaultValue = false)] + public TemplateToken Snapshot + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String BillingOwnerId + { + get; + set; + } + /// /// Gets the collection of variables associated with the current context. 
/// diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConstants.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConstants.cs index e9fb75dfa..8d81c7d2d 100644 --- a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConstants.cs +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConstants.cs @@ -29,6 +29,8 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating public const String Id = "id"; public const String If = "if"; public const String Image = "image"; + public const String ImageName = "image-name"; + public const String CustomImageVersion = "version"; public const String Include = "include"; public const String Inputs = "inputs"; public const String Job = "job"; @@ -60,6 +62,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating public const String Services = "services"; public const String Shell = "shell"; public const String Skipped = "skipped"; + public const String Snapshot = "snapshot"; public const String StepEnv = "step-env"; public const String StepIfResult = "step-if-result"; public const String StepWith = "step-with"; diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConverter.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConverter.cs index 506a7d268..40f6a1334 100644 --- a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConverter.cs +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConverter.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.ComponentModel; +using System.Globalization; using System.Linq; using GitHub.DistributedTask.Expressions2; using GitHub.DistributedTask.Expressions2.Sdk; @@ -346,6 +347,70 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating return result; } + internal static Snapshot ConvertToJobSnapshotRequest(TemplateContext context, TemplateToken token) + { + string imageName = null; + string version = "1.*"; + string versionString = string.Empty; + var condition = $"{PipelineTemplateConstants.Success}()"; + + if (token is StringToken snapshotStringLiteral) + { + imageName = snapshotStringLiteral.Value; + } + else + { + var snapshotMapping = token.AssertMapping($"{PipelineTemplateConstants.Snapshot}"); + foreach (var snapshotPropertyPair in snapshotMapping) + { + var propertyName = snapshotPropertyPair.Key.AssertString($"{PipelineTemplateConstants.Snapshot} key"); + var propertyValue = snapshotPropertyPair.Value; + switch (propertyName.Value) + { + case PipelineTemplateConstants.ImageName: + imageName = snapshotPropertyPair.Value.AssertString($"{PipelineTemplateConstants.Snapshot} {propertyName}").Value; + break; + case PipelineTemplateConstants.If: + condition = ConvertToIfCondition(context, propertyValue, false); + break; + case PipelineTemplateConstants.CustomImageVersion: + versionString = propertyValue.AssertString($"job {PipelineTemplateConstants.Snapshot} {PipelineTemplateConstants.CustomImageVersion}").Value; + version = IsSnapshotImageVersionValid(versionString) ? 
versionString : null; + break; + default: + propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Snapshot} key"); + break; + } + } + } + + if (String.IsNullOrEmpty(imageName)) + { + return null; + } + + return new Snapshot(imageName) + { + Condition = condition, + Version = version + }; + } + + private static bool IsSnapshotImageVersionValid(string versionString) + { + var versionSegments = versionString.Split("."); + + if (versionSegments.Length != 2 || + !versionSegments[1].Equals("*") || + !Int32.TryParse(versionSegments[0], NumberStyles.None, CultureInfo.InvariantCulture, result: out int parsedMajor) || + parsedMajor < 0) + { + return false; + } + + return true; + } + private static ActionStep ConvertToStep( TemplateContext context, TemplateToken stepsItem, diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateEvaluator.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateEvaluator.cs index 331b10246..e5fbd5d28 100644 --- a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateEvaluator.cs +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateEvaluator.cs @@ -370,6 +370,32 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating return result; } + public Snapshot EvaluateJobSnapshotRequest(TemplateToken token, + DictionaryContextData contextData, + IList expressionFunctions) + { + var result = default(Snapshot); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData, expressionFunctions); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Snapshot, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToJobSnapshotRequest(context, token); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result; + } + private TemplateContext CreateContext( DictionaryContextData contextData, IList expressionFunctions, diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ReferenceNameBuilder.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ReferenceNameBuilder.cs index 2b42f2918..a7c9a98c5 100644 --- a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ReferenceNameBuilder.cs +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ReferenceNameBuilder.cs @@ -99,7 +99,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating { if (!NameValidation.IsValid(value, allowHyphens: true) && value.Length < PipelineConstants.MaxNodeNameLength) { - error = $"The identifier '{value}' is invalid. IDs may only contain alphanumeric characters, '_', and '-'. IDs must start with a letter or '_' and and must be less than {PipelineConstants.MaxNodeNameLength} characters."; + error = $"The identifier '{value}' is invalid. IDs may only contain alphanumeric characters, '_', and '-'. 
IDs must start with a letter or '_' and must be less than {PipelineConstants.MaxNodeNameLength} characters."; return false; } else if (!m_distinctNames.Add(value)) diff --git a/src/Sdk/DTPipelines/Pipelines/Snapshot.cs b/src/Sdk/DTPipelines/Pipelines/Snapshot.cs new file mode 100644 index 000000000..c1a05674a --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Snapshot.cs @@ -0,0 +1,27 @@ +using System; +using System.Runtime.Serialization; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines.ObjectTemplating; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + public class Snapshot + { + public Snapshot(string imageName, string condition = null, string version = null) + { + ImageName = imageName; + Condition = condition ?? $"{PipelineTemplateConstants.Success}()"; + Version = version ?? "1.*"; + } + + [DataMember(EmitDefaultValue = false)] + public String ImageName { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String Condition { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String Version { get; set; } + } +} diff --git a/src/Sdk/DTPipelines/workflow-v1.0.json b/src/Sdk/DTPipelines/workflow-v1.0.json index c1453f4e6..ec09cfe58 100644 --- a/src/Sdk/DTPipelines/workflow-v1.0.json +++ b/src/Sdk/DTPipelines/workflow-v1.0.json @@ -71,7 +71,8 @@ "env": "job-env", "outputs": "job-outputs", "defaults": "job-defaults", - "steps": "steps" + "steps": "steps", + "snapshot": "snapshot" } } }, @@ -155,6 +156,41 @@ } }, + "snapshot": { + "one-of": [ + "non-empty-string", + "snapshot-mapping" + ] + }, + + "snapshot-mapping": { + "mapping": { + "properties": { + "image-name": { + "type": "non-empty-string", + "required": true + }, + "if": "snapshot-if", + "version": { + "type": "non-empty-string", + "required": false + } + } + } + }, + + "snapshot-if": { + "context": [ + "github", + "inputs", + "vars", + "needs", + "strategy", + "matrix" + ], + "string": {} + }, + "runs-on": { "context": [ "github", diff --git a/src/Sdk/DTWebApi/WebApi/ActionDownloadInfo.cs b/src/Sdk/DTWebApi/WebApi/ActionDownloadInfo.cs index a6b0749f6..b4ade9887 100644 --- a/src/Sdk/DTWebApi/WebApi/ActionDownloadInfo.cs +++ b/src/Sdk/DTWebApi/WebApi/ActionDownloadInfo.cs @@ -9,6 +9,9 @@ namespace GitHub.DistributedTask.WebApi [DataMember(EmitDefaultValue = false)] public ActionDownloadAuthentication Authentication { get; set; } + [DataMember(EmitDefaultValue = false)] + public ActionDownloadPackageDetails PackageDetails { get; set; } + [DataMember(EmitDefaultValue = false)] public string NameWithOwner { get; set; } @@ -37,4 +40,14 @@ namespace GitHub.DistributedTask.WebApi [DataMember(EmitDefaultValue = false)] public string Token { get; set; } } + + [DataContract] + public class ActionDownloadPackageDetails + { + [DataMember(EmitDefaultValue = false)] + public string Version { get; set; } + + [DataMember(EmitDefaultValue = false)] + public string ManifestDigest { get; set; } + } } diff --git a/src/Sdk/DTWebApi/WebApi/ActionsRunServerHttpClient.cs b/src/Sdk/DTWebApi/WebApi/ActionsRunServerHttpClient.cs new file mode 100644 index 000000000..a72e8a28b --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ActionsRunServerHttpClient.cs @@ -0,0 +1,95 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; 
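For context on the `snapshot` support introduced above: `ConvertToJobSnapshotRequest` accepts either a plain string (taken as the image name) or a mapping with `image-name`, `if`, and `version`, and `IsSnapshotImageVersionValid` only accepts version strings of the form `<non-negative integer>.*`, falling back to a null version otherwise. The sketch below is illustrative only, not part of this patch; it restates that rule and the `Snapshot` defaults in standalone form, assuming it is compiled against the Sdk project.

```csharp
using System;
using System.Globalization;
using GitHub.DistributedTask.Pipelines;

static class SnapshotSketch
{
    // Mirrors the "<major>.*" rule from IsSnapshotImageVersionValid above.
    static bool LooksLikeValidSnapshotVersion(string version)
    {
        var parts = version.Split('.');
        return parts.Length == 2
            && parts[1] == "*"
            && Int32.TryParse(parts[0], NumberStyles.None, CultureInfo.InvariantCulture, out var major)
            && major >= 0;
    }

    static void Main()
    {
        Console.WriteLine(LooksLikeValidSnapshotVersion("2.*"));  // True
        Console.WriteLine(LooksLikeValidSnapshotVersion("2.1"));  // False
        Console.WriteLine(LooksLikeValidSnapshotVersion("*.*"));  // False

        // A plain "snapshot: my-image" entry ends up as a Snapshot with defaults:
        var snapshot = new Snapshot("my-image");
        Console.WriteLine(snapshot.Version);    // 1.*
        Console.WriteLine(snapshot.Condition);  // success-style default condition, e.g. "success()"
    }
}
```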
+using GitHub.Services.Common.Diagnostics; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + [ResourceArea(TaskResourceIds.AreaId)] + public class ActionsRunServerHttpClient : TaskAgentHttpClient + { + private static readonly JsonSerializerSettings s_serializerSettings; + + static ActionsRunServerHttpClient() + { + s_serializerSettings = new VssJsonMediaTypeFormatter().SerializerSettings; + s_serializerSettings.DateParseHandling = DateParseHandling.None; + s_serializerSettings.FloatParseHandling = FloatParseHandling.Double; + } + + public ActionsRunServerHttpClient( + Uri baseUrl, + VssCredentials credentials) + : base(baseUrl, credentials) + { + } + + public ActionsRunServerHttpClient( + Uri baseUrl, + VssCredentials credentials, + VssHttpRequestSettings settings) + : base(baseUrl, credentials, settings) + { + } + + public ActionsRunServerHttpClient( + Uri baseUrl, + VssCredentials credentials, + params DelegatingHandler[] handlers) + : base(baseUrl, credentials, handlers) + { + } + + public ActionsRunServerHttpClient( + Uri baseUrl, + VssCredentials credentials, + VssHttpRequestSettings settings, + params DelegatingHandler[] handlers) + : base(baseUrl, credentials, settings, handlers) + { + } + + public ActionsRunServerHttpClient( + Uri baseUrl, + HttpMessageHandler pipeline, + Boolean disposeHandler) + : base(baseUrl, pipeline, disposeHandler) + { + } + + public Task GetJobMessageAsync( + string messageId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("25adab70-1379-4186-be8e-b643061ebe3a"); + object routeValues = new { messageId = messageId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(6.0, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + protected override async Task ReadJsonContentAsync(HttpResponseMessage response, CancellationToken cancellationToken = default(CancellationToken)) + { + var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + return JsonConvert.DeserializeObject(json, s_serializerSettings); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Exceptions.cs b/src/Sdk/DTWebApi/WebApi/Exceptions.cs index 97505bb6a..ee47f1370 100644 --- a/src/Sdk/DTWebApi/WebApi/Exceptions.cs +++ b/src/Sdk/DTWebApi/WebApi/Exceptions.cs @@ -1539,6 +1539,26 @@ namespace GitHub.DistributedTask.WebApi } } + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskOrchestrationJobUnprocessableException", "GitHub.DistributedTask.WebApi.TaskOrchestrationJobUnprocessableException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskOrchestrationJobUnprocessableException : DistributedTaskException + { + public TaskOrchestrationJobUnprocessableException(String message) + : base(message) + { + } + + public TaskOrchestrationJobUnprocessableException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskOrchestrationJobUnprocessableException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + [Serializable] [ExceptionMapping("0.0", "3.0", "TaskOrchestrationPlanSecurityException", "GitHub.DistributedTask.WebApi.TaskOrchestrationPlanSecurityException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] public 
sealed class TaskOrchestrationPlanSecurityException : DistributedTaskException @@ -2498,6 +2518,25 @@ namespace GitHub.DistributedTask.WebApi } } + [Serializable] + public class NonRetryableActionDownloadInfoException : DistributedTaskException + { + public NonRetryableActionDownloadInfoException(String message) + : base(message) + { + } + + public NonRetryableActionDownloadInfoException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected NonRetryableActionDownloadInfoException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + [Serializable] public sealed class FailedToResolveActionDownloadInfoException : DistributedTaskException { diff --git a/src/Sdk/DTWebApi/WebApi/Runner.cs b/src/Sdk/DTWebApi/WebApi/Runner.cs index 91f13e036..f3fdbf60e 100644 --- a/src/Sdk/DTWebApi/WebApi/Runner.cs +++ b/src/Sdk/DTWebApi/WebApi/Runner.cs @@ -19,7 +19,7 @@ namespace GitHub.DistributedTask.WebApi } /// - /// The url to connect to to poll for messages + /// The url to connect to poll for messages /// [JsonProperty("server_url")] public string ServerUrl diff --git a/src/Sdk/DTWebApi/WebApi/RunnerRefreshConfigMessage.cs b/src/Sdk/DTWebApi/WebApi/RunnerRefreshConfigMessage.cs new file mode 100644 index 000000000..064ce928a --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/RunnerRefreshConfigMessage.cs @@ -0,0 +1,58 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class RunnerRefreshConfigMessage + { + public static readonly String MessageType = "RunnerRefreshConfig"; + + [JsonConstructor] + internal RunnerRefreshConfigMessage() + { + } + + public RunnerRefreshConfigMessage( + string runnerQualifiedId, + string configType, + string serviceType, + string configRefreshUrl) + { + this.RunnerQualifiedId = runnerQualifiedId; + this.ConfigType = configType; + this.ServiceType = serviceType; + this.ConfigRefreshUrl = configRefreshUrl; + } + + [DataMember(Name = "runnerQualifiedId")] + public String RunnerQualifiedId + { + get; + private set; + } + + [DataMember(Name = "configType")] + public String ConfigType + { + get; + private set; + } + + [DataMember(Name = "serviceType")] + public String ServiceType + { + get; + private set; + } + + [DataMember(Name = "configRefreshURL")] + public String ConfigRefreshUrl + { + get; + private set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceConnectivityCheck.cs b/src/Sdk/DTWebApi/WebApi/ServiceConnectivityCheck.cs new file mode 100644 index 000000000..ff4845fec --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceConnectivityCheck.cs @@ -0,0 +1,42 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class ServiceConnectivityCheckInput + { + [JsonConstructor] + public ServiceConnectivityCheckInput() + { + Endpoints = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + [DataMember(EmitDefaultValue = false)] + public Dictionary Endpoints { get; set; } + + [DataMember(EmitDefaultValue = false)] + public int IntervalInSecond { get; set; } + + [DataMember(EmitDefaultValue = false)] + public int RequestTimeoutInSecond { get; set; } + } + + [DataContract] + public class ServiceConnectivityCheckResult + { + [JsonConstructor] + public ServiceConnectivityCheckResult() + { + EndpointsResult = new 
Dictionary>(StringComparer.OrdinalIgnoreCase); + } + + [DataMember(Order = 1, EmitDefaultValue = true)] + public bool HasFailure { get; set; } + + [DataMember(Order = 2, EmitDefaultValue = false)] + public Dictionary> EndpointsResult { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentHttpClient.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentHttpClient.cs index 4b08ebaa8..c97fea0a4 100644 --- a/src/Sdk/DTWebApi/WebApi/TaskAgentHttpClient.cs +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentHttpClient.cs @@ -141,24 +141,6 @@ namespace GitHub.DistributedTask.WebApi return ReplaceAgentAsync(poolId, agent.Id, agent, userState, cancellationToken); } - public Task GetJobMessageAsync( - string messageId, - object userState = null, - CancellationToken cancellationToken = default) - { - HttpMethod httpMethod = new HttpMethod("GET"); - Guid locationId = new Guid("25adab70-1379-4186-be8e-b643061ebe3a"); - object routeValues = new { messageId = messageId }; - - return SendAsync( - httpMethod, - locationId, - routeValues: routeValues, - version: new ApiResourceVersion(6.0, 1), - userState: userState, - cancellationToken: cancellationToken); - } - protected Task SendAsync( HttpMethod method, Guid locationId, diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentMessageTypes.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentMessageTypes.cs new file mode 100644 index 000000000..5c8f95ed7 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentMessageTypes.cs @@ -0,0 +1,10 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + public sealed class TaskAgentMessageTypes + { + public static readonly string ForceTokenRefresh = "ForceTokenRefresh"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/WellKnownDistributedTaskVariables.cs b/src/Sdk/DTWebApi/WebApi/WellKnownDistributedTaskVariables.cs index c6ca7b0b4..a4ea950de 100644 --- a/src/Sdk/DTWebApi/WebApi/WellKnownDistributedTaskVariables.cs +++ b/src/Sdk/DTWebApi/WebApi/WellKnownDistributedTaskVariables.cs @@ -7,5 +7,6 @@ namespace GitHub.DistributedTask.WebApi public static readonly String JobId = "system.jobId"; public static readonly String RunnerLowDiskspaceThreshold = "system.runner.lowdiskspacethreshold"; public static readonly String RunnerEnvironment = "system.runnerEnvironment"; + public static readonly String RunnerServiceConnectivityTest = "system.runner.serviceconnectivitycheckinput"; } } diff --git a/src/Sdk/RSWebApi/Contracts/AcquireJobRequest.cs b/src/Sdk/RSWebApi/Contracts/AcquireJobRequest.cs index c010f8208..020094b91 100644 --- a/src/Sdk/RSWebApi/Contracts/AcquireJobRequest.cs +++ b/src/Sdk/RSWebApi/Contracts/AcquireJobRequest.cs @@ -7,5 +7,11 @@ namespace GitHub.Actions.RunService.WebApi { [DataMember(Name = "jobMessageId", EmitDefaultValue = false)] public string JobMessageId { get; set; } + + [DataMember(Name = "runnerOS", EmitDefaultValue = false)] + public string RunnerOS { get; set; } + + [DataMember(Name = "billingOwnerId", EmitDefaultValue = false)] + public string BillingOwnerId { get; set; } } } diff --git a/src/Sdk/RSWebApi/Contracts/Annotation.cs b/src/Sdk/RSWebApi/Contracts/Annotation.cs index fab07bec9..99f9419f4 100644 --- a/src/Sdk/RSWebApi/Contracts/Annotation.cs +++ b/src/Sdk/RSWebApi/Contracts/Annotation.cs @@ -11,6 +11,9 @@ namespace Sdk.RSWebApi.Contracts [DataMember(Name = "message", EmitDefaultValue = false)] public string Message; + [DataMember(Name = "title", EmitDefaultValue = false)] + public string Title; + [DataMember(Name = "rawDetails", EmitDefaultValue = false)] public string RawDetails; @@ 
-31,5 +34,8 @@ namespace Sdk.RSWebApi.Contracts [DataMember(Name = "endColumn", EmitDefaultValue = false)] public long EndColumn; + + [DataMember(Name = "stepNumber", EmitDefaultValue = false)] + public long StepNumber; } } diff --git a/src/Sdk/RSWebApi/Contracts/BrokerError.cs b/src/Sdk/RSWebApi/Contracts/BrokerError.cs new file mode 100644 index 000000000..c2e4bfa7b --- /dev/null +++ b/src/Sdk/RSWebApi/Contracts/BrokerError.cs @@ -0,0 +1,20 @@ +using System.Runtime.Serialization; + +namespace GitHub.Actions.RunService.WebApi +{ + [DataContract] + public class BrokerError + { + [DataMember(Name = "source", EmitDefaultValue = false)] + public string Source { get; set; } + + [DataMember(Name = "errorKind", EmitDefaultValue = false)] + public string ErrorKind { get; set; } + + [DataMember(Name = "statusCode", EmitDefaultValue = false)] + public int StatusCode { get; set; } + + [DataMember(Name = "errorMessage", EmitDefaultValue = false)] + public string Message { get; set; } + } +} diff --git a/src/Sdk/RSWebApi/Contracts/BrokerErrorKind.cs b/src/Sdk/RSWebApi/Contracts/BrokerErrorKind.cs new file mode 100644 index 000000000..48295a1a1 --- /dev/null +++ b/src/Sdk/RSWebApi/Contracts/BrokerErrorKind.cs @@ -0,0 +1,12 @@ +using System.Runtime.Serialization; + +namespace GitHub.Actions.RunService.WebApi +{ + [DataContract] + public class BrokerErrorKind + { + public const string RunnerNotFound = "RunnerNotFound"; + public const string RunnerVersionTooOld = "RunnerVersionTooOld"; + public const string HostedRunnerDeprovisioned = "HostedRunnerDeprovisioned"; + } +} diff --git a/src/Sdk/RSWebApi/Contracts/CompleteJobRequest.cs b/src/Sdk/RSWebApi/Contracts/CompleteJobRequest.cs index fff5156a4..a9ba71a57 100644 --- a/src/Sdk/RSWebApi/Contracts/CompleteJobRequest.cs +++ b/src/Sdk/RSWebApi/Contracts/CompleteJobRequest.cs @@ -27,7 +27,13 @@ namespace GitHub.Actions.RunService.WebApi [DataMember(Name = "annotations", EmitDefaultValue = false)] public IList Annotations { get; set; } + [DataMember(Name = "telemetry", EmitDefaultValue = false)] + public IList Telemetry { get; set; } + [DataMember(Name = "environmentUrl", EmitDefaultValue = false)] public string EnvironmentUrl { get; set; } + + [DataMember(Name = "billingOwnerId", EmitDefaultValue = false)] + public string BillingOwnerId { get; set; } } } diff --git a/src/Sdk/RSWebApi/Contracts/IssueExtensions.cs b/src/Sdk/RSWebApi/Contracts/IssueExtensions.cs index 559935ba6..113eaa7e0 100644 --- a/src/Sdk/RSWebApi/Contracts/IssueExtensions.cs +++ b/src/Sdk/RSWebApi/Contracts/IssueExtensions.cs @@ -22,6 +22,8 @@ namespace Sdk.RSWebApi.Contracts var columnNumber = GetAnnotationNumber(issue, RunIssueKeys.Col) ?? 0; var endColumnNumber = GetAnnotationNumber(issue, RunIssueKeys.EndColumn) ?? columnNumber; var logLineNumber = GetAnnotationNumber(issue, RunIssueKeys.LogLineNumber) ?? 0; + var stepNumber = GetAnnotationNumber(issue, RunIssueKeys.StepNumber) ?? 
0; + var title = GetAnnotationField(issue, RunIssueKeys.Title); if (path == null && lineNumber == 0 && logLineNumber != 0) { @@ -33,11 +35,13 @@ namespace Sdk.RSWebApi.Contracts { Level = annotationLevel, Message = issueMessage, + Title = title, Path = path, StartLine = lineNumber, EndLine = endLineNumber, StartColumn = columnNumber, EndColumn = endColumnNumber, + StepNumber = stepNumber, }; } diff --git a/src/Sdk/RSWebApi/Contracts/IssueKeys.cs b/src/Sdk/RSWebApi/Contracts/IssueKeys.cs index 61df3b8be..0966e0e14 100644 --- a/src/Sdk/RSWebApi/Contracts/IssueKeys.cs +++ b/src/Sdk/RSWebApi/Contracts/IssueKeys.cs @@ -9,5 +9,7 @@ public const string EndLine = "endLine"; public const string EndColumn = "endColumn"; public const string LogLineNumber = "logFileLineNumber"; + public const string StepNumber = "stepNumber"; + public const string Title = "title"; } } diff --git a/src/Sdk/RSWebApi/Contracts/RunServiceError.cs b/src/Sdk/RSWebApi/Contracts/RunServiceError.cs new file mode 100644 index 000000000..009a5914a --- /dev/null +++ b/src/Sdk/RSWebApi/Contracts/RunServiceError.cs @@ -0,0 +1,17 @@ +using System.Runtime.Serialization; + +namespace GitHub.Actions.RunService.WebApi +{ + [DataContract] + public class RunServiceError + { + [DataMember(Name = "source", EmitDefaultValue = false)] + public string Source { get; set; } + + [DataMember(Name = "statusCode", EmitDefaultValue = false)] + public int Code { get; set; } + + [DataMember(Name = "errorMessage", EmitDefaultValue = false)] + public string Message { get; set; } + } +} diff --git a/src/Sdk/RSWebApi/Contracts/StepResult.cs b/src/Sdk/RSWebApi/Contracts/StepResult.cs index 1da4a2f97..300fb7741 100644 --- a/src/Sdk/RSWebApi/Contracts/StepResult.cs +++ b/src/Sdk/RSWebApi/Contracts/StepResult.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using System.Runtime.Serialization; using System.Threading.Tasks; @@ -16,9 +16,20 @@ namespace GitHub.Actions.RunService.WebApi [DataMember(Name = "number", EmitDefaultValue = false)] public int? Number { get; set; } + // Example: "Run actions/checkout@v3" [DataMember(Name = "name", EmitDefaultValue = false)] public string Name { get; set; } + // Example: "actions/checkout" + [DataMember(Name = "action_name", EmitDefaultValue = false)] + public string ActionName { get; set; } + + [DataMember(Name = "ref", EmitDefaultValue = false)] + public string Ref { get; set; } + + [DataMember(Name = "type", EmitDefaultValue = false)] + public string Type { get; set; } + [DataMember(Name = "status")] public TimelineRecordState? 
Status { get; set; } diff --git a/src/Sdk/RSWebApi/Contracts/Telemetry.cs b/src/Sdk/RSWebApi/Contracts/Telemetry.cs new file mode 100644 index 000000000..9dda8aa13 --- /dev/null +++ b/src/Sdk/RSWebApi/Contracts/Telemetry.cs @@ -0,0 +1,20 @@ +using System.Runtime.Serialization; + +namespace Sdk.RSWebApi.Contracts +{ + [DataContract] + public struct Telemetry + { + public Telemetry(string message, string type) + { + Message = message; + Type = type; + } + + [DataMember(Name = "message", EmitDefaultValue = false)] + public string Message { get; set; } + + [DataMember(Name = "type", EmitDefaultValue = false)] + public string Type { get; set; } + } +} diff --git a/src/Sdk/RSWebApi/RunServiceHttpClient.cs b/src/Sdk/RSWebApi/RunServiceHttpClient.cs index bafcee5aa..bb1407706 100644 --- a/src/Sdk/RSWebApi/RunServiceHttpClient.cs +++ b/src/Sdk/RSWebApi/RunServiceHttpClient.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Generic; +using System.Linq; using System.Net; using System.Net.Http; using System.Threading; @@ -9,6 +10,7 @@ using GitHub.DistributedTask.WebApi; using GitHub.Services.Common; using GitHub.Services.OAuth; using GitHub.Services.WebApi; +using Newtonsoft.Json; using Sdk.RSWebApi.Contracts; using Sdk.WebApi.WebApi; @@ -16,6 +18,15 @@ namespace GitHub.Actions.RunService.WebApi { public class RunServiceHttpClient : RawHttpClientBase { + private static readonly JsonSerializerSettings s_serializerSettings; + + static RunServiceHttpClient() + { + s_serializerSettings = new VssJsonMediaTypeFormatter().SerializerSettings; + s_serializerSettings.DateParseHandling = DateParseHandling.None; + s_serializerSettings.FloatParseHandling = FloatParseHandling.Double; + } + public RunServiceHttpClient( Uri baseUrl, VssOAuthCredential credentials) @@ -59,12 +70,16 @@ namespace GitHub.Actions.RunService.WebApi public async Task GetJobMessageAsync( Uri requestUri, string messageId, + string runnerOS, + string billingOwnerId, CancellationToken cancellationToken = default) { HttpMethod httpMethod = new HttpMethod("POST"); var payload = new AcquireJobRequest { JobMessageId = messageId, + RunnerOS = runnerOS, + BillingOwnerId = billingOwnerId, }; requestUri = new Uri(requestUri, "acquirejob"); @@ -74,6 +89,7 @@ namespace GitHub.Actions.RunService.WebApi httpMethod, requestUri: requestUri, content: requestContent, + readErrorBody: true, cancellationToken: cancellationToken); if (result.IsSuccess) @@ -81,14 +97,26 @@ namespace GitHub.Actions.RunService.WebApi return result.Value; } - switch (result.StatusCode) + if (TryParseErrorBody(result.ErrorBody, out RunServiceError error)) { - case HttpStatusCode.NotFound: - throw new TaskOrchestrationJobNotFoundException($"Job message not found: {messageId}"); - case HttpStatusCode.Conflict: - throw new TaskOrchestrationJobAlreadyAcquiredException($"Job message already acquired: {messageId}"); - default: - throw new Exception($"Failed to get job message: {result.Error}"); + switch ((HttpStatusCode)error.Code) + { + case HttpStatusCode.NotFound: + throw new TaskOrchestrationJobNotFoundException($"Job message not found '{messageId}'. {error.Message}"); + case HttpStatusCode.Conflict: + throw new TaskOrchestrationJobAlreadyAcquiredException($"Job message already acquired '{messageId}'. {error.Message}"); + case HttpStatusCode.UnprocessableEntity: + throw new TaskOrchestrationJobUnprocessableException($"Unprocessable job '{messageId}'. 
{error.Message}"); + } + } + + if (!string.IsNullOrEmpty(result.ErrorBody)) + { + throw new Exception($"Failed to get job message: {result.Error}. {Truncate(result.ErrorBody)}"); + } + else + { + throw new Exception($"Failed to get job message: {result.Error}"); } } @@ -96,11 +124,13 @@ namespace GitHub.Actions.RunService.WebApi Uri requestUri, Guid planId, Guid jobId, - TaskResult result, + TaskResult conclusion, Dictionary outputs, IList stepResults, IList jobAnnotations, string environmentUrl, + IList telemetry, + string billingOwnerId, CancellationToken cancellationToken = default) { HttpMethod httpMethod = new HttpMethod("POST"); @@ -108,32 +138,44 @@ namespace GitHub.Actions.RunService.WebApi { PlanID = planId, JobID = jobId, - Conclusion = result, + Conclusion = conclusion, Outputs = outputs, StepResults = stepResults, Annotations = jobAnnotations, EnvironmentUrl = environmentUrl, + Telemetry = telemetry, + BillingOwnerId = billingOwnerId, }; requestUri = new Uri(requestUri, "completejob"); var requestContent = new ObjectContent(payload, new VssJsonMediaTypeFormatter(true)); - var response = await SendAsync( + var result = await Send2Async( httpMethod, requestUri, content: requestContent, cancellationToken: cancellationToken); - if (response.IsSuccessStatusCode) + if (result.IsSuccess) { return; } - switch (response.StatusCode) + if (TryParseErrorBody(result.ErrorBody, out RunServiceError error)) { - case HttpStatusCode.NotFound: - throw new TaskOrchestrationJobNotFoundException($"Job not found: {jobId}"); - default: - throw new Exception($"Failed to complete job: {response.ReasonPhrase}"); + switch ((HttpStatusCode)error.Code) + { + case HttpStatusCode.NotFound: + throw new TaskOrchestrationJobNotFoundException($"Job not found: {jobId}. {error.Message}"); + } + } + + if (!string.IsNullOrEmpty(result.ErrorBody)) + { + throw new Exception($"Failed to complete job: {result.Error}. {Truncate(result.ErrorBody)}"); + } + else + { + throw new Exception($"Failed to complete job: {result.Error}"); } } @@ -157,6 +199,7 @@ namespace GitHub.Actions.RunService.WebApi httpMethod, requestUri, content: requestContent, + readErrorBody: true, cancellationToken: cancellationToken); if (result.IsSuccess) @@ -164,13 +207,61 @@ namespace GitHub.Actions.RunService.WebApi return result.Value; } - switch (result.StatusCode) + if (TryParseErrorBody(result.ErrorBody, out RunServiceError error)) { - case HttpStatusCode.NotFound: - throw new TaskOrchestrationJobNotFoundException($"Job not found: {jobId}"); - default: - throw new Exception($"Failed to renew job: {result.Error}"); + switch ((HttpStatusCode)error.Code) + { + case HttpStatusCode.NotFound: + throw new TaskOrchestrationJobNotFoundException($"Job not found: {jobId}. {error.Message}"); + } } + + if (!string.IsNullOrEmpty(result.ErrorBody)) + { + throw new Exception($"Failed to renew job: {result.Error}. 
{Truncate(result.ErrorBody)}"); + } + else + { + throw new Exception($"Failed to renew job: {result.Error}"); + } + } + + protected override async Task ReadJsonContentAsync(HttpResponseMessage response, CancellationToken cancellationToken = default(CancellationToken)) + { + var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + return JsonConvert.DeserializeObject(json, s_serializerSettings); + } + + private static bool TryParseErrorBody(string errorBody, out RunServiceError error) + { + if (!string.IsNullOrEmpty(errorBody)) + { + try + { + error = JsonUtility.FromString(errorBody); + if (error?.Source == "actions-run-service") + { + return true; + } + } + catch (Exception) + { + } + } + + error = null; + return false; + } + + internal static string Truncate(string errorBody) + { + const int maxLength = 200; + if (errorBody.Length > maxLength) + { + return errorBody.Substring(0, maxLength) + "[truncated]"; + } + + return errorBody; } } } diff --git a/src/Sdk/Sdk.csproj b/src/Sdk/Sdk.csproj index ff1cb85a4..3f6766e7c 100644 --- a/src/Sdk/Sdk.csproj +++ b/src/Sdk/Sdk.csproj @@ -1,11 +1,12 @@ - net6.0 + net8.0 Library win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64 + true - NU1701;NU1603 + NU1701;NU1603;SYSLIB0050;SYSLIB0051 $(Version) TRACE 8.0 @@ -13,18 +14,19 @@ - - + + - - - - - + + + + + + diff --git a/src/Sdk/WebApi/WebApi/BrokerHttpClient.cs b/src/Sdk/WebApi/WebApi/BrokerHttpClient.cs index 37380e0c3..d4a055262 100644 --- a/src/Sdk/WebApi/WebApi/BrokerHttpClient.cs +++ b/src/Sdk/WebApi/WebApi/BrokerHttpClient.cs @@ -79,6 +79,7 @@ namespace GitHub.Actions.RunService.WebApi { queryParams.Add("status", status.Value.ToString()); } + if (runnerVersion != null) { queryParams.Add("runnerVersion", runnerVersion); @@ -103,6 +104,7 @@ namespace GitHub.Actions.RunService.WebApi new HttpMethod("GET"), requestUri: requestUri, queryParameters: queryParams, + readErrorBody: true, cancellationToken: cancellationToken); if (result.IsSuccess) @@ -110,16 +112,37 @@ namespace GitHub.Actions.RunService.WebApi return result.Value; } - if (result.StatusCode == HttpStatusCode.Forbidden) + if (TryParseErrorBody(result.ErrorBody, out BrokerError brokerError)) { - throw new AccessDeniedException(result.Error); + switch (brokerError.ErrorKind) + { + case BrokerErrorKind.RunnerNotFound: + throw new RunnerNotFoundException(brokerError.Message); + case BrokerErrorKind.RunnerVersionTooOld: + throw new AccessDeniedException(brokerError.Message) + { + ErrorCode = 1 + }; + case BrokerErrorKind.HostedRunnerDeprovisioned: + throw new HostedRunnerDeprovisionedException(brokerError.Message); + default: + break; + } } - throw new Exception($"Failed to get job message: {result.Error}"); + // temporary back compat + if (result.StatusCode == HttpStatusCode.Forbidden) + { + throw new AccessDeniedException($"{result.Error} Runner version v{runnerVersion} is deprecated and cannot receive messages.") + { + ErrorCode = 1 + }; + } + + throw new Exception($"Failed to get job message. Request to {requestUri} failed with status: {result.StatusCode}. Error message {result.Error}"); } public async Task CreateSessionAsync( - TaskAgentSession session, CancellationToken cancellationToken = default) { @@ -167,5 +190,96 @@ namespace GitHub.Actions.RunService.WebApi throw new Exception($"Failed to delete broker session: {result.Error}"); } + + public async Task AcknowledgeRunnerRequestAsync( + string runnerRequestId, + Guid? 
sessionId, + string runnerVersion, + TaskAgentStatus? status, + string os = null, + string architecture = null, + CancellationToken cancellationToken = default) + { + // URL + var requestUri = new Uri(Client.BaseAddress, "acknowledge"); + + // Query parameters + List> queryParams = new List>(); + if (sessionId != null) + { + queryParams.Add("sessionId", sessionId.Value.ToString()); + } + if (status != null) + { + queryParams.Add("status", status.Value.ToString()); + } + if (runnerVersion != null) + { + queryParams.Add("runnerVersion", runnerVersion); + } + if (os != null) + { + queryParams.Add("os", os); + } + if (architecture != null) + { + queryParams.Add("architecture", architecture); + } + + // Body + var payload = new Dictionary + { + ["runnerRequestId"] = runnerRequestId, + }; + var requestContent = new ObjectContent>(payload, new VssJsonMediaTypeFormatter(true)); + + // POST + var result = await SendAsync( + new HttpMethod("POST"), + requestUri: requestUri, + queryParameters: queryParams, + content: requestContent, + readErrorBody: true, + cancellationToken: cancellationToken); + + if (result.IsSuccess) + { + return; + } + + if (TryParseErrorBody(result.ErrorBody, out BrokerError brokerError)) + { + switch (brokerError.ErrorKind) + { + case BrokerErrorKind.RunnerNotFound: + throw new RunnerNotFoundException(brokerError.Message); + default: + break; + } + } + + throw new Exception($"Failed to acknowledge runner request. Request to {requestUri} failed with status: {result.StatusCode}. Error message {result.Error}"); + } + + private static bool TryParseErrorBody(string errorBody, out BrokerError error) + { + if (!string.IsNullOrEmpty(errorBody)) + { + try + { + error = JsonUtility.FromString(errorBody); + if (error?.Source == "actions-broker-listener") + { + return true; + } + } + catch (Exception) + { + } + } + + error = null; + return false; + } } } diff --git a/src/Sdk/WebApi/WebApi/Exceptions/HostedRunnerDeprovisionedException.cs b/src/Sdk/WebApi/WebApi/Exceptions/HostedRunnerDeprovisionedException.cs new file mode 100644 index 000000000..c9ae7e4b4 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Exceptions/HostedRunnerDeprovisionedException.cs @@ -0,0 +1,23 @@ +using System; + +namespace GitHub.Services.WebApi +{ + [Serializable] + public sealed class HostedRunnerDeprovisionedException : Exception + { + public HostedRunnerDeprovisionedException() + : base() + { + } + + public HostedRunnerDeprovisionedException(String message) + : base(message) + { + } + + public HostedRunnerDeprovisionedException(String message, Exception innerException) + : base(message, innerException) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Exceptions/RunnerNotFoundException.cs b/src/Sdk/WebApi/WebApi/Exceptions/RunnerNotFoundException.cs new file mode 100644 index 000000000..957d54b89 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Exceptions/RunnerNotFoundException.cs @@ -0,0 +1,26 @@ +using System; +using System.Diagnostics.CodeAnalysis; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Services.WebApi +{ + [Serializable] + public sealed class RunnerNotFoundException : Exception + { + public RunnerNotFoundException() + : base() + { + } + + public RunnerNotFoundException(String message) + : base(message) + { + } + + public RunnerNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Jwt/JsonWebToken.cs b/src/Sdk/WebApi/WebApi/Jwt/JsonWebToken.cs index 15216cb14..10c25904b 100644 --- 
a/src/Sdk/WebApi/WebApi/Jwt/JsonWebToken.cs +++ b/src/Sdk/WebApi/WebApi/Jwt/JsonWebToken.cs @@ -25,7 +25,10 @@ namespace GitHub.Services.WebApi.Jwt HS256, [EnumMember] - RS256 + RS256, + + [EnumMember] + PS256, } //JsonWebToken is marked as DataContract so @@ -286,6 +289,7 @@ namespace GitHub.Services.WebApi.Jwt { case JWTAlgorithm.HS256: case JWTAlgorithm.RS256: + case JWTAlgorithm.PS256: return signingCredentials.SignData(bytes); default: diff --git a/src/Sdk/WebApi/WebApi/LaunchContracts.cs b/src/Sdk/WebApi/WebApi/LaunchContracts.cs index 41a67113e..28b6ce3cf 100644 --- a/src/Sdk/WebApi/WebApi/LaunchContracts.cs +++ b/src/Sdk/WebApi/WebApi/LaunchContracts.cs @@ -30,6 +30,9 @@ namespace GitHub.Services.Launch.Contracts [DataMember(EmitDefaultValue = false, Name = "authentication")] public ActionDownloadAuthenticationResponse Authentication { get; set; } + [DataMember(EmitDefaultValue = false, Name = "package_details")] + public ActionDownloadPackageDetailsResponse PackageDetails { get; set; } + [DataMember(EmitDefaultValue = false, Name = "name")] public string Name { get; set; } @@ -59,6 +62,17 @@ namespace GitHub.Services.Launch.Contracts public string Token { get; set; } } + + [DataContract] + public class ActionDownloadPackageDetailsResponse + { + [DataMember(EmitDefaultValue = false, Name = "version")] + public string Version { get; set; } + + [DataMember(EmitDefaultValue = false, Name = "manifest_digest")] + public string ManifestDigest { get; set; } + } + [DataContract] public class ActionDownloadInfoResponseCollection { @@ -67,4 +81,25 @@ namespace GitHub.Services.Launch.Contracts [DataMember(EmitDefaultValue = false, Name = "actions")] public IDictionary Actions { get; set; } } + + [DataContract] + public class ActionDownloadResolutionError + { + /// + /// The error message associated with the action download error. + /// + [DataMember(EmitDefaultValue = false, Name = "message")] + public string Message { get; set; } + } + + [DataContract] + public class ActionDownloadResolutionErrorCollection + { + /// + /// A mapping of action specifications to their download errors. + /// The key is the full name of the action plus version, e.g. "actions/checkout@v2". 
+ /// + [DataMember(EmitDefaultValue = false, Name = "errors")] + public IDictionary Errors { get; set; } + } } diff --git a/src/Sdk/WebApi/WebApi/LaunchHttpClient.cs b/src/Sdk/WebApi/WebApi/LaunchHttpClient.cs index bf2f4c00e..24e398636 100644 --- a/src/Sdk/WebApi/WebApi/LaunchHttpClient.cs +++ b/src/Sdk/WebApi/WebApi/LaunchHttpClient.cs @@ -2,6 +2,7 @@ using System; using System.Linq; +using System.Net; using System.Net.Http; using System.Net.Http.Formatting; using System.Net.Http.Headers; @@ -32,11 +33,52 @@ namespace GitHub.Services.Launch.Client public async Task GetResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken) { var GetResolveActionsDownloadInfoURLEndpoint = new Uri(m_launchServiceUrl, $"/actions/build/{planId.ToString()}/jobs/{jobId.ToString()}/runnerresolve/actions"); - return ToServerData(await GetLaunchSignedURLResponse(GetResolveActionsDownloadInfoURLEndpoint, ToGitHubData(actionReferenceList), cancellationToken)); + var response = await GetLaunchSignedURLResponse(GetResolveActionsDownloadInfoURLEndpoint, ToGitHubData(actionReferenceList), cancellationToken); + return ToServerData(await ReadJsonContentAsync(response, cancellationToken)); } - // Resolve Actions - private async Task GetLaunchSignedURLResponse(Uri uri, R request, CancellationToken cancellationToken) + public async Task GetResolveActionsDownloadInfoAsyncV2(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken) + { + var GetResolveActionsDownloadInfoURLEndpoint = new Uri(m_launchServiceUrl, $"/actions/build/{planId.ToString()}/jobs/{jobId.ToString()}/runnerresolve/actions"); + var response = await GetLaunchSignedURLResponse(GetResolveActionsDownloadInfoURLEndpoint, ToGitHubData(actionReferenceList), cancellationToken); + + if (response.IsSuccessStatusCode) + { + // Success response - deserialize the action download info + return ToServerData(await ReadJsonContentAsync(response, cancellationToken)); + } + + var responseError = response.ReasonPhrase ?? ""; + if (response.StatusCode == HttpStatusCode.UnprocessableEntity) + { + // 422 response - unresolvable actions, error details are in the body + var errors = await ReadJsonContentAsync(response, cancellationToken); + string combinedErrorMessage; + if (errors?.Errors != null && errors.Errors.Any()) + { + combinedErrorMessage = String.Join(". 
", errors.Errors.Select(kvp => kvp.Value.Message)); + } + else + { + combinedErrorMessage = responseError; + } + + throw new UnresolvableActionDownloadInfoException(combinedErrorMessage); + } + else if (response.StatusCode == HttpStatusCode.TooManyRequests) + { + // Here we want to add a message so customers don't think it's a rate limit scoped to them + // Ideally this would be 500 but the runner retries 500s, which we don't want to do when we're being rate limited + // See: https://github.com/github/ecosystem-api/issues/4084 + throw new NonRetryableActionDownloadInfoException(responseError + " (GitHub has reached an internal rate limit, please try again later)"); + } + else + { + throw new Exception(responseError); + } + } + + private async Task GetLaunchSignedURLResponse(Uri uri, R request, CancellationToken cancellationToken) { using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri)) { @@ -46,10 +88,7 @@ namespace GitHub.Services.Launch.Client using (HttpContent content = new ObjectContent(request, m_formatter)) { requestMessage.Content = content; - using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken)) - { - return await ReadJsonContentAsync(response, cancellationToken); - } + return await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken); } } } @@ -91,6 +130,7 @@ namespace GitHub.Services.Launch.Client TarballUrl = actionDownloadInfoResponse.TarUrl, Ref = actionDownloadInfoResponse.Version, ZipballUrl = actionDownloadInfoResponse.ZipUrl, + PackageDetails = ToServerData(actionDownloadInfoResponse.PackageDetails) }; } @@ -108,6 +148,21 @@ namespace GitHub.Services.Launch.Client }; } + + private static ActionDownloadPackageDetails? ToServerData(ActionDownloadPackageDetailsResponse? actionDownloadPackageDetails) + { + if (actionDownloadPackageDetails == null) + { + return null; + } + + return new ActionDownloadPackageDetails + { + Version = actionDownloadPackageDetails.Version, + ManifestDigest = actionDownloadPackageDetails.ManifestDigest + }; + } + private MediaTypeFormatter m_formatter; private Uri m_launchServiceUrl; private string m_token; diff --git a/src/Sdk/WebApi/WebApi/Location/ServerDataProvider.cs b/src/Sdk/WebApi/WebApi/Location/ServerDataProvider.cs index c1997a1b7..43a222626 100644 --- a/src/Sdk/WebApi/WebApi/Location/ServerDataProvider.cs +++ b/src/Sdk/WebApi/WebApi/Location/ServerDataProvider.cs @@ -677,7 +677,7 @@ namespace GitHub.Services.WebApi.Location Int32 lastChangeId = m_locationDataCacheManager.GetLastChangeId(); // If we have -1 then that means we have no disk cache yet or it means that we recently hit an exception trying to reload - // the the cache from disk (see Exception catch block in EnsureDiskCacheLoaded). + // the cache from disk (see Exception catch block in EnsureDiskCacheLoaded). // Either way, we cannot make a call to the server with -1 and pass None. 
// If we do, the resulting payload (which would have ClientCacheFresh=false but include no ServiceDefinitions) // would leave the in-memory cache in an inconsistent state diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthExceptions.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthExceptions.cs index 5ebf86f9a..34ec103d0 100644 --- a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthExceptions.cs +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthExceptions.cs @@ -85,6 +85,7 @@ namespace GitHub.Services.OAuth set; } + [Obsolete] public override void GetObjectData(SerializationInfo info, StreamingContext context) { base.GetObjectData(info, context); diff --git a/src/Sdk/WebApi/WebApi/RawHttpClientBase.cs b/src/Sdk/WebApi/WebApi/RawHttpClientBase.cs index de7c3bcb3..23c514724 100644 --- a/src/Sdk/WebApi/WebApi/RawHttpClientBase.cs +++ b/src/Sdk/WebApi/WebApi/RawHttpClientBase.cs @@ -101,7 +101,7 @@ namespace Sdk.WebApi.WebApi } } - protected Task> SendAsync( + protected async Task Send2Async( HttpMethod method, Uri requestUri, HttpContent content = null, @@ -109,7 +109,47 @@ namespace Sdk.WebApi.WebApi Object userState = null, CancellationToken cancellationToken = default(CancellationToken)) { - return SendAsync(method, null, requestUri, content, queryParameters, userState, cancellationToken); + using (var response = await SendAsync(method, requestUri, content, queryParameters, userState, cancellationToken).ConfigureAwait(false)) + { + if (response.IsSuccessStatusCode) + { + return new RawHttpClientResult( + isSuccess: true, + error: string.Empty, + statusCode: response.StatusCode); + } + else + { + var errorBody = default(string); + try + { + errorBody = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + errorBody = $"Error reading HTTP response body: {ex.Message}"; + } + + string errorMessage = $"Error: {response.ReasonPhrase}"; + return new RawHttpClientResult( + isSuccess: false, + error: errorMessage, + statusCode: response.StatusCode, + errorBody: errorBody); + } + } + } + + protected Task> SendAsync( + HttpMethod method, + Uri requestUri, + HttpContent content = null, + IEnumerable> queryParameters = null, + Boolean readErrorBody = false, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return SendAsync(method, null, requestUri, content, queryParameters, readErrorBody, userState, cancellationToken); } protected async Task> SendAsync( @@ -118,18 +158,20 @@ namespace Sdk.WebApi.WebApi Uri requestUri, HttpContent content = null, IEnumerable> queryParameters = null, + Boolean readErrorBody = false, Object userState = null, CancellationToken cancellationToken = default(CancellationToken)) { using (VssTraceActivity.GetOrCreate().EnterCorrelationScope()) using (HttpRequestMessage requestMessage = CreateRequestMessage(method, additionalHeaders, requestUri, content, queryParameters)) { - return await SendAsync(requestMessage, userState, cancellationToken).ConfigureAwait(false); + return await SendAsync(requestMessage, readErrorBody, userState, cancellationToken).ConfigureAwait(false); } } protected async Task> SendAsync( HttpRequestMessage message, + Boolean readErrorBody = false, Object userState = null, CancellationToken cancellationToken = default(CancellationToken)) { @@ -145,8 +187,21 @@ namespace Sdk.WebApi.WebApi } else { + var errorBody = default(string); + if (readErrorBody) + { + try + { + errorBody = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + } + catch 
(Exception ex) + { + errorBody = $"Error reading HTTP response body: {ex.Message}"; + } + } + string errorMessage = $"Error: {response.ReasonPhrase}"; - return RawHttpClientResult.Fail(errorMessage, response.StatusCode); + return RawHttpClientResult.Fail(errorMessage, response.StatusCode, errorBody); } } } diff --git a/src/Sdk/WebApi/WebApi/RawHttpClientResult.cs b/src/Sdk/WebApi/WebApi/RawHttpClientResult.cs index 1b2dc5f06..113de871f 100644 --- a/src/Sdk/WebApi/WebApi/RawHttpClientResult.cs +++ b/src/Sdk/WebApi/WebApi/RawHttpClientResult.cs @@ -5,15 +5,27 @@ namespace Sdk.WebApi.WebApi public class RawHttpClientResult { public bool IsSuccess { get; protected set; } + + /// + /// A description of the HTTP status code, like "Error: Unprocessable Entity" + /// public string Error { get; protected set; } + + /// + /// The HTTP response body for unsuccessful HTTP status codes, or an error message when reading the response body fails. + /// + public string ErrorBody { get; protected set; } + public HttpStatusCode StatusCode { get; protected set; } + public bool IsFailure => !IsSuccess; - protected RawHttpClientResult(bool isSuccess, string error, HttpStatusCode statusCode) + public RawHttpClientResult(bool isSuccess, string error, HttpStatusCode statusCode, string errorBody = null) { IsSuccess = isSuccess; Error = error; StatusCode = statusCode; + ErrorBody = errorBody; } } @@ -21,13 +33,13 @@ namespace Sdk.WebApi.WebApi { public T Value { get; private set; } - protected internal RawHttpClientResult(T value, bool isSuccess, string error, HttpStatusCode statusCode) - : base(isSuccess, error, statusCode) + protected internal RawHttpClientResult(T value, bool isSuccess, string error, HttpStatusCode statusCode, string errorBody) + : base(isSuccess, error, statusCode, errorBody) { Value = value; } - public static RawHttpClientResult Fail(string message, HttpStatusCode statusCode) => new RawHttpClientResult(default(T), false, message, statusCode); - public static RawHttpClientResult Ok(T value) => new RawHttpClientResult(value, true, string.Empty, HttpStatusCode.OK); + public static RawHttpClientResult Fail(string message, HttpStatusCode statusCode, string errorBody) => new RawHttpClientResult(default(T), false, message, statusCode, errorBody); + public static RawHttpClientResult Ok(T value) => new RawHttpClientResult(value, true, string.Empty, HttpStatusCode.OK, null); } } diff --git a/src/Sdk/WebApi/WebApi/ResultsHttpClient.cs b/src/Sdk/WebApi/WebApi/ResultsHttpClient.cs index d206306f5..31819a4b2 100644 --- a/src/Sdk/WebApi/WebApi/ResultsHttpClient.cs +++ b/src/Sdk/WebApi/WebApi/ResultsHttpClient.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Generic; +using System.Globalization; using System.IO; using System.Linq; using System.Net.Http; @@ -132,7 +133,7 @@ namespace GitHub.Services.Results.Client private async Task StepSummaryUploadCompleteAsync(string planId, string jobId, Guid stepId, long size, CancellationToken cancellationToken) { - var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat); + var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat, CultureInfo.InvariantCulture); var request = new StepSummaryMetadataCreate() { WorkflowJobRunBackendId = jobId, @@ -148,7 +149,7 @@ namespace GitHub.Services.Results.Client private async Task StepLogUploadCompleteAsync(string planId, string jobId, Guid stepId, long lineCount, CancellationToken cancellationToken) { - var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat); + var timestamp = 
DateTime.UtcNow.ToString(Constants.TimestampFormat, CultureInfo.InvariantCulture); var request = new StepLogsMetadataCreate() { WorkflowJobRunBackendId = jobId, @@ -164,7 +165,7 @@ namespace GitHub.Services.Results.Client private async Task JobLogUploadCompleteAsync(string planId, string jobId, long lineCount, CancellationToken cancellationToken) { - var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat); + var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat, CultureInfo.InvariantCulture); var request = new JobLogsMetadataCreate() { WorkflowJobRunBackendId = jobId, @@ -519,8 +520,8 @@ namespace GitHub.Services.Results.Client Number = r.Order.GetValueOrDefault(), Name = r.Name, Status = ConvertStateToStatus(r.State.GetValueOrDefault()), - StartedAt = r.StartTime?.ToString(Constants.TimestampFormat), - CompletedAt = r.FinishTime?.ToString(Constants.TimestampFormat), + StartedAt = r.StartTime?.ToString(Constants.TimestampFormat, CultureInfo.InvariantCulture), + CompletedAt = r.FinishTime?.ToString(Constants.TimestampFormat, CultureInfo.InvariantCulture), Conclusion = ConvertResultToConclusion(r.Result) }; } @@ -565,7 +566,7 @@ namespace GitHub.Services.Results.Client public async Task UpdateWorkflowStepsAsync(Guid planId, IEnumerable records, CancellationToken cancellationToken) { - var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat); + var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat, CultureInfo.InvariantCulture); var stepRecords = records.Where(r => String.Equals(r.RecordType, "Task", StringComparison.Ordinal)); var stepUpdateRequests = stepRecords.GroupBy(r => r.ParentId).Select(sg => new StepsUpdateRequest() { diff --git a/src/Sdk/WebApi/WebApi/VssServiceResponseException.cs b/src/Sdk/WebApi/WebApi/VssServiceResponseException.cs index e4aa84a73..8dc275c00 100644 --- a/src/Sdk/WebApi/WebApi/VssServiceResponseException.cs +++ b/src/Sdk/WebApi/WebApi/VssServiceResponseException.cs @@ -24,6 +24,7 @@ namespace GitHub.Services.WebApi HttpStatusCode = (HttpStatusCode)info.GetInt32("HttpStatusCode"); } + [Obsolete] [SecurityCritical] public override void GetObjectData(SerializationInfo info, StreamingContext context) { diff --git a/src/Sdk/WebApi/WebApi/VssSigningCredentials.cs b/src/Sdk/WebApi/WebApi/VssSigningCredentials.cs index 6b7e0c348..68a99cf70 100644 --- a/src/Sdk/WebApi/WebApi/VssSigningCredentials.cs +++ b/src/Sdk/WebApi/WebApi/VssSigningCredentials.cs @@ -166,6 +166,21 @@ namespace GitHub.Services.WebApi } } + public override JWTAlgorithm SignatureAlgorithm + { + get + { + if (m_signaturePadding == RSASignaturePadding.Pss) + { + return JWTAlgorithm.PS256; + } + else + { + return base.SignatureAlgorithm; + } + } + } + protected override Byte[] GetSignature(Byte[] input) { using (var rsa = m_factory()) diff --git a/src/Test/L0/CommandLineParserL0.cs b/src/Test/L0/CommandLineParserL0.cs index 19ab497fa..e502868ba 100644 --- a/src/Test/L0/CommandLineParserL0.cs +++ b/src/Test/L0/CommandLineParserL0.cs @@ -68,7 +68,7 @@ namespace GitHub.Runner.Common.Tests trace.Info("Parsed"); trace.Info("Commands: {0}", clp.Commands.Count); - Assert.True(clp.Commands.Count == 2); + Assert.Equal(2, clp.Commands.Count); } } @@ -88,7 +88,7 @@ namespace GitHub.Runner.Common.Tests trace.Info("Parsed"); trace.Info("Args: {0}", clp.Args.Count); - Assert.True(clp.Args.Count == 2); + Assert.Equal(2, clp.Args.Count); Assert.True(clp.Args.ContainsKey("arg1")); Assert.Equal("arg1val", clp.Args["arg1"]); Assert.True(clp.Args.ContainsKey("arg2")); @@ 
-112,7 +112,7 @@ namespace GitHub.Runner.Common.Tests trace.Info("Parsed"); trace.Info("Args: {0}", clp.Flags.Count); - Assert.True(clp.Flags.Count == 2); + Assert.Equal(2, clp.Flags.Count); Assert.Contains("flag1", clp.Flags); Assert.Contains("flag2", clp.Flags); } diff --git a/src/Test/L0/ConstantGenerationL0.cs b/src/Test/L0/ConstantGenerationL0.cs index 204248516..f3c1b8f9e 100644 --- a/src/Test/L0/ConstantGenerationL0.cs +++ b/src/Test/L0/ConstantGenerationL0.cs @@ -24,7 +24,7 @@ namespace GitHub.Runner.Common.Tests "osx-arm64" }; - Assert.True(BuildConstants.Source.CommitHash.Length == 40, $"CommitHash should be SHA-1 hash {BuildConstants.Source.CommitHash}"); + Assert.Equal(40, BuildConstants.Source.CommitHash.Length); Assert.True(validPackageNames.Contains(BuildConstants.RunnerPackage.PackageName), $"PackageName should be one of the following '{string.Join(", ", validPackageNames)}', current PackageName is '{BuildConstants.RunnerPackage.PackageName}'"); } } diff --git a/src/Test/L0/HostContextL0.cs b/src/Test/L0/HostContextL0.cs index 017a7dc29..2b6a0b590 100644 --- a/src/Test/L0/HostContextL0.cs +++ b/src/Test/L0/HostContextL0.cs @@ -1,10 +1,10 @@ -using GitHub.Runner.Common.Util; -using System; +using System; using System.IO; using System.Reflection; using System.Runtime.CompilerServices; using System.Text; using System.Threading; +using System.Threading.Tasks; using Xunit; namespace GitHub.Runner.Common.Tests @@ -172,6 +172,133 @@ namespace GitHub.Runner.Common.Tests } } + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void AuthMigrationDisabledByDefault() + { + try + { + Environment.SetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL", "100"); + + // Arrange. + Setup(); + + // Assert. + Assert.False(_hc.AllowAuthMigration); + + // Change migration state is error free. + _hc.EnableAuthMigration("L0Test"); + _hc.DeferAuthMigration(TimeSpan.FromHours(1), "L0Test"); + } + finally + { + Environment.SetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL", null); + // Cleanup. + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task AuthMigrationReenableTaskNotRunningByDefault() + { + try + { + Environment.SetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL", "50"); + + // Arrange. + Setup(); + + // Assert. + Assert.False(_hc.AllowAuthMigration); + await Task.Delay(TimeSpan.FromMilliseconds(200)); + } + finally + { + Environment.SetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL", null); + // Cleanup. + Teardown(); + } + + var logFile = Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), $"trace_{nameof(HostContextL0)}_{nameof(AuthMigrationReenableTaskNotRunningByDefault)}.log"); + var logContent = await File.ReadAllTextAsync(logFile); + Assert.Contains("HostContext", logContent); + Assert.DoesNotContain("Auth migration defer timer", logContent); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void AuthMigrationEnableDisable() + { + try + { + // Arrange. + Setup(); + + var eventFiredCount = 0; + _hc.AuthMigrationChanged += (sender, e) => + { + eventFiredCount++; + Assert.Equal("L0Test", e.Trace); + }; + + // Assert. + _hc.EnableAuthMigration("L0Test"); + Assert.True(_hc.AllowAuthMigration); + + _hc.DeferAuthMigration(TimeSpan.FromHours(1), "L0Test"); + Assert.False(_hc.AllowAuthMigration); + Assert.Equal(2, eventFiredCount); + } + finally + { + // Cleanup. 
+ Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task AuthMigrationAutoReset() + { + try + { + Environment.SetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL", "100"); + + // Arrange. + Setup(); + + var eventFiredCount = 0; + _hc.AuthMigrationChanged += (sender, e) => + { + eventFiredCount++; + Assert.NotEmpty(e.Trace); + }; + + // Assert. + _hc.EnableAuthMigration("L0Test"); + Assert.True(_hc.AllowAuthMigration); + + _hc.DeferAuthMigration(TimeSpan.FromMilliseconds(500), "L0Test"); + Assert.False(_hc.AllowAuthMigration); + + await Task.Delay(TimeSpan.FromSeconds(1)); + Assert.True(_hc.AllowAuthMigration); + Assert.Equal(3, eventFiredCount); + } + finally + { + Environment.SetEnvironmentVariable("_GITHUB_ACTION_AUTH_MIGRATION_REFRESH_INTERVAL", null); + + // Cleanup. + Teardown(); + } + } + private void Setup([CallerMemberName] string testName = "") { _tokenSource = new CancellationTokenSource(); diff --git a/src/Test/L0/Listener/BrokerMessageListenerL0.cs b/src/Test/L0/Listener/BrokerMessageListenerL0.cs index 7dface3b2..c42d134dd 100644 --- a/src/Test/L0/Listener/BrokerMessageListenerL0.cs +++ b/src/Test/L0/Listener/BrokerMessageListenerL0.cs @@ -1,4 +1,5 @@ using System; +using System.IO; using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; @@ -16,9 +17,8 @@ namespace GitHub.Runner.Common.Tests.Listener private readonly RunnerSettings _settings; private readonly Mock _config; private readonly Mock _brokerServer; + private readonly Mock _runnerServer; private readonly Mock _credMgr; - private Mock _store; - public BrokerMessageListenerL0() { @@ -26,14 +26,14 @@ namespace GitHub.Runner.Common.Tests.Listener _config = new Mock(); _config.Setup(x => x.LoadSettings()).Returns(_settings); _credMgr = new Mock(); - _store = new Mock(); _brokerServer = new Mock(); + _runnerServer = new Mock(); } [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void CreatesSession() + public async Task CreatesSession() { using (TestHostContext tc = CreateTestContext()) using (var tokenSource = new CancellationTokenSource()) @@ -48,19 +48,17 @@ namespace GitHub.Runner.Common.Tests.Listener tokenSource.Token)) .Returns(Task.FromResult(expectedSession)); - _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); - _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); - _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); + _credMgr.Setup(x => x.LoadCredentials(true)).Returns(new VssCredentials()); // Act. BrokerMessageListener listener = new(); listener.Initialize(tc); - bool result = await listener.CreateSessionAsync(tokenSource.Token); + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); trace.Info("result: {0}", result); // Assert. - Assert.True(result); + Assert.Equal(CreateSessionResult.Success, result); _brokerServer .Verify(x => x.CreateSessionAsync( It.Is(y => y != null), @@ -68,13 +66,353 @@ namespace GitHub.Runner.Common.Tests.Listener } } + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task HandleAuthMigrationChanged() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. 
+ var expectedSession = new TaskAgentSession(); + _brokerServer + .Setup(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + _credMgr.Setup(x => x.LoadCredentials(true)).Returns(new VssCredentials()); + + // Act. + BrokerMessageListener listener = new(); + listener.Initialize(tc); + + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + trace.Info("result: {0}", result); + + // Assert. + Assert.Equal(CreateSessionResult.Success, result); + _brokerServer + .Verify(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token), Times.Once()); + + tc.EnableAuthMigration("L0Test"); + + var traceFile = Path.GetTempFileName(); + File.Copy(tc.TraceFileName, traceFile, true); + Assert.Contains("Auth migration changed", File.ReadAllText(traceFile)); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task CreatesSession_DeferAuthMigration() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. + var throwException = true; + var expectedSession = new TaskAgentSession(); + _brokerServer + .Setup(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token)) + .Returns(async (TaskAgentSession session, CancellationToken token) => + { + await Task.Yield(); + if (throwException) + { + throwException = false; + throw new NotSupportedException("Error during create session"); + } + + return expectedSession; + }); + + _credMgr.Setup(x => x.LoadCredentials(true)).Returns(new VssCredentials()); + + // Act. + BrokerMessageListener listener = new(); + listener.Initialize(tc); + + tc.EnableAuthMigration("L0Test"); + Assert.True(tc.AllowAuthMigration); + + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + trace.Info("result: {0}", result); + + // Assert. + Assert.Equal(CreateSessionResult.Success, result); + _brokerServer + .Verify(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token), Times.Exactly(2)); + _credMgr.Verify(x => x.LoadCredentials(true), Times.Exactly(2)); + + Assert.False(tc.AllowAuthMigration); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task GetNextMessage() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. + _credMgr.Setup(x => x.LoadCredentials(true)).Returns(new VssCredentials()); + + var expectedSession = new TaskAgentSession(); + _brokerServer + .Setup(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + var expectedMessage = new TaskAgentMessage(); + _brokerServer + .Setup(x => x.GetRunnerMessageAsync( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(Task.FromResult(expectedMessage)); + + // Act. + BrokerMessageListener listener = new(); + listener.Initialize(tc); + + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + trace.Info("result: {0}", result); + Assert.Equal(CreateSessionResult.Success, result); + + TaskAgentMessage message = await listener.GetNextMessageAsync(tokenSource.Token); + trace.Info("message: {0}", message); + + // Assert. 
+ Assert.Equal(expectedMessage, message); + _brokerServer + .Verify(x => x.GetRunnerMessageAsync( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny()), Times.Once()); + + _brokerServer.Verify(x => x.ConnectAsync(It.IsAny(), It.IsAny()), Times.Once()); + + _credMgr.Verify(x => x.LoadCredentials(true), Times.Once()); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task GetNextMessage_EnableAuthMigration() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. + _credMgr.Setup(x => x.LoadCredentials(true)).Returns(new VssCredentials()); + + var expectedSession = new TaskAgentSession(); + _brokerServer + .Setup(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + var expectedMessage = new TaskAgentMessage(); + _brokerServer + .Setup(x => x.GetRunnerMessageAsync( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(Task.FromResult(expectedMessage)); + + // Act. + BrokerMessageListener listener = new(); + listener.Initialize(tc); + + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + trace.Info("result: {0}", result); + Assert.Equal(CreateSessionResult.Success, result); + + tc.EnableAuthMigration("L0Test"); + + TaskAgentMessage message = await listener.GetNextMessageAsync(tokenSource.Token); + trace.Info("message: {0}", message); + + // Assert. + Assert.Equal(expectedMessage, message); + _brokerServer + .Verify(x => x.GetRunnerMessageAsync( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny()), Times.Once()); + + _brokerServer.Verify(x => x.ConnectAsync(It.IsAny(), It.IsAny()), Times.Exactly(2)); + + _credMgr.Verify(x => x.LoadCredentials(true), Times.Exactly(2)); + + Assert.True(tc.AllowAuthMigration); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task GetNextMessage_AuthMigrationFallback() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + tc.EnableAuthMigration("L0Test"); + + // Arrange. + _credMgr.Setup(x => x.LoadCredentials(true)).Returns(new VssCredentials()); + + var expectedSession = new TaskAgentSession(); + _brokerServer + .Setup(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + var expectedMessage = new TaskAgentMessage(); + _brokerServer + .Setup(x => x.GetRunnerMessageAsync( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny())) + .Returns(async (Guid? sessionId, TaskAgentStatus status, string version, string os, string architecture, bool disableUpdate, CancellationToken token) => + { + await Task.Yield(); + if (tc.AllowAuthMigration) + { + throw new NotSupportedException("Error during get message"); + } + + return expectedMessage; + }); + + // Act. 
+ BrokerMessageListener listener = new(); + listener.Initialize(tc); + + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + trace.Info("result: {0}", result); + Assert.Equal(CreateSessionResult.Success, result); + + Assert.True(tc.AllowAuthMigration); + + TaskAgentMessage message = await listener.GetNextMessageAsync(tokenSource.Token); + trace.Info("message: {0}", message); + + // Assert. + Assert.Equal(expectedMessage, message); + _brokerServer + .Verify(x => x.GetRunnerMessageAsync( + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny(), + It.IsAny()), Times.Exactly(2)); + + _brokerServer.Verify(x => x.ConnectAsync(It.IsAny(), It.IsAny()), Times.Exactly(3)); + + _credMgr.Verify(x => x.LoadCredentials(true), Times.Exactly(3)); + + Assert.False(tc.AllowAuthMigration); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task CreatesSessionWithProvidedSettings() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. + var expectedSession = new TaskAgentSession(); + _brokerServer + .Setup(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + _credMgr.Setup(x => x.LoadCredentials(true)).Returns(new VssCredentials()); + + // Make sure the config is never called when settings are provided + _config.Setup(x => x.LoadSettings()).Throws(new InvalidOperationException("Should not be called")); + + // Act. + // Use the constructor that accepts settings + BrokerMessageListener listener = new(_settings); + listener.Initialize(tc); + + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + trace.Info("result: {0}", result); + + // Assert. + Assert.Equal(CreateSessionResult.Success, result); + _brokerServer + .Verify(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token), Times.Once()); + + // Verify LoadSettings was never called + _config.Verify(x => x.LoadSettings(), Times.Never()); + } + } + private TestHostContext CreateTestContext([CallerMemberName] String testName = "") { TestHostContext tc = new(this, testName); tc.SetSingleton(_config.Object); tc.SetSingleton(_credMgr.Object); - tc.SetSingleton(_store.Object); tc.SetSingleton(_brokerServer.Object); + tc.SetSingleton(_runnerServer.Object); return tc; } } diff --git a/src/Test/L0/Listener/CommandSettingsL0.cs b/src/Test/L0/Listener/CommandSettingsL0.cs index ed7b672b8..f823ba82f 100644 --- a/src/Test/L0/Listener/CommandSettingsL0.cs +++ b/src/Test/L0/Listener/CommandSettingsL0.cs @@ -806,7 +806,7 @@ namespace GitHub.Runner.Common.Tests "test runner" }); // Assert. - Assert.True(command.Validate().Count == 0); + Assert.Equal(0, command.Validate().Count); } } @@ -844,7 +844,7 @@ namespace GitHub.Runner.Common.Tests var command = new CommandSettings(hc, args: new string[] { validCommand, $"--{flag}" }); // Assert. - Assert.True(command.Validate().Count == 0); + Assert.Equal(0, command.Validate().Count); } } @@ -874,7 +874,7 @@ namespace GitHub.Runner.Common.Tests var command = new CommandSettings(hc, args: new string[] { validCommand, $"--{arg}", argValue }); // Assert. 
- Assert.True(command.Validate().Count == 0); + Assert.Equal(0, command.Validate().Count); } } diff --git a/src/Test/L0/Listener/Configuration/ConfigurationManagerL0.cs b/src/Test/L0/Listener/Configuration/ConfigurationManagerL0.cs index 5ee14404a..3c698fdda 100644 --- a/src/Test/L0/Listener/Configuration/ConfigurationManagerL0.cs +++ b/src/Test/L0/Listener/Configuration/ConfigurationManagerL0.cs @@ -190,11 +190,11 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration trace.Info("Configured, verifying all the parameter value"); var s = configManager.LoadSettings(); Assert.NotNull(s); - Assert.True(s.ServerUrl.Equals(_expectedServerUrl)); - Assert.True(s.AgentName.Equals(_expectedAgentName)); - Assert.True(s.PoolId.Equals(_secondRunnerGroupId)); - Assert.True(s.WorkFolder.Equals(_expectedWorkFolder)); - Assert.True(s.Ephemeral.Equals(true)); + Assert.Equal(_expectedServerUrl, s.ServerUrl); + Assert.Equal(_expectedAgentName, s.AgentName); + Assert.Equal(_secondRunnerGroupId, s.PoolId); + Assert.Equal(_expectedWorkFolder, s.WorkFolder); + Assert.True(s.Ephemeral); // validate GetAgentPoolsAsync gets called twice with automation pool type _runnerServer.Verify(x => x.GetAgentPoolsAsync(It.IsAny(), It.Is(p => p == TaskAgentPoolType.Automation)), Times.Exactly(2)); @@ -292,11 +292,11 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration trace.Info("Configured, verifying all the parameter value"); var s = configManager.LoadSettings(); Assert.NotNull(s); - Assert.True(s.ServerUrl.Equals(_expectedServerUrl)); - Assert.True(s.AgentName.Equals(_expectedAgentName)); - Assert.True(s.PoolId.Equals(_secondRunnerGroupId)); - Assert.True(s.WorkFolder.Equals(_expectedWorkFolder)); - Assert.True(s.Ephemeral.Equals(true)); + Assert.Equal(_expectedServerUrl, s.ServerUrl); + Assert.Equal(_expectedAgentName, s.AgentName); + Assert.Equal(_secondRunnerGroupId, s.PoolId); + Assert.Equal(_expectedWorkFolder, s.WorkFolder); + Assert.True(s.Ephemeral); // validate GetAgentPoolsAsync gets called twice with automation pool type _runnerServer.Verify(x => x.GetAgentPoolsAsync(It.IsAny(), It.Is(p => p == TaskAgentPoolType.Automation)), Times.Exactly(2)); diff --git a/src/Test/L0/Listener/Configuration/RunnerCredentialL0.cs b/src/Test/L0/Listener/Configuration/RunnerCredentialL0.cs index 609a71294..a2c5d0c20 100644 --- a/src/Test/L0/Listener/Configuration/RunnerCredentialL0.cs +++ b/src/Test/L0/Listener/Configuration/RunnerCredentialL0.cs @@ -1,14 +1,18 @@ -using GitHub.Runner.Listener; +using System.Collections.Generic; +using System.Security.Cryptography; +using GitHub.Runner.Listener; using GitHub.Runner.Listener.Configuration; using GitHub.Services.Common; using GitHub.Services.OAuth; +using Moq; +using Xunit; namespace GitHub.Runner.Common.Tests.Listener.Configuration { public class TestRunnerCredential : CredentialProvider { public TestRunnerCredential() : base("TEST") { } - public override VssCredentials GetVssCredentials(IHostContext context) + public override VssCredentials GetVssCredentials(IHostContext context, bool allowAuthUrlV2) { Tracing trace = context.GetTrace("OuthAccessToken"); trace.Info("GetVssCredentials()"); @@ -23,4 +27,85 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration { } } -} + + public class OAuthCredentialTestsL0 + { + private Mock _rsaKeyManager = new Mock(); + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "OAuthCredential")] + public void NotUseAuthV2Url() + { + using (TestHostContext hc = new(this)) + { + // Arrange. 
+                var oauth = new OAuthCredential();
+                oauth.CredentialData = new CredentialData()
+                {
+                    Scheme = Constants.Configuration.OAuth
+                };
+                oauth.CredentialData.Data.Add("clientId", "someClientId");
+                oauth.CredentialData.Data.Add("authorizationUrl", "http://myserver/");
+                oauth.CredentialData.Data.Add("authorizationUrlV2", "http://myserverv2/");
+
+                _rsaKeyManager.Setup(x => x.GetKey()).Returns(RSA.Create(2048));
+                hc.SetSingleton(_rsaKeyManager.Object);
+
+                // Act.
+                var cred = oauth.GetVssCredentials(hc, false); // allowAuthUrlV2 is false
+
+                var cred2 = oauth.GetVssCredentials(hc, true); // allowAuthUrlV2 is true, but the host context has not enabled auth migration
+
+                hc.EnableAuthMigration("L0Test");
+                var cred3 = oauth.GetVssCredentials(hc, false); // auth migration is enabled, but allowAuthUrlV2 is false
+
+                oauth.CredentialData.Data.Remove("authorizationUrlV2");
+                var cred4 = oauth.GetVssCredentials(hc, true); // allowAuthUrlV2 is true and auth migration is enabled, but the v2 url has been removed
+
+                // Assert.
+                Assert.Equal("http://myserver/", (cred.Federated as VssOAuthCredential).AuthorizationUrl.AbsoluteUri);
+                Assert.Equal("someClientId", (cred.Federated as VssOAuthCredential).ClientCredential.ClientId);
+
+                Assert.Equal("http://myserver/", (cred2.Federated as VssOAuthCredential).AuthorizationUrl.AbsoluteUri);
+                Assert.Equal("someClientId", (cred2.Federated as VssOAuthCredential).ClientCredential.ClientId);
+
+                Assert.Equal("http://myserver/", (cred3.Federated as VssOAuthCredential).AuthorizationUrl.AbsoluteUri);
+                Assert.Equal("someClientId", (cred3.Federated as VssOAuthCredential).ClientCredential.ClientId);
+
+                Assert.Equal("http://myserver/", (cred4.Federated as VssOAuthCredential).AuthorizationUrl.AbsoluteUri);
+                Assert.Equal("someClientId", (cred4.Federated as VssOAuthCredential).ClientCredential.ClientId);
+            }
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "OAuthCredential")]
+        public void UseAuthV2Url()
+        {
+            using (TestHostContext hc = new(this))
+            {
+                // Arrange.
+                var oauth = new OAuthCredential();
+                oauth.CredentialData = new CredentialData()
+                {
+                    Scheme = Constants.Configuration.OAuth
+                };
+                oauth.CredentialData.Data.Add("clientId", "someClientId");
+                oauth.CredentialData.Data.Add("authorizationUrl", "http://myserver/");
+                oauth.CredentialData.Data.Add("authorizationUrlV2", "http://myserverv2/");
+
+                _rsaKeyManager.Setup(x => x.GetKey()).Returns(RSA.Create(2048));
+                hc.SetSingleton(_rsaKeyManager.Object);
+
+                // Act.
+                hc.EnableAuthMigration("L0Test");
+                var cred = oauth.GetVssCredentials(hc, true);
+
+                // Assert. 
+ Assert.Equal("http://myserverv2/", (cred.Federated as VssOAuthCredential).AuthorizationUrl.AbsoluteUri); + Assert.Equal("someClientId", (cred.Federated as VssOAuthCredential).ClientCredential.ClientId); + } + } + } +} \ No newline at end of file diff --git a/src/Test/L0/Listener/ErrorThrottlerL0.cs b/src/Test/L0/Listener/ErrorThrottlerL0.cs new file mode 100644 index 000000000..e4118b181 --- /dev/null +++ b/src/Test/L0/Listener/ErrorThrottlerL0.cs @@ -0,0 +1,213 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Listener; +using GitHub.Runner.Listener.Configuration; +using GitHub.Runner.Common.Tests; +using System.Runtime.CompilerServices; +using GitHub.Services.WebApi; +using Moq; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Listener +{ + public sealed class ErrorThrottlerL0 + { + [Theory] + [InlineData(1)] + [InlineData(2)] + [InlineData(3)] + [InlineData(4)] + [InlineData(5)] + [InlineData(6)] + [InlineData(7)] + [InlineData(8)] + public async void TestIncrementAndWait(int totalAttempts) + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange + var errorThrottler = new ErrorThrottler(); + errorThrottler.Initialize(hc); + var eventArgs = new List(); + hc.Delaying += (sender, args) => + { + eventArgs.Add(args); + }; + + // Act + for (int attempt = 1; attempt <= totalAttempts; attempt++) + { + await errorThrottler.IncrementAndWaitAsync(CancellationToken.None); + } + + // Assert + Assert.Equal(totalAttempts - 1, eventArgs.Count); + for (int i = 0; i < eventArgs.Count; i++) + { + // Expected milliseconds + int expectedMin; + int expectedMax; + + switch (i) + { + case 0: + expectedMin = 1000; // Min backoff + expectedMax = 1000; + break; + case 1: + expectedMin = 1800; // Min + 0.8 * Coefficient + expectedMax = 2200; // Min + 1.2 * Coefficient + break; + case 2: + expectedMin = 3400; // Min + 0.8 * Coefficient * 3 + expectedMax = 4600; // Min + 1.2 * Coefficient * 3 + break; + case 3: + expectedMin = 6600; // Min + 0.8 * Coefficient * 7 + expectedMax = 9400; // Min + 1.2 * Coefficient * 7 + break; + case 4: + expectedMin = 13000; // Min + 0.8 * Coefficient * 15 + expectedMax = 19000; // Min + 1.2 * Coefficient * 15 + break; + case 5: + expectedMin = 25800; // Min + 0.8 * Coefficient * 31 + expectedMax = 38200; // Min + 1.2 * Coefficient * 31 + break; + case 6: + expectedMin = 51400; // Min + 0.8 * Coefficient * 63 + expectedMax = 60000; // Max backoff + break; + case 7: + expectedMin = 60000; + expectedMax = 60000; + break; + default: + throw new NotSupportedException("Unexpected eventArgs count"); + } + + var actualMilliseconds = eventArgs[i].Delay.TotalMilliseconds; + Assert.True(expectedMin <= actualMilliseconds, $"Unexpected min delay for eventArgs[{i}]. Expected min {expectedMin}, actual {actualMilliseconds}"); + Assert.True(expectedMax >= actualMilliseconds, $"Unexpected max delay for eventArgs[{i}]. 
Expected max {expectedMax}, actual {actualMilliseconds}"); + } + } + } + + [Fact] + public async void TestReset() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange + var errorThrottler = new ErrorThrottler(); + errorThrottler.Initialize(hc); + var eventArgs = new List(); + hc.Delaying += (sender, args) => + { + eventArgs.Add(args); + }; + + // Act + await errorThrottler.IncrementAndWaitAsync(CancellationToken.None); + await errorThrottler.IncrementAndWaitAsync(CancellationToken.None); + await errorThrottler.IncrementAndWaitAsync(CancellationToken.None); + errorThrottler.Reset(); + await errorThrottler.IncrementAndWaitAsync(CancellationToken.None); + await errorThrottler.IncrementAndWaitAsync(CancellationToken.None); + await errorThrottler.IncrementAndWaitAsync(CancellationToken.None); + + // Assert + Assert.Equal(4, eventArgs.Count); + for (int i = 0; i < eventArgs.Count; i++) + { + // Expected milliseconds + int expectedMin; + int expectedMax; + + switch (i) + { + case 0: + case 2: + expectedMin = 1000; // Min backoff + expectedMax = 1000; + break; + case 1: + case 3: + expectedMin = 1800; // Min + 0.8 * Coefficient + expectedMax = 2200; // Min + 1.2 * Coefficient + break; + default: + throw new NotSupportedException("Unexpected eventArgs count"); + } + + var actualMilliseconds = eventArgs[i].Delay.TotalMilliseconds; + Assert.True(expectedMin <= actualMilliseconds, $"Unexpected min delay for eventArgs[{i}]. Expected min {expectedMin}, actual {actualMilliseconds}"); + Assert.True(expectedMax >= actualMilliseconds, $"Unexpected max delay for eventArgs[{i}]. Expected max {expectedMax}, actual {actualMilliseconds}"); + } + } + } + + [Fact] + public async void TestReceivesCancellationToken() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange + var errorThrottler = new ErrorThrottler(); + errorThrottler.Initialize(hc); + var eventArgs = new List(); + hc.Delaying += (sender, args) => + { + eventArgs.Add(args); + }; + var cancellationTokenSource1 = new CancellationTokenSource(); + var cancellationTokenSource2 = new CancellationTokenSource(); + var cancellationTokenSource3 = new CancellationTokenSource(); + + // Act + await errorThrottler.IncrementAndWaitAsync(cancellationTokenSource1.Token); + await errorThrottler.IncrementAndWaitAsync(cancellationTokenSource2.Token); + await errorThrottler.IncrementAndWaitAsync(cancellationTokenSource3.Token); + + // Assert + Assert.Equal(2, eventArgs.Count); + Assert.Equal(cancellationTokenSource2.Token, eventArgs[0].Token); + Assert.Equal(cancellationTokenSource3.Token, eventArgs[1].Token); + } + } + + [Fact] + public async void TestReceivesSender() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange + var errorThrottler = new ErrorThrottler(); + errorThrottler.Initialize(hc); + var senders = new List(); + hc.Delaying += (sender, args) => + { + senders.Add(sender); + }; + + // Act + await errorThrottler.IncrementAndWaitAsync(CancellationToken.None); + await errorThrottler.IncrementAndWaitAsync(CancellationToken.None); + await errorThrottler.IncrementAndWaitAsync(CancellationToken.None); + + // Assert + Assert.Equal(2, senders.Count); + Assert.Equal(hc, senders[0]); + Assert.Equal(hc, senders[1]); + } + } + + private TestHostContext CreateTestContext([CallerMemberName] String testName = "") + { + return new TestHostContext(this, testName); + } + } +} diff --git a/src/Test/L0/Listener/JobDispatcherL0.cs b/src/Test/L0/Listener/JobDispatcherL0.cs index 9057e6bbf..3b26233a4 100644 --- 
a/src/Test/L0/Listener/JobDispatcherL0.cs +++ b/src/Test/L0/Listener/JobDispatcherL0.cs @@ -36,20 +36,23 @@ namespace GitHub.Runner.Common.Tests.Listener _configurationStore = new Mock(); } - private Pipelines.AgentJobRequestMessage CreateJobRequestMessage() + private Pipelines.AgentJobRequestMessage CreateJobRequestMessage(string billingOwnerId = null) { TaskOrchestrationPlanReference plan = new(); TimelineReference timeline = null; Guid jobId = Guid.NewGuid(); - var result = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "someJob", "someJob", null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var result = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "someJob", "someJob", null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); result.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData(); + result.BillingOwnerId = billingOwnerId; return result; } - [Fact] + [Theory] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void DispatchesJobRequest() + [InlineData(null)] + [InlineData("billingOwnerId")] + public async void DispatchesJobRequest(string billingOwnerId) { //Arrange using (var hc = new TestHostContext(this)) @@ -65,7 +68,7 @@ namespace GitHub.Runner.Common.Tests.Listener jobDispatcher.Initialize(hc); var ts = new CancellationTokenSource(); - Pipelines.AgentJobRequestMessage message = CreateJobRequestMessage(); + Pipelines.AgentJobRequestMessage message = CreateJobRequestMessage(billingOwnerId); string strMessage = JsonUtility.ToString(message); _processInvoker.Setup(x => x.ExecuteAsync(It.IsAny(), It.IsAny(), "spawnclient 1 2", null, It.IsAny())) @@ -734,7 +737,10 @@ namespace GitHub.Runner.Common.Tests.Listener await jobDispatcher.WaitAsync(CancellationToken.None); Assert.True(jobDispatcher.RunOnceJobCompleted.Task.IsCompleted, "JobDispatcher should set task complete token for one time agent."); - Assert.True(jobDispatcher.RunOnceJobCompleted.Task.Result, "JobDispatcher should set task complete token to 'TRUE' for one time agent."); + if (jobDispatcher.RunOnceJobCompleted.Task.IsCompleted) + { + Assert.True(await jobDispatcher.RunOnceJobCompleted.Task, "JobDispatcher should set task complete token to 'TRUE' for one time agent."); + } } } @@ -806,7 +812,8 @@ namespace GitHub.Runner.Common.Tests.Listener }, null, new List(), - new ActionsEnvironmentReference("env") + new ActionsEnvironmentReference("env"), + null ); return message; } diff --git a/src/Test/L0/Listener/MessageListenerL0.cs b/src/Test/L0/Listener/MessageListenerL0.cs index 57a1f60d8..80792539b 100644 --- a/src/Test/L0/Listener/MessageListenerL0.cs +++ b/src/Test/L0/Listener/MessageListenerL0.cs @@ -51,7 +51,7 @@ namespace GitHub.Runner.Common.Tests.Listener [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void CreatesSession() + public async Task CreatesSession() { using (TestHostContext tc = CreateTestContext()) using (var tokenSource = new CancellationTokenSource()) @@ -67,7 +67,7 @@ namespace GitHub.Runner.Common.Tests.Listener tokenSource.Token)) .Returns(Task.FromResult(expectedSession)); - _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + _credMgr.Setup(x => x.LoadCredentials(It.IsAny())).Returns(new 
VssCredentials()); _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); @@ -75,11 +75,11 @@ namespace GitHub.Runner.Common.Tests.Listener MessageListener listener = new(); listener.Initialize(tc); - bool result = await listener.CreateSessionAsync(tokenSource.Token); + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); trace.Info("result: {0}", result); // Assert. - Assert.True(result); + Assert.Equal(CreateSessionResult.Success, result); _runnerServer .Verify(x => x.CreateAgentSessionAsync( _settings.PoolId, @@ -95,69 +95,7 @@ namespace GitHub.Runner.Common.Tests.Listener [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void CreatesSessionWithBrokerMigration() - { - using (TestHostContext tc = CreateTestContext()) - using (var tokenSource = new CancellationTokenSource()) - { - Tracing trace = tc.GetTrace(); - - // Arrange. - var expectedSession = new TaskAgentSession() - { - OwnerName = "legacy", - BrokerMigrationMessage = new BrokerMigrationMessage(new Uri("https://broker.actions.github.com")) - }; - - var expectedBrokerSession = new TaskAgentSession() - { - OwnerName = "broker" - }; - - _runnerServer - .Setup(x => x.CreateAgentSessionAsync( - _settings.PoolId, - It.Is(y => y != null), - tokenSource.Token)) - .Returns(Task.FromResult(expectedSession)); - - _brokerServer - .Setup(x => x.CreateSessionAsync( - It.Is(y => y != null), - tokenSource.Token)) - .Returns(Task.FromResult(expectedBrokerSession)); - - _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); - _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); - _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); - - // Act. - MessageListener listener = new(); - listener.Initialize(tc); - - bool result = await listener.CreateSessionAsync(tokenSource.Token); - trace.Info("result: {0}", result); - - // Assert. 
- Assert.True(result); - - _runnerServer - .Verify(x => x.CreateAgentSessionAsync( - _settings.PoolId, - It.Is(y => y != null), - tokenSource.Token), Times.Once()); - - _brokerServer - .Verify(x => x.CreateSessionAsync( - It.Is(y => y != null), - tokenSource.Token), Times.Once()); - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Runner")] - public async void DeleteSession() + public async Task DeleteSession() { using (TestHostContext tc = CreateTestContext()) using (var tokenSource = new CancellationTokenSource()) @@ -177,7 +115,7 @@ namespace GitHub.Runner.Common.Tests.Listener tokenSource.Token)) .Returns(Task.FromResult(expectedSession)); - _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + _credMgr.Setup(x => x.LoadCredentials(It.IsAny())).Returns(new VssCredentials()); _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); @@ -185,8 +123,8 @@ namespace GitHub.Runner.Common.Tests.Listener MessageListener listener = new(); listener.Initialize(tc); - bool result = await listener.CreateSessionAsync(tokenSource.Token); - Assert.True(result); + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + Assert.Equal(CreateSessionResult.Success, result); _runnerServer .Setup(x => x.DeleteAgentSessionAsync( @@ -204,84 +142,7 @@ namespace GitHub.Runner.Common.Tests.Listener [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void DeleteSessionWithBrokerMigration() - { - using (TestHostContext tc = CreateTestContext()) - using (var tokenSource = new CancellationTokenSource()) - { - Tracing trace = tc.GetTrace(); - - // Arrange. - var expectedSession = new TaskAgentSession() - { - OwnerName = "legacy", - BrokerMigrationMessage = new BrokerMigrationMessage(new Uri("https://broker.actions.github.com")) - }; - - var expectedBrokerSession = new TaskAgentSession() - { - SessionId = Guid.NewGuid(), - OwnerName = "broker" - }; - - _runnerServer - .Setup(x => x.CreateAgentSessionAsync( - _settings.PoolId, - It.Is(y => y != null), - tokenSource.Token)) - .Returns(Task.FromResult(expectedSession)); - - _brokerServer - .Setup(x => x.CreateSessionAsync( - It.Is(y => y != null), - tokenSource.Token)) - .Returns(Task.FromResult(expectedBrokerSession)); - - _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); - _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); - _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); - - // Act. - MessageListener listener = new(); - listener.Initialize(tc); - - bool result = await listener.CreateSessionAsync(tokenSource.Token); - trace.Info("result: {0}", result); - - Assert.True(result); - - _runnerServer - .Verify(x => x.CreateAgentSessionAsync( - _settings.PoolId, - It.Is(y => y != null), - tokenSource.Token), Times.Once()); - - _brokerServer - .Verify(x => x.CreateSessionAsync( - It.Is(y => y != null), - tokenSource.Token), Times.Once()); - - _brokerServer - .Setup(x => x.DeleteSessionAsync(It.IsAny())) - .Returns(Task.CompletedTask); - - // Act. 
- await listener.DeleteSessionAsync(); - - - //Assert - _runnerServer - .Verify(x => x.DeleteAgentSessionAsync( - _settings.PoolId, expectedSession.SessionId, It.IsAny()), Times.Never()); - _brokerServer - .Verify(x => x.DeleteSessionAsync(It.IsAny()), Times.Once()); - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Runner")] - public async void GetNextMessage() + public async Task GetNextMessage() { using (TestHostContext tc = CreateTestContext()) using (var tokenSource = new CancellationTokenSource()) @@ -301,7 +162,7 @@ namespace GitHub.Runner.Common.Tests.Listener tokenSource.Token)) .Returns(Task.FromResult(expectedSession)); - _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + _credMgr.Setup(x => x.LoadCredentials(It.IsAny())).Returns(new VssCredentials()); _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); @@ -309,8 +170,8 @@ namespace GitHub.Runner.Common.Tests.Listener MessageListener listener = new(); listener.Initialize(tc); - bool result = await listener.CreateSessionAsync(tokenSource.Token); - Assert.True(result); + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + Assert.Equal(CreateSessionResult.Success, result); var arMessages = new TaskAgentMessage[] { @@ -362,7 +223,7 @@ namespace GitHub.Runner.Common.Tests.Listener [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void GetNextMessageWithBrokerMigration() + public async Task GetNextMessageWithBrokerMigration() { using (TestHostContext tc = CreateTestContext()) using (var tokenSource = new CancellationTokenSource()) @@ -382,7 +243,7 @@ namespace GitHub.Runner.Common.Tests.Listener tokenSource.Token)) .Returns(Task.FromResult(expectedSession)); - _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + _credMgr.Setup(x => x.LoadCredentials(It.IsAny())).Returns(new VssCredentials()); _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); @@ -390,8 +251,8 @@ namespace GitHub.Runner.Common.Tests.Listener MessageListener listener = new(); listener.Initialize(tc); - bool result = await listener.CreateSessionAsync(tokenSource.Token); - Assert.True(result); + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + Assert.Equal(CreateSessionResult.Success, result); var brokerMigrationMesage = new BrokerMigrationMessage(new Uri("https://actions.broker.com")); @@ -462,13 +323,22 @@ namespace GitHub.Runner.Common.Tests.Listener _brokerServer .Verify(x => x.GetRunnerMessageAsync( expectedSession.SessionId, TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(brokerMessages.Length)); + + _credMgr + .Verify(x => x.LoadCredentials(true), Times.Exactly(brokerMessages.Length)); + + _brokerServer + .Verify(x => x.UpdateConnectionIfNeeded(brokerMigrationMesage.BrokerBaseUrl, It.IsAny()), Times.Exactly(brokerMessages.Length)); + + _brokerServer + .Verify(x => x.ForceRefreshConnection(It.IsAny()), Times.Never); } } [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void CreateSessionWithOriginalCredential() + public async Task CreateSessionWithOriginalCredential() { using (TestHostContext tc = CreateTestContext()) using (var tokenSource = 
new CancellationTokenSource()) @@ -484,7 +354,7 @@ namespace GitHub.Runner.Common.Tests.Listener tokenSource.Token)) .Returns(Task.FromResult(expectedSession)); - _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + _credMgr.Setup(x => x.LoadCredentials(It.IsAny())).Returns(new VssCredentials()); var originalCred = new CredentialData() { Scheme = Constants.Configuration.OAuth }; originalCred.Data["authorizationUrl"] = "https://s.server"; @@ -497,11 +367,11 @@ namespace GitHub.Runner.Common.Tests.Listener MessageListener listener = new(); listener.Initialize(tc); - bool result = await listener.CreateSessionAsync(tokenSource.Token); + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); trace.Info("result: {0}", result); // Assert. - Assert.True(result); + Assert.Equal(CreateSessionResult.Success, result); _runnerServer .Verify(x => x.CreateAgentSessionAsync( _settings.PoolId, @@ -513,7 +383,7 @@ namespace GitHub.Runner.Common.Tests.Listener [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void SkipDeleteSession_WhenGetNextMessageGetTaskAgentAccessTokenExpiredException() + public async Task SkipDeleteSession_WhenGetNextMessageGetTaskAgentAccessTokenExpiredException() { using (TestHostContext tc = CreateTestContext()) using (var tokenSource = new CancellationTokenSource()) @@ -533,7 +403,7 @@ namespace GitHub.Runner.Common.Tests.Listener tokenSource.Token)) .Returns(Task.FromResult(expectedSession)); - _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + _credMgr.Setup(x => x.LoadCredentials(It.IsAny())).Returns(new VssCredentials()); _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); @@ -541,8 +411,8 @@ namespace GitHub.Runner.Common.Tests.Listener MessageListener listener = new(); listener.Initialize(tc); - bool result = await listener.CreateSessionAsync(tokenSource.Token); - Assert.True(result); + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + Assert.Equal(CreateSessionResult.Success, result); _runnerServer .Setup(x => x.GetAgentMessageAsync( @@ -571,5 +441,301 @@ namespace GitHub.Runner.Common.Tests.Listener _settings.PoolId, expectedSession.SessionId, It.IsAny()), Times.Never); } } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task HandleAuthMigrationChanged() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. + var expectedSession = new TaskAgentSession(); + _runnerServer + .Setup(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + _credMgr.Setup(x => x.LoadCredentials(It.IsAny())).Returns(new VssCredentials()); + + // Act. + MessageListener listener = new(); + listener.Initialize(tc); + + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + trace.Info("result: {0}", result); + + // Assert. 
+ Assert.Equal(CreateSessionResult.Success, result); + _runnerServer + .Verify(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token), Times.Once()); + _brokerServer + .Verify(x => x.CreateSessionAsync( + It.Is(y => y != null), + tokenSource.Token), Times.Never()); + + tc.EnableAuthMigration("L0Test"); + + var traceFile = Path.GetTempFileName(); + File.Copy(tc.TraceFileName, traceFile, true); + Assert.Contains("Auth migration changed", File.ReadAllText(traceFile)); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task GetNextMessageWithBrokerMigration_AuthMigrationFallback() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. + var expectedSession = new TaskAgentSession(); + PropertyInfo sessionIdProperty = expectedSession.GetType().GetProperty("SessionId", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(sessionIdProperty); + sessionIdProperty.SetValue(expectedSession, Guid.NewGuid()); + + _runnerServer + .Setup(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + _credMgr.Setup(x => x.LoadCredentials(It.IsAny())).Returns(new VssCredentials()); + _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); + _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); + + // Act. + MessageListener listener = new(); + listener.Initialize(tc); + + tc.EnableAuthMigration("L0Test"); + + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + Assert.Equal(CreateSessionResult.Success, result); + + var brokerMigrationMesage = new BrokerMigrationMessage(new Uri("https://actions.broker.com")); + + var arMessages = new TaskAgentMessage[] + { + new TaskAgentMessage + { + Body = JsonUtility.ToString(brokerMigrationMesage), + MessageType = BrokerMigrationMessage.MessageType + }, + }; + + var brokerMessages = new TaskAgentMessage[] + { + new TaskAgentMessage + { + Body = "somebody1", + MessageId = 4234, + MessageType = JobRequestMessageTypes.PipelineAgentJobRequest + }, + new TaskAgentMessage + { + Body = "somebody2", + MessageId = 4235, + MessageType = JobCancelMessage.MessageType + }, + null, //should be skipped by GetNextMessageAsync implementation + null, + new TaskAgentMessage + { + Body = "somebody3", + MessageId = 4236, + MessageType = JobRequestMessageTypes.PipelineAgentJobRequest + } + }; + var brokerMessageQueue = new Queue(brokerMessages); + + _runnerServer + .Setup(x => x.GetAgentMessageAsync( + _settings.PoolId, expectedSession.SessionId, It.IsAny(), TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(async (Int32 poolId, Guid sessionId, Int64? 
lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) => + { + await Task.Yield(); + return arMessages[0]; // always send migration message + }); + + var counter = 0; + _brokerServer + .Setup(x => x.GetRunnerMessageAsync( + expectedSession.SessionId, TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(async (Guid sessionId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) => + { + counter++; + await Task.Yield(); + if (counter == 2) + { + throw new NotSupportedException("Something wrong."); + } + + return brokerMessageQueue.Dequeue(); + }); + + TaskAgentMessage message1 = await listener.GetNextMessageAsync(tokenSource.Token); + TaskAgentMessage message2 = await listener.GetNextMessageAsync(tokenSource.Token); + TaskAgentMessage message3 = await listener.GetNextMessageAsync(tokenSource.Token); + Assert.Equal(brokerMessages[0], message1); + Assert.Equal(brokerMessages[1], message2); + Assert.Equal(brokerMessages[4], message3); + + //Assert + _runnerServer + .Verify(x => x.GetAgentMessageAsync( + _settings.PoolId, expectedSession.SessionId, It.IsAny(), TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(brokerMessages.Length + 1)); + + _brokerServer + .Verify(x => x.GetRunnerMessageAsync( + expectedSession.SessionId, TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(brokerMessages.Length + 1)); + + _credMgr + .Verify(x => x.LoadCredentials(true), Times.Exactly(brokerMessages.Length + 1)); + + _brokerServer + .Verify(x => x.UpdateConnectionIfNeeded(brokerMigrationMesage.BrokerBaseUrl, It.IsAny()), Times.Exactly(brokerMessages.Length + 1)); + + _brokerServer + .Verify(x => x.ForceRefreshConnection(It.IsAny()), Times.Once()); + + Assert.False(tc.AllowAuthMigration); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task GetNextMessageWithBrokerMigration_EnableAuthMigration() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. + var expectedSession = new TaskAgentSession(); + PropertyInfo sessionIdProperty = expectedSession.GetType().GetProperty("SessionId", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(sessionIdProperty); + sessionIdProperty.SetValue(expectedSession, Guid.NewGuid()); + + _runnerServer + .Setup(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + _credMgr.Setup(x => x.LoadCredentials(It.IsAny())).Returns(new VssCredentials()); + _store.Setup(x => x.GetCredentials()).Returns(new CredentialData() { Scheme = Constants.Configuration.OAuthAccessToken }); + _store.Setup(x => x.GetMigratedCredentials()).Returns(default(CredentialData)); + + // Act. 
+ MessageListener listener = new(); + listener.Initialize(tc); + + CreateSessionResult result = await listener.CreateSessionAsync(tokenSource.Token); + Assert.Equal(CreateSessionResult.Success, result); + + var brokerMigrationMesage = new BrokerMigrationMessage(new Uri("https://actions.broker.com")); + + var arMessages = new TaskAgentMessage[] + { + new TaskAgentMessage + { + Body = JsonUtility.ToString(brokerMigrationMesage), + MessageType = BrokerMigrationMessage.MessageType + }, + }; + + var brokerMessages = new TaskAgentMessage[] + { + new TaskAgentMessage + { + Body = "somebody1", + MessageId = 4234, + MessageType = JobRequestMessageTypes.PipelineAgentJobRequest + }, + new TaskAgentMessage + { + Body = "somebody2", + MessageId = 4235, + MessageType = JobCancelMessage.MessageType + }, + null, //should be skipped by GetNextMessageAsync implementation + null, + new TaskAgentMessage + { + Body = "somebody3", + MessageId = 4236, + MessageType = JobRequestMessageTypes.PipelineAgentJobRequest + } + }; + var brokerMessageQueue = new Queue(brokerMessages); + + _runnerServer + .Setup(x => x.GetAgentMessageAsync( + _settings.PoolId, expectedSession.SessionId, It.IsAny(), TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(async (Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) => + { + await Task.Yield(); + return arMessages[0]; // always send migration message + }); + + _brokerServer + .Setup(x => x.GetRunnerMessageAsync( + expectedSession.SessionId, TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(async (Guid sessionId, TaskAgentStatus status, string runnerVersion, string os, string architecture, bool disableUpdate, CancellationToken cancellationToken) => + { + await Task.Yield(); + if (!tc.AllowAuthMigration) + { + tc.EnableAuthMigration("L0Test"); + } + + return brokerMessageQueue.Dequeue(); + }); + + TaskAgentMessage message1 = await listener.GetNextMessageAsync(tokenSource.Token); + TaskAgentMessage message2 = await listener.GetNextMessageAsync(tokenSource.Token); + TaskAgentMessage message3 = await listener.GetNextMessageAsync(tokenSource.Token); + Assert.Equal(brokerMessages[0], message1); + Assert.Equal(brokerMessages[1], message2); + Assert.Equal(brokerMessages[4], message3); + + //Assert + _runnerServer + .Verify(x => x.GetAgentMessageAsync( + _settings.PoolId, expectedSession.SessionId, It.IsAny(), TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(brokerMessages.Length)); + + _brokerServer + .Verify(x => x.GetRunnerMessageAsync( + expectedSession.SessionId, TaskAgentStatus.Online, It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(brokerMessages.Length)); + + _credMgr + .Verify(x => x.LoadCredentials(true), Times.Exactly(brokerMessages.Length)); + + _brokerServer + .Verify(x => x.UpdateConnectionIfNeeded(brokerMigrationMesage.BrokerBaseUrl, It.IsAny()), Times.Exactly(brokerMessages.Length)); + + _brokerServer + .Verify(x => x.ForceRefreshConnection(It.IsAny()), Times.Once()); + + Assert.True(tc.AllowAuthMigration); + } + } } } diff --git a/src/Test/L0/Listener/RunnerConfigUpdaterTests.cs b/src/Test/L0/Listener/RunnerConfigUpdaterTests.cs new file mode 100644 index 000000000..63deafe5b --- /dev/null +++ b/src/Test/L0/Listener/RunnerConfigUpdaterTests.cs @@ -0,0 +1,678 @@ +using System; 
+using System.Text; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Runner.Common; +using GitHub.Runner.Common.Tests; +using GitHub.Runner.Listener; +using GitHub.Runner.Sdk; +using Moq; +using Xunit; + +namespace GitHub.Runner.Tests.Listener +{ + public class RunnerConfigUpdaterL0 + { + private Mock _configurationStore; + private Mock _runnerServer; + + public RunnerConfigUpdaterL0() + { + _configurationStore = new Mock(); + _runnerServer = new Mock(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_InvalidRunnerQualifiedId_ShouldReportTelemetry() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var invalidRunnerQualifiedId = "invalid/runner/qualified/id"; + var configType = "runner"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(invalidRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is((s) => s.Contains("Runner qualified id")), It.IsAny()), Times.Once); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_ValidRunnerQualifiedId_ShouldNotReportTelemetry() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "runner"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is((s) => s.Contains("Runner qualified id")), It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_InvalidConfigType_ShouldReportTelemetry() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var invalidConfigType = "invalidConfigType"; + var serviceType = "pipelines"; + var configRefreshUrl = 
"http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, invalidConfigType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is((s) => s.Contains("Invalid config type")), It.IsAny()), Times.Once); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_UpdateRunnerSettings_ShouldSucceed() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + + var encodedConfig = Convert.ToBase64String(Encoding.UTF8.GetBytes(StringUtil.ConvertToJson(setting))); + _runnerServer.Setup(x => x.RefreshRunnerConfigAsync(It.IsAny(), It.Is(s => s == "runner"), It.IsAny(), It.IsAny())).ReturnsAsync(encodedConfig); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "runner"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.RefreshRunnerConfigAsync(1, "runner", It.IsAny(), It.IsAny()), Times.Once); + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is(s => s.Contains("Runner settings updated successfully")), It.IsAny()), Times.Once); + _configurationStore.Verify(x => x.SaveMigratedSettings(It.IsAny()), Times.Once); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_UpdateRunnerSettings_IgnoredEmptyRefreshResult() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "runner"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.RefreshRunnerConfigAsync(1, "runner", It.IsAny(), It.IsAny()), Times.Once); + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is(s => s.Contains("Runner settings updated successfully")), It.IsAny()), Times.Never); + _configurationStore.Verify(x => x.SaveMigratedSettings(It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_UpdateRunnerCredentials_ShouldSucceed() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + 
// Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + var credData = new CredentialData + { + Scheme = "OAuth" + }; + credData.Data.Add("ClientId", "12345"); + _configurationStore.Setup(x => x.GetCredentials()).Returns(credData); + + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + IOUtil.SaveObject(credData, hc.GetConfigFile(WellKnownConfigFile.Credentials)); + + var encodedConfig = Convert.ToBase64String(Encoding.UTF8.GetBytes(StringUtil.ConvertToJson(credData))); + _runnerServer.Setup(x => x.RefreshRunnerConfigAsync(It.IsAny(), It.Is(s => s == "credentials"), It.IsAny(), It.IsAny())).ReturnsAsync(encodedConfig); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + hc.EnableAuthMigration("L0Test"); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "credentials"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.RefreshRunnerConfigAsync(1, "credentials", It.IsAny(), It.IsAny()), Times.Once); + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is(s => s.Contains("Runner credentials updated successfully")), It.IsAny()), Times.Once); + _configurationStore.Verify(x => x.SaveMigratedCredential(It.IsAny()), Times.Once); + Assert.False(hc.AllowAuthMigration); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_UpdateRunnerCredentials_IgnoredEmptyRefreshResult() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + var credData = new CredentialData + { + Scheme = "OAuth" + }; + credData.Data.Add("ClientId", "12345"); + _configurationStore.Setup(x => x.GetCredentials()).Returns(credData); + + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + IOUtil.SaveObject(credData, hc.GetConfigFile(WellKnownConfigFile.Credentials)); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "credentials"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.RefreshRunnerConfigAsync(1, "credentials", It.IsAny(), It.IsAny()), Times.Once); + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is(s => s.Contains("Runner credentials updated successfully")), It.IsAny()), Times.Never); + _configurationStore.Verify(x => x.SaveMigratedCredential(It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_RefreshRunnerSettingsFailure_ShouldReportTelemetry() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + 
hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + _runnerServer.Setup(x => x.RefreshRunnerConfigAsync(It.IsAny(), It.Is(s => s == "runner"), It.IsAny(), It.IsAny())).ThrowsAsync(new Exception("Refresh failed")); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "runner"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is((s) => s.Contains("Failed to refresh")), It.IsAny()), Times.Once); + _configurationStore.Verify(x => x.SaveMigratedSettings(It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_RefreshRunnerCredentialsFailure_ShouldReportTelemetry() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + var credData = new CredentialData + { + Scheme = "OAuth" + }; + credData.Data.Add("ClientId", "12345"); + _configurationStore.Setup(x => x.GetCredentials()).Returns(credData); + + _runnerServer.Setup(x => x.RefreshRunnerConfigAsync(It.IsAny(), It.Is(s => s == "credentials"), It.IsAny(), It.IsAny())).ThrowsAsync(new Exception("Refresh failed")); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "credentials"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is((s) => s.Contains("Failed to refresh")), It.IsAny()), Times.Once); + _configurationStore.Verify(x => x.SaveMigratedSettings(It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_RefreshRunnerSettingsWithDifferentRunnerId_ShouldReportTelemetry() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + + var differentRunnerSetting = new RunnerSettings { AgentId = 2, AgentName = "agent1" }; + var encodedConfig = Convert.ToBase64String(Encoding.UTF8.GetBytes(StringUtil.ConvertToJson(differentRunnerSetting))); + _runnerServer.Setup(x => x.RefreshRunnerConfigAsync(It.IsAny(), It.Is(s => s == "runner"), It.IsAny(), It.IsAny())).ReturnsAsync(encodedConfig); + + var _runnerConfigUpdater = new 
RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "runner"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is(s => s.Contains("Runner id in refreshed config")), It.IsAny()), Times.Once); + _configurationStore.Verify(x => x.SaveMigratedSettings(It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_RefreshRunnerSettingsWithDifferentRunnerName_ShouldReportTelemetry() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + + var differentRunnerSetting = new RunnerSettings { AgentId = 1, AgentName = "agent2" }; + var encodedConfig = Convert.ToBase64String(Encoding.UTF8.GetBytes(StringUtil.ConvertToJson(differentRunnerSetting))); + _runnerServer.Setup(x => x.RefreshRunnerConfigAsync(It.IsAny(), It.Is(s => s == "runner"), It.IsAny(), It.IsAny())).ReturnsAsync(encodedConfig); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "runner"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is(s => s.Contains("Runner name in refreshed config")), It.IsAny()), Times.Once); + _configurationStore.Verify(x => x.SaveMigratedSettings(It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_RefreshCredentialsWithDifferentScheme_ShouldReportTelemetry() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + var credData = new CredentialData + { + Scheme = "OAuth" + }; + credData.Data.Add("ClientId", "12345"); + _configurationStore.Setup(x => x.GetCredentials()).Returns(credData); + + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + IOUtil.SaveObject(credData, hc.GetConfigFile(WellKnownConfigFile.Credentials)); + + var differentCredData = new CredentialData + { + Scheme = "PAT" + }; + differentCredData.Data.Add("ClientId", "12345"); + var encodedConfig = Convert.ToBase64String(Encoding.UTF8.GetBytes(StringUtil.ConvertToJson(differentCredData))); + _runnerServer.Setup(x => x.RefreshRunnerConfigAsync(It.IsAny(), It.Is(s => s == "credentials"), It.IsAny(), It.IsAny())).ReturnsAsync(encodedConfig); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var 
validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "credentials"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is(s => s.Contains("Credential scheme in refreshed config")), It.IsAny()), Times.Once); + _configurationStore.Verify(x => x.SaveMigratedCredential(It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_RefreshOAuthCredentialsWithDifferentClientId_ShouldReportTelemetry() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + var credData = new CredentialData + { + Scheme = "OAuth" + }; + credData.Data.Add("clientId", "12345"); + _configurationStore.Setup(x => x.GetCredentials()).Returns(credData); + + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + IOUtil.SaveObject(credData, hc.GetConfigFile(WellKnownConfigFile.Credentials)); + + var differentCredData = new CredentialData + { + Scheme = "OAuth" + }; + differentCredData.Data.Add("clientId", "67890"); + var encodedConfig = Convert.ToBase64String(Encoding.UTF8.GetBytes(StringUtil.ConvertToJson(differentCredData))); + _runnerServer.Setup(x => x.RefreshRunnerConfigAsync(It.IsAny(), It.Is(s => s == "credentials"), It.IsAny(), It.IsAny())).ReturnsAsync(encodedConfig); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "credentials"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is(s => s.Contains("Credential clientId in refreshed config")), It.IsAny()), Times.Once); + _configurationStore.Verify(x => x.SaveMigratedCredential(It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_RefreshOAuthCredentialsWithDifferentAuthUrl_ShouldReportTelemetry() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + var credData = new CredentialData + { + Scheme = "OAuth" + }; + credData.Data.Add("clientId", "12345"); + credData.Data.Add("authorizationUrl", "http://example.com/"); + _configurationStore.Setup(x => x.GetCredentials()).Returns(credData); + + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + IOUtil.SaveObject(credData, hc.GetConfigFile(WellKnownConfigFile.Credentials)); + + var differentCredData = new CredentialData + { + Scheme = "OAuth" + }; + differentCredData.Data.Add("clientId", "12345"); + 
differentCredData.Data.Add("authorizationUrl", "http://example2.com/"); + var encodedConfig = Convert.ToBase64String(Encoding.UTF8.GetBytes(StringUtil.ConvertToJson(differentCredData))); + _runnerServer.Setup(x => x.RefreshRunnerConfigAsync(It.IsAny(), It.Is(s => s == "credentials"), It.IsAny(), It.IsAny())).ReturnsAsync(encodedConfig); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "credentials"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is(s => s.Contains("Credential authorizationUrl in refreshed config")), It.IsAny()), Times.Once); + _configurationStore.Verify(x => x.SaveMigratedCredential(It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_UnsupportedServiceType_ShouldReportTelemetry() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "runner"; + var serviceType = "unsupported-service"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is((s) => s.Contains("Invalid service type")), It.IsAny()), Times.Once); + _runnerServer.Verify(x => x.RefreshRunnerConfigAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Never); + _configurationStore.Verify(x => x.SaveMigratedSettings(It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_RunnerAdminService_ShouldThrowNotSupported() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "runner"; + var serviceType = "runner-admin"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is((s) => s.Contains("Runner admin service is not supported")), It.IsAny()), Times.Once); + _runnerServer.Verify(x => 
x.RefreshRunnerConfigAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Never); + _configurationStore.Verify(x => x.SaveMigratedSettings(It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task UpdateRunnerConfigAsync_UpdateRunnerCredentials_EnableDisableAuthMigration() + { + using (var hc = new TestHostContext(this)) + { + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_runnerServer.Object); + + // Arrange + var setting = new RunnerSettings { AgentId = 1, AgentName = "agent1" }; + _configurationStore.Setup(x => x.GetSettings()).Returns(setting); + var credData = new CredentialData + { + Scheme = "OAuth" + }; + credData.Data.Add("ClientId", "12345"); + credData.Data.Add("AuthorizationUrl", "https://example.com"); + credData.Data.Add("AuthorizationUrlV2", "https://example2.com"); + _configurationStore.Setup(x => x.GetCredentials()).Returns(credData); + + IOUtil.SaveObject(setting, hc.GetConfigFile(WellKnownConfigFile.Runner)); + IOUtil.SaveObject(credData, hc.GetConfigFile(WellKnownConfigFile.Credentials)); + + var encodedConfig = Convert.ToBase64String(Encoding.UTF8.GetBytes(StringUtil.ConvertToJson(credData))); + _runnerServer.Setup(x => x.RefreshRunnerConfigAsync(It.IsAny(), It.Is(s => s == "credentials"), It.IsAny(), It.IsAny())).ReturnsAsync(encodedConfig); + + var _runnerConfigUpdater = new RunnerConfigUpdater(); + _runnerConfigUpdater.Initialize(hc); + Assert.False(hc.AllowAuthMigration); + + var validRunnerQualifiedId = "valid/runner/qualifiedid/1"; + var configType = "credentials"; + var serviceType = "pipelines"; + var configRefreshUrl = "http://example.com"; + + // Act + await _runnerConfigUpdater.UpdateRunnerConfigAsync(validRunnerQualifiedId, configType, serviceType, configRefreshUrl); + + // Assert + _runnerServer.Verify(x => x.RefreshRunnerConfigAsync(1, "credentials", It.IsAny(), It.IsAny()), Times.Once); + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is(s => s.Contains("Runner credentials updated successfully")), It.IsAny()), Times.Once); + _configurationStore.Verify(x => x.SaveMigratedCredential(It.IsAny()), Times.Once); + Assert.True(hc.AllowAuthMigration); + } + } + } +} diff --git a/src/Test/L0/Listener/RunnerL0.cs b/src/Test/L0/Listener/RunnerL0.cs index 47df4de25..456f51cc9 100644 --- a/src/Test/L0/Listener/RunnerL0.cs +++ b/src/Test/L0/Listener/RunnerL0.cs @@ -1,13 +1,15 @@ -using GitHub.DistributedTask.WebApi; -using GitHub.Runner.Listener; -using GitHub.Runner.Listener.Configuration; -using Moq; -using System; +using System; using System.Collections.Generic; +using System.IO; using System.Threading; using System.Threading.Tasks; -using Xunit; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Listener; +using GitHub.Runner.Listener.Configuration; +using GitHub.Services.Common; using GitHub.Services.WebApi; +using Moq; +using Xunit; using Pipelines = GitHub.DistributedTask.Pipelines; namespace GitHub.Runner.Common.Tests.Listener @@ -23,6 +25,10 @@ namespace GitHub.Runner.Common.Tests.Listener private Mock _term; private Mock _configStore; private Mock _updater; + private Mock _acquireJobThrottler; + private Mock _credentialManager; + private Mock _actionsRunServer; + private Mock _runServer; public RunnerL0() { @@ -35,6 +41,10 @@ namespace GitHub.Runner.Common.Tests.Listener _term = new Mock(); _configStore = new Mock(); _updater = new Mock(); + _acquireJobThrottler = new Mock(); + _credentialManager = new Mock(); + 
_actionsRunServer = new Mock(); + _runServer = new Mock(); } private Pipelines.AgentJobRequestMessage CreateJobRequestMessage(string jobName) @@ -42,7 +52,7 @@ namespace GitHub.Runner.Common.Tests.Listener TaskOrchestrationPlanReference plan = new(); TimelineReference timeline = null; Guid jobId = Guid.NewGuid(); - return new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + return new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); } private JobCancelMessage CreateJobCancelMessage() @@ -55,7 +65,7 @@ namespace GitHub.Runner.Common.Tests.Listener [Trait("Level", "L0")] [Trait("Category", "Runner")] //process 2 new job messages, and one cancel message - public async void TestRunAsync() + public async Task TestRunAsync() { using (var hc = new TestHostContext(this)) { @@ -67,6 +77,7 @@ namespace GitHub.Runner.Common.Tests.Listener hc.SetSingleton(_promptManager.Object); hc.SetSingleton(_runnerServer.Object); hc.SetSingleton(_configStore.Object); + hc.EnqueueInstance(_acquireJobThrottler.Object); runner.Initialize(hc); var settings = new RunnerSettings { @@ -88,7 +99,7 @@ namespace GitHub.Runner.Common.Tests.Listener _configurationManager.Setup(x => x.IsConfigured()) .Returns(true); _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) - .Returns(Task.FromResult(true)); + .Returns(Task.FromResult(CreateSessionResult.Success)); _messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny())) .Returns(async () => { @@ -126,7 +137,7 @@ namespace GitHub.Runner.Common.Tests.Listener //wait for the runner to run one job if (!await signalWorkerComplete.WaitAsync(2000)) { - Assert.True(false, $"{nameof(_messageListener.Object.GetNextMessageAsync)} was not invoked."); + Assert.Fail($"{nameof(_messageListener.Object.GetNextMessageAsync)} was not invoked."); } else { @@ -166,7 +177,7 @@ namespace GitHub.Runner.Common.Tests.Listener [MemberData(nameof(RunAsServiceTestData))] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void TestExecuteCommandForRunAsService(string[] args, bool configureAsService, Times expectedTimes) + public async Task TestExecuteCommandForRunAsService(string[] args, bool configureAsService, Times expectedTimes) { using (var hc = new TestHostContext(this)) { @@ -174,6 +185,8 @@ namespace GitHub.Runner.Common.Tests.Listener hc.SetSingleton(_promptManager.Object); hc.SetSingleton(_messageListener.Object); hc.SetSingleton(_configStore.Object); + hc.SetSingleton(_runnerServer.Object); + hc.EnqueueInstance(_acquireJobThrottler.Object); var command = new CommandSettings(hc, args); @@ -184,7 +197,7 @@ namespace GitHub.Runner.Common.Tests.Listener _configStore.Setup(x => x.IsServiceConfigured()).Returns(configureAsService); _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) - .Returns(Task.FromResult(false)); + .Returns(Task.FromResult(CreateSessionResult.Failure)); var runner = new Runner.Listener.Runner(); runner.Initialize(hc); @@ -197,7 +210,7 @@ namespace GitHub.Runner.Common.Tests.Listener [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void TestMachineProvisionerCLI() + public 
async Task TestMachineProvisionerCLI() { using (var hc = new TestHostContext(this)) { @@ -205,6 +218,8 @@ namespace GitHub.Runner.Common.Tests.Listener hc.SetSingleton(_promptManager.Object); hc.SetSingleton(_messageListener.Object); hc.SetSingleton(_configStore.Object); + hc.SetSingleton(_runnerServer.Object); + hc.EnqueueInstance(_acquireJobThrottler.Object); var command = new CommandSettings(hc, new[] { "run" }); @@ -217,7 +232,7 @@ namespace GitHub.Runner.Common.Tests.Listener .Returns(false); _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) - .Returns(Task.FromResult(false)); + .Returns(Task.FromResult(CreateSessionResult.Failure)); var runner = new Runner.Listener.Runner(); runner.Initialize(hc); @@ -230,7 +245,7 @@ namespace GitHub.Runner.Common.Tests.Listener [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void TestRunOnce() + public async Task TestRunOnce() { using (var hc = new TestHostContext(this)) { @@ -242,6 +257,7 @@ namespace GitHub.Runner.Common.Tests.Listener hc.SetSingleton(_promptManager.Object); hc.SetSingleton(_runnerServer.Object); hc.SetSingleton(_configStore.Object); + hc.EnqueueInstance(_acquireJobThrottler.Object); runner.Initialize(hc); var settings = new RunnerSettings { @@ -263,7 +279,7 @@ namespace GitHub.Runner.Common.Tests.Listener _configurationManager.Setup(x => x.IsConfigured()) .Returns(true); _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) - .Returns(Task.FromResult(true)); + .Returns(Task.FromResult(CreateSessionResult.Success)); _messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny())) .Returns(async () => { @@ -305,8 +321,11 @@ namespace GitHub.Runner.Common.Tests.Listener await Task.WhenAny(runnerTask, Task.Delay(30000)); Assert.True(runnerTask.IsCompleted, $"{nameof(runner.ExecuteCommand)} timed out."); - Assert.True(!runnerTask.IsFaulted, runnerTask.Exception?.ToString()); - Assert.True(runnerTask.Result == Constants.Runner.ReturnCode.Success); + Assert.False(runnerTask.IsFaulted, runnerTask.Exception?.ToString()); + if (runnerTask.IsCompleted) + { + Assert.Equal(Constants.Runner.ReturnCode.Success, await runnerTask); + } _jobDispatcher.Verify(x => x.Run(It.IsAny(), true), Times.Once(), $"{nameof(_jobDispatcher.Object.Run)} was not invoked."); @@ -323,7 +342,7 @@ namespace GitHub.Runner.Common.Tests.Listener [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void TestRunOnceOnlyTakeOneJobMessage() + public async Task TestRunOnceOnlyTakeOneJobMessage() { using (var hc = new TestHostContext(this)) { @@ -335,6 +354,7 @@ namespace GitHub.Runner.Common.Tests.Listener hc.SetSingleton(_promptManager.Object); hc.SetSingleton(_runnerServer.Object); hc.SetSingleton(_configStore.Object); + hc.EnqueueInstance(_acquireJobThrottler.Object); runner.Initialize(hc); var settings = new RunnerSettings { @@ -363,7 +383,7 @@ namespace GitHub.Runner.Common.Tests.Listener _configurationManager.Setup(x => x.IsConfigured()) .Returns(true); _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) - .Returns(Task.FromResult(true)); + .Returns(Task.FromResult(CreateSessionResult.Success)); _messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny())) .Returns(async () => { @@ -406,7 +426,10 @@ namespace GitHub.Runner.Common.Tests.Listener Assert.True(runnerTask.IsCompleted, $"{nameof(runner.ExecuteCommand)} timed out."); Assert.True(!runnerTask.IsFaulted, runnerTask.Exception?.ToString()); - Assert.True(runnerTask.Result == Constants.Runner.ReturnCode.Success); + if 
(runnerTask.IsCompleted) + { + Assert.Equal(Constants.Runner.ReturnCode.Success, await runnerTask); + } _jobDispatcher.Verify(x => x.Run(It.IsAny(), true), Times.Once(), $"{nameof(_jobDispatcher.Object.Run)} was not invoked."); @@ -420,7 +443,7 @@ namespace GitHub.Runner.Common.Tests.Listener [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void TestRunOnceHandleUpdateMessage() + public async Task TestRunOnceHandleUpdateMessage() { using (var hc = new TestHostContext(this)) { @@ -433,6 +456,7 @@ namespace GitHub.Runner.Common.Tests.Listener hc.SetSingleton(_runnerServer.Object); hc.SetSingleton(_configStore.Object); hc.SetSingleton(_updater.Object); + hc.EnqueueInstance(_acquireJobThrottler.Object); runner.Initialize(hc); var settings = new RunnerSettings @@ -458,7 +482,7 @@ namespace GitHub.Runner.Common.Tests.Listener _configurationManager.Setup(x => x.IsConfigured()) .Returns(true); _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) - .Returns(Task.FromResult(true)); + .Returns(Task.FromResult(CreateSessionResult.Success)); _messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny())) .Returns(async () => { @@ -492,7 +516,10 @@ namespace GitHub.Runner.Common.Tests.Listener Assert.True(runnerTask.IsCompleted, $"{nameof(runner.ExecuteCommand)} timed out."); Assert.True(!runnerTask.IsFaulted, runnerTask.Exception?.ToString()); - Assert.True(runnerTask.Result == Constants.Runner.ReturnCode.RunOnceRunnerUpdating); + if (runnerTask.IsCompleted) + { + Assert.Equal(Constants.Runner.ReturnCode.RunOnceRunnerUpdating, await runnerTask); + } _updater.Verify(x => x.SelfUpdate(It.IsAny(), It.IsAny(), false, It.IsAny()), Times.Once); _jobDispatcher.Verify(x => x.Run(It.IsAny(), true), Times.Never()); @@ -506,13 +533,15 @@ namespace GitHub.Runner.Common.Tests.Listener [Fact] [Trait("Level", "L0")] [Trait("Category", "Runner")] - public async void TestRemoveLocalRunnerConfig() + public async Task TestRemoveLocalRunnerConfig() { using (var hc = new TestHostContext(this)) { hc.SetSingleton(_configurationManager.Object); hc.SetSingleton(_configStore.Object); hc.SetSingleton(_promptManager.Object); + hc.SetSingleton(_runnerServer.Object); + hc.EnqueueInstance(_acquireJobThrottler.Object); var command = new CommandSettings(hc, new[] { "remove", "--local" }); @@ -531,5 +560,521 @@ namespace GitHub.Runner.Common.Tests.Listener _configurationManager.Verify(x => x.DeleteLocalRunnerConfig(), Times.Once()); } } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task TestReportAuthMigrationTelemetry() + { + using (var hc = new TestHostContext(this)) + { + //Arrange + var runner = new Runner.Listener.Runner(); + hc.SetSingleton(_configurationManager.Object); + hc.SetSingleton(_jobNotification.Object); + hc.SetSingleton(_messageListener.Object); + hc.SetSingleton(_promptManager.Object); + hc.SetSingleton(_runnerServer.Object); + hc.SetSingleton(_configStore.Object); + hc.SetSingleton(_credentialManager.Object); + hc.EnqueueInstance(_acquireJobThrottler.Object); + hc.EnqueueInstance(_jobDispatcher.Object); + + runner.Initialize(hc); + var settings = new RunnerSettings + { + PoolId = 43242, + AgentId = 5678, + Ephemeral = true + }; + + var message1 = new TaskAgentMessage() + { + MessageId = 4234, + MessageType = "unknown" + }; + + var messages = new Queue(); + messages.Enqueue(message1); + _updater.Setup(x => x.SelfUpdate(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(Task.FromResult(true)); + _configurationManager.Setup(x => 
x.LoadSettings()) + .Returns(settings); + _configurationManager.Setup(x => x.IsConfigured()) + .Returns(true); + _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) + .Returns(Task.FromResult(CreateSessionResult.Success)); + _messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny())) + .Returns(async (CancellationToken token) => + { + hc.GetTrace().Info("Waiting for message"); + Assert.False(hc.AllowAuthMigration); + await Task.Delay(100, token); + + var traceFile = Path.GetTempFileName(); + File.Copy(hc.TraceFileName, traceFile, true); + Assert.DoesNotContain("Checking for auth migration telemetry to report", File.ReadAllText(traceFile)); + + hc.EnableAuthMigration("L0Test"); + hc.DeferAuthMigration(TimeSpan.FromSeconds(1), "L0Test"); + hc.EnableAuthMigration("L0Test"); + hc.DeferAuthMigration(TimeSpan.FromSeconds(1), "L0Test"); + + await Task.Delay(1000, token); + + hc.ShutdownRunner(ShutdownReason.UserCancelled); + + File.Copy(hc.TraceFileName, traceFile, true); + Assert.Contains("Checking for auth migration telemetry to report", File.ReadAllText(traceFile)); + + return messages.Dequeue(); + }); + _messageListener.Setup(x => x.DeleteSessionAsync()) + .Returns(Task.CompletedTask); + _messageListener.Setup(x => x.DeleteMessageAsync(It.IsAny())) + .Returns(Task.CompletedTask); + _jobNotification.Setup(x => x.StartClient(It.IsAny())) + .Callback(() => + { + + }); + + _configStore.Setup(x => x.IsServiceConfigured()).Returns(false); + + _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(Task.FromResult(new TaskAgent())); + + //Act + var command = new CommandSettings(hc, new string[] { "run" }); + var returnCode = await runner.ExecuteCommand(command); + + //Assert + Assert.Equal(Constants.Runner.ReturnCode.Success, returnCode); + + _messageListener.Verify(x => x.GetNextMessageAsync(It.IsAny()), Times.AtLeastOnce()); + _messageListener.Verify(x => x.CreateSessionAsync(It.IsAny()), Times.Once()); + _messageListener.Verify(x => x.DeleteSessionAsync(), Times.Once()); + _messageListener.Verify(x => x.DeleteMessageAsync(It.IsAny()), Times.Once()); + + _runnerServer.Verify(x => x.UpdateAgentUpdateStateAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.Is(s => s.Contains("L0Test")), It.IsAny()), Times.Exactly(4)); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task TestRunnerJobRequestMessageFromPipeline() + { + using (var hc = new TestHostContext(this)) + { + //Arrange + var runner = new Runner.Listener.Runner(); + hc.SetSingleton(_configurationManager.Object); + hc.SetSingleton(_jobNotification.Object); + hc.SetSingleton(_messageListener.Object); + hc.SetSingleton(_promptManager.Object); + hc.SetSingleton(_runnerServer.Object); + hc.SetSingleton(_configStore.Object); + hc.SetSingleton(_updater.Object); + hc.SetSingleton(_credentialManager.Object); + hc.EnqueueInstance(_acquireJobThrottler.Object); + hc.EnqueueInstance(_actionsRunServer.Object); + hc.EnqueueInstance(_jobDispatcher.Object); + + runner.Initialize(hc); + var settings = new RunnerSettings + { + PoolId = 43242, + AgentId = 5678, + Ephemeral = true, + ServerUrl = "https://github.com", + }; + + var message1 = new TaskAgentMessage() + { + Body = JsonUtility.ToString(new RunnerJobRequestRef() { BillingOwnerId = "github", RunnerRequestId = "999" }), + MessageId = 4234, + MessageType = JobRequestMessageTypes.RunnerJobRequest + }; + + var messages = new Queue(); + messages.Enqueue(message1); + _updater.Setup(x => 
x.SelfUpdate(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(Task.FromResult(true)); + _configurationManager.Setup(x => x.LoadSettings()) + .Returns(settings); + _configurationManager.Setup(x => x.IsConfigured()) + .Returns(true); + _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) + .Returns(Task.FromResult(CreateSessionResult.Success)); + _messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny())) + .Returns(async (CancellationToken token) => + { + if (0 == messages.Count) + { + await Task.Delay(2000, token); + } + + return messages.Dequeue(); + }); + _messageListener.Setup(x => x.DeleteSessionAsync()) + .Returns(Task.CompletedTask); + _messageListener.Setup(x => x.DeleteMessageAsync(It.IsAny())) + .Returns(Task.CompletedTask); + _jobNotification.Setup(x => x.StartClient(It.IsAny())) + .Callback(() => + { + + }); + _actionsRunServer.Setup(x => x.GetJobMessageAsync("999", It.IsAny())) + .Returns(Task.FromResult(CreateJobRequestMessage("test"))); + + _credentialManager.Setup(x => x.LoadCredentials(false)).Returns(new VssCredentials()); + + _configStore.Setup(x => x.IsServiceConfigured()).Returns(false); + + var completedTask = new TaskCompletionSource(); + completedTask.SetResult(true); + _jobDispatcher.Setup(x => x.RunOnceJobCompleted).Returns(completedTask); + + //Act + var command = new CommandSettings(hc, new string[] { "run" }); + Task runnerTask = runner.ExecuteCommand(command); + + //Assert + //wait for the runner to exit with right return code + await Task.WhenAny(runnerTask, Task.Delay(30000)); + + Assert.True(runnerTask.IsCompleted, $"{nameof(runner.ExecuteCommand)} timed out."); + Assert.True(!runnerTask.IsFaulted, runnerTask.Exception?.ToString()); + if (runnerTask.IsCompleted) + { + Assert.Equal(Constants.Runner.ReturnCode.Success, await runnerTask); + } + + _jobDispatcher.Verify(x => x.Run(It.IsAny(), true), Times.Once()); + _messageListener.Verify(x => x.GetNextMessageAsync(It.IsAny()), Times.AtLeastOnce()); + _messageListener.Verify(x => x.CreateSessionAsync(It.IsAny()), Times.Once()); + _messageListener.Verify(x => x.DeleteSessionAsync(), Times.Once()); + _messageListener.Verify(x => x.DeleteMessageAsync(It.IsAny()), Times.Once()); + _credentialManager.Verify(x => x.LoadCredentials(false), Times.Once()); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task TestRunnerJobRequestMessageFromRunService() + { + using (var hc = new TestHostContext(this)) + { + //Arrange + var runner = new Runner.Listener.Runner(); + hc.SetSingleton(_configurationManager.Object); + hc.SetSingleton(_jobNotification.Object); + hc.SetSingleton(_messageListener.Object); + hc.SetSingleton(_promptManager.Object); + hc.SetSingleton(_runnerServer.Object); + hc.SetSingleton(_configStore.Object); + hc.SetSingleton(_updater.Object); + hc.SetSingleton(_credentialManager.Object); + hc.EnqueueInstance(_acquireJobThrottler.Object); + hc.EnqueueInstance(_runServer.Object); + hc.EnqueueInstance(_jobDispatcher.Object); + + runner.Initialize(hc); + var settings = new RunnerSettings + { + PoolId = 43242, + AgentId = 5678, + Ephemeral = true, + ServerUrl = "https://github.com", + }; + + var message1 = new TaskAgentMessage() + { + Body = JsonUtility.ToString(new RunnerJobRequestRef() { BillingOwnerId = "github", RunnerRequestId = "999", RunServiceUrl = "https://run-service.com" }), + MessageId = 4234, + MessageType = JobRequestMessageTypes.RunnerJobRequest + }; + + var messages = new Queue(); + messages.Enqueue(message1); + _updater.Setup(x => 
x.SelfUpdate(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(Task.FromResult(true)); + _configurationManager.Setup(x => x.LoadSettings()) + .Returns(settings); + _configurationManager.Setup(x => x.IsConfigured()) + .Returns(true); + _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) + .Returns(Task.FromResult(CreateSessionResult.Success)); + _messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny())) + .Returns(async (CancellationToken token) => + { + if (0 == messages.Count) + { + await Task.Delay(2000, token); + } + + return messages.Dequeue(); + }); + _messageListener.Setup(x => x.DeleteSessionAsync()) + .Returns(Task.CompletedTask); + _messageListener.Setup(x => x.DeleteMessageAsync(It.IsAny())) + .Returns(Task.CompletedTask); + _jobNotification.Setup(x => x.StartClient(It.IsAny())) + .Callback(() => + { + + }); + _runServer.Setup(x => x.GetJobMessageAsync("999", "github", It.IsAny())) + .Returns(Task.FromResult(CreateJobRequestMessage("test"))); + + _credentialManager.Setup(x => x.LoadCredentials(true)).Returns(new VssCredentials()); + + _configStore.Setup(x => x.IsServiceConfigured()).Returns(false); + + var completedTask = new TaskCompletionSource(); + completedTask.SetResult(true); + _jobDispatcher.Setup(x => x.RunOnceJobCompleted).Returns(completedTask); + + //Act + var command = new CommandSettings(hc, new string[] { "run" }); + Task runnerTask = runner.ExecuteCommand(command); + + //Assert + //wait for the runner to exit with right return code + await Task.WhenAny(runnerTask, Task.Delay(30000)); + + Assert.True(runnerTask.IsCompleted, $"{nameof(runner.ExecuteCommand)} timed out."); + Assert.True(!runnerTask.IsFaulted, runnerTask.Exception?.ToString()); + if (runnerTask.IsCompleted) + { + Assert.Equal(Constants.Runner.ReturnCode.Success, await runnerTask); + } + + _jobDispatcher.Verify(x => x.Run(It.IsAny(), true), Times.Once()); + _messageListener.Verify(x => x.GetNextMessageAsync(It.IsAny()), Times.AtLeastOnce()); + _messageListener.Verify(x => x.CreateSessionAsync(It.IsAny()), Times.Once()); + _messageListener.Verify(x => x.DeleteSessionAsync(), Times.Once()); + _messageListener.Verify(x => x.DeleteMessageAsync(It.IsAny()), Times.Once()); + _credentialManager.Verify(x => x.LoadCredentials(true), Times.Once()); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task TestRunnerJobRequestMessageFromRunService_AuthMigrationFallback() + { + using (var hc = new TestHostContext(this)) + { + //Arrange + var runner = new Runner.Listener.Runner(); + hc.SetSingleton(_configurationManager.Object); + hc.SetSingleton(_jobNotification.Object); + hc.SetSingleton(_messageListener.Object); + hc.SetSingleton(_promptManager.Object); + hc.SetSingleton(_runnerServer.Object); + hc.SetSingleton(_configStore.Object); + hc.SetSingleton(_updater.Object); + hc.SetSingleton(_credentialManager.Object); + hc.EnqueueInstance(_acquireJobThrottler.Object); + hc.EnqueueInstance(_jobDispatcher.Object); + hc.EnqueueInstance(_runServer.Object); + hc.EnqueueInstance(_runServer.Object); + + runner.Initialize(hc); + var settings = new RunnerSettings + { + PoolId = 43242, + AgentId = 5678, + Ephemeral = true, + ServerUrl = "https://github.com", + }; + + var message1 = new TaskAgentMessage() + { + Body = JsonUtility.ToString(new RunnerJobRequestRef() { BillingOwnerId = "github", RunnerRequestId = "999", RunServiceUrl = "https://run-service.com" }), + MessageId = 4234, + MessageType = JobRequestMessageTypes.RunnerJobRequest + }; + + var messages = new 
Queue(); + messages.Enqueue(message1); + messages.Enqueue(message1); + _updater.Setup(x => x.SelfUpdate(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(Task.FromResult(true)); + _configurationManager.Setup(x => x.LoadSettings()) + .Returns(settings); + _configurationManager.Setup(x => x.IsConfigured()) + .Returns(true); + _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) + .Returns(Task.FromResult(CreateSessionResult.Success)); + _messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny())) + .Returns(async (CancellationToken token) => + { + if (2 == messages.Count) + { + hc.EnableAuthMigration("L0Test"); + } + + if (0 == messages.Count) + { + await Task.Delay(2000, token); + } + + return messages.Dequeue(); + }); + _messageListener.Setup(x => x.DeleteSessionAsync()) + .Returns(Task.CompletedTask); + _messageListener.Setup(x => x.DeleteMessageAsync(It.IsAny())) + .Returns(Task.CompletedTask); + _jobNotification.Setup(x => x.StartClient(It.IsAny())) + .Callback(() => + { + + }); + + var throwError = true; + _runServer.Setup(x => x.GetJobMessageAsync("999", "github", It.IsAny())) + .Returns(() => + { + if (throwError) + { + Assert.True(hc.AllowAuthMigration); + throwError = false; + throw new NotSupportedException("some error"); + } + + return Task.FromResult(CreateJobRequestMessage("test")); + }); + + _credentialManager.Setup(x => x.LoadCredentials(true)).Returns(new VssCredentials()); + + _configStore.Setup(x => x.IsServiceConfigured()).Returns(false); + + var completedTask = new TaskCompletionSource(); + completedTask.SetResult(true); + _jobDispatcher.Setup(x => x.RunOnceJobCompleted).Returns(completedTask); + + //Act + var command = new CommandSettings(hc, new string[] { "run" }); + Task runnerTask = runner.ExecuteCommand(command); + + //Assert + //wait for the runner to exit with right return code + await Task.WhenAny(runnerTask, Task.Delay(30000)); + + Assert.True(runnerTask.IsCompleted, $"{nameof(runner.ExecuteCommand)} timed out."); + Assert.True(!runnerTask.IsFaulted, runnerTask.Exception?.ToString()); + if (runnerTask.IsCompleted) + { + Assert.Equal(Constants.Runner.ReturnCode.Success, await runnerTask); + } + + _jobDispatcher.Verify(x => x.Run(It.IsAny(), true), Times.Once()); + _messageListener.Verify(x => x.CreateSessionAsync(It.IsAny()), Times.Once()); + _messageListener.Verify(x => x.GetNextMessageAsync(It.IsAny()), Times.AtLeast(2)); + _messageListener.Verify(x => x.DeleteMessageAsync(It.IsAny()), Times.AtLeast(2)); + _messageListener.Verify(x => x.DeleteSessionAsync(), Times.Once()); + _credentialManager.Verify(x => x.LoadCredentials(true), Times.AtLeast(2)); + + Assert.False(hc.AllowAuthMigration); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Runner")] + public async Task TestRunnerEnableAuthMigrationByDefault() + { + using (var hc = new TestHostContext(this)) + { + //Arrange + var runner = new Runner.Listener.Runner(); + hc.SetSingleton(_configurationManager.Object); + hc.SetSingleton(_jobNotification.Object); + hc.SetSingleton(_messageListener.Object); + hc.SetSingleton(_promptManager.Object); + hc.SetSingleton(_configStore.Object); + hc.SetSingleton(_credentialManager.Object); + hc.SetSingleton(_runnerServer.Object); + hc.EnqueueInstance(_acquireJobThrottler.Object); + + runner.Initialize(hc); + var settings = new RunnerSettings + { + PoolId = 43242, + AgentId = 5678, + Ephemeral = true, + ServerUrl = "https://github.com", + }; + + var message1 = new TaskAgentMessage() + { + Body = JsonUtility.ToString(new 
RunnerJobRequestRef() { BillingOwnerId = "github", RunnerRequestId = "999", RunServiceUrl = "https://run-service.com" }), + MessageId = 4234, + MessageType = JobRequestMessageTypes.RunnerJobRequest + }; + + var messages = new Queue(); + messages.Enqueue(message1); + messages.Enqueue(message1); + _updater.Setup(x => x.SelfUpdate(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(Task.FromResult(true)); + _configurationManager.Setup(x => x.LoadSettings()) + .Returns(settings); + _configurationManager.Setup(x => x.IsConfigured()) + .Returns(true); + _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) + .Returns(Task.FromResult(CreateSessionResult.Failure)); + _jobNotification.Setup(x => x.StartClient(It.IsAny())) + .Callback(() => + { + + }); + + var throwError = true; + _runServer.Setup(x => x.GetJobMessageAsync("999", "github", It.IsAny())) + .Returns(() => + { + if (throwError) + { + Assert.True(hc.AllowAuthMigration); + throwError = false; + throw new NotSupportedException("some error"); + } + + return Task.FromResult(CreateJobRequestMessage("test")); + }); + + _credentialManager.Setup(x => x.LoadCredentials(true)).Returns(new VssCredentials()); + + _configStore.Setup(x => x.IsServiceConfigured()).Returns(false); + + var credData = new CredentialData() + { + Scheme = Constants.Configuration.OAuth, + }; + credData.Data["ClientId"] = "testClientId"; + credData.Data["AuthUrl"] = "https://github.com"; + credData.Data["EnableAuthMigrationByDefault"] = "true"; + _configStore.Setup(x => x.GetCredentials()).Returns(credData); + + Assert.False(hc.AllowAuthMigration); + + //Act + var command = new CommandSettings(hc, new string[] { "run" }); + var returnCode = await runner.ExecuteCommand(command); + + //Assert + Assert.Equal(Constants.Runner.ReturnCode.TerminatedError, returnCode); + + _messageListener.Verify(x => x.CreateSessionAsync(It.IsAny()), Times.Once()); + + Assert.True(hc.AllowAuthMigration); + } + } } } diff --git a/src/Test/L0/Listener/SelfUpdaterL0.cs b/src/Test/L0/Listener/SelfUpdaterL0.cs index 26ba65e71..be095ce90 100644 --- a/src/Test/L0/Listener/SelfUpdaterL0.cs +++ b/src/Test/L0/Listener/SelfUpdaterL0.cs @@ -107,8 +107,8 @@ namespace GitHub.Runner.Common.Tests.Listener hc.EnqueueInstance(p3); updater.Initialize(hc); - _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny())) - .Callback((int p, ulong a, string s, string t) => + _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny(), It.IsAny())) + .Callback((int p, ulong a, string s, string t, CancellationToken token) => { hc.GetTrace().Info(t); }) @@ -168,8 +168,8 @@ namespace GitHub.Runner.Common.Tests.Listener _runnerServer.Setup(x => x.GetPackageAsync("agent", BuildConstants.RunnerPackage.PackageName, "2.200.0", true, It.IsAny())) .Returns(Task.FromResult(new PackageMetadata() { Platform = BuildConstants.RunnerPackage.PackageName, Version = new PackageVersion("2.200.0"), DownloadUrl = _packageUrl })); - _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny())) - .Callback((int p, ulong a, string s, string t) => + _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny(), It.IsAny())) + .Callback((int p, ulong a, string s, string t, CancellationToken token) => { hc.GetTrace().Info(t); }) @@ -220,8 +220,8 @@ namespace GitHub.Runner.Common.Tests.Listener hc.EnqueueInstance(p3); updater.Initialize(hc); - _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny())) - 
.Callback((int p, ulong a, string s, string t) => + _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny(), It.IsAny())) + .Callback((int p, ulong a, string s, string t, CancellationToken token) => { hc.GetTrace().Info(t); }) @@ -273,8 +273,8 @@ namespace GitHub.Runner.Common.Tests.Listener hc.EnqueueInstance(p3); updater.Initialize(hc); - _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny())) - .Callback((int p, ulong a, string s, string t) => + _runnerServer.Setup(x => x.UpdateAgentUpdateStateAsync(1, 1, It.IsAny(), It.IsAny(), It.IsAny())) + .Callback((int p, ulong a, string s, string t, CancellationToken token) => { hc.GetTrace().Info(t); }) diff --git a/src/Test/L0/ProcessExtensionL0.cs b/src/Test/L0/ProcessExtensionL0.cs index 9708c1495..d650b4889 100644 --- a/src/Test/L0/ProcessExtensionL0.cs +++ b/src/Test/L0/ProcessExtensionL0.cs @@ -27,9 +27,9 @@ namespace GitHub.Runner.Common.Tests try { #if OS_WINDOWS - string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node16\bin\node"); + string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node20\bin\node"); #else - string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node16/bin/node"); + string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node20/bin/node"); hc.EnqueueInstance(new ProcessInvokerWrapper()); #endif var startInfo = new ProcessStartInfo(node, "-e \"setTimeout(function(){{}}, 15 * 1000);\""); @@ -58,7 +58,7 @@ namespace GitHub.Runner.Common.Tests trace.Error(ex); } - Assert.True(false, "Fail to retrive process environment variable."); + Assert.Fail("Failed to retrieve process environment variable."); } finally { diff --git a/src/Test/L0/RunnerWebProxyL0.cs b/src/Test/L0/RunnerWebProxyL0.cs index 61fe68d18..5e339e0a3 100644 --- a/src/Test/L0/RunnerWebProxyL0.cs +++ b/src/Test/L0/RunnerWebProxyL0.cs @@ -65,7 +65,14 @@ namespace GitHub.Runner.Common.Tests } } - Assert.True(badCode.Count == 0, $"The following code is using Raw HttpClientHandler() which will not follow the proxy setting agent have. Please use HostContext.CreateHttpClientHandler() instead.\n {string.Join("\n", badCode)}"); + if (badCode.Count > 0) + { + Assert.Fail($"The following code is using Raw HttpClientHandler() which will not follow the proxy setting agent have. Please use HostContext.CreateHttpClientHandler() instead.\n {string.Join("\n", badCode)}"); + } + else + { + Assert.True(true); + } } [Fact] @@ -112,7 +119,14 @@ namespace GitHub.Runner.Common.Tests } } - Assert.True(badCode.Count == 0, $"The following code is using Raw HttpClient() which will not follow the proxy setting agent have. Please use New HttpClient(HostContext.CreateHttpClientHandler()) instead.\n {string.Join("\n", badCode)}"); + if (badCode.Count > 0) + { + Assert.Fail($"The following code is using Raw HttpClient() which will not follow the proxy setting agent have. 
Please use New HttpClient(HostContext.CreateHttpClientHandler()) instead.\n {string.Join("\n", badCode)}"); + } + else + { + Assert.True(true); + } } [Fact] diff --git a/src/Test/L0/Sdk/LaunchWebApi/LaunchHttpClientL0.cs b/src/Test/L0/Sdk/LaunchWebApi/LaunchHttpClientL0.cs new file mode 100644 index 000000000..bda56141c --- /dev/null +++ b/src/Test/L0/Sdk/LaunchWebApi/LaunchHttpClientL0.cs @@ -0,0 +1,126 @@ +using GitHub.Actions.RunService.WebApi; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Launch.Client; +using GitHub.Services.Launch.Contracts; +using Moq; +using Moq.Protected; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Xunit; + +namespace GitHub.Actions.RunService.WebApi.Tests +{ + public sealed class LaunchHttpClientL0 + { + [Fact] + public async Task GetResolveActionsDownloadInfoAsync_SuccessResponse() + { + var baseUrl = new Uri("https://api.github.com/"); + var planId = Guid.NewGuid(); + var jobId = Guid.NewGuid(); + var token = "fake-token"; + + var actionReferenceList = new ActionReferenceList + { + Actions = new List + { + new ActionReference + { + NameWithOwner = "owner1/action1", + Ref = "0123456789" + } + } + }; + + var responseContent = @"{ + ""actions"": { + ""owner1/action1@0123456789"": { + ""name"": ""owner1/action1"", + ""resolved_name"": ""owner1/action1"", + ""resolved_sha"": ""0123456789"", + ""version"": ""0123456789"", + ""zip_url"": ""https://github.com/owner1/action1/zip"", + ""tar_url"": ""https://github.com/owner1/action1/tar"" + } + } + }"; + + var httpResponse = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(responseContent, Encoding.UTF8, "application/json"), + RequestMessage = new HttpRequestMessage() + { + RequestUri = new Uri($"{baseUrl}actions/build/{planId}/jobs/{jobId}/runnerresolve/actions") + } + }; + + var mockHandler = new Mock(); + mockHandler.Protected().Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) + .ReturnsAsync(httpResponse); + + var client = new LaunchHttpClient(baseUrl, mockHandler.Object, token, false); + var result = await client.GetResolveActionsDownloadInfoAsyncV2(planId, jobId, actionReferenceList, CancellationToken.None); + + // Assert + Assert.NotNull(result); + Assert.NotEmpty(result.Actions); + Assert.Equal(actionReferenceList.Actions.Count, result.Actions.Count); + Assert.True(result.Actions.ContainsKey("owner1/action1@0123456789")); + } + + [Fact] + public async Task GetResolveActionsDownloadInfoAsync_UnprocessableEntityResponse() + { + var baseUrl = new Uri("https://api.github.com/"); + var planId = Guid.NewGuid(); + var jobId = Guid.NewGuid(); + var token = "fake-token"; + + var actionReferenceList = new ActionReferenceList + { + Actions = new List + { + new ActionReference + { + NameWithOwner = "owner1/action1", + Ref = "0123456789" + } + } + }; + + var responseContent = @"{ + ""errors"": { + ""owner1/invalid-action@0123456789"": { + ""message"": ""Unable to resolve action 'owner1/invalid-action@0123456789', repository not found"" + } + } + }"; + + var httpResponse = new HttpResponseMessage(HttpStatusCode.UnprocessableEntity) + { + Content = new StringContent(responseContent, Encoding.UTF8, "application/json"), + RequestMessage = new HttpRequestMessage() + { + RequestUri = new Uri($"{baseUrl}actions/build/{planId}/jobs/{jobId}/runnerresolve/actions") + } + }; + + var mockHandler = new Mock(); + 
mockHandler.Protected().Setup>("SendAsync", ItExpr.IsAny(), ItExpr.IsAny()) + .ReturnsAsync(httpResponse); + + var client = new LaunchHttpClient(baseUrl, mockHandler.Object, token, false); + + var exception = await Assert.ThrowsAsync( + () => client.GetResolveActionsDownloadInfoAsyncV2(planId, jobId, actionReferenceList, CancellationToken.None)); + + Assert.Contains("repository not found", exception.Message); + } + } +} \ No newline at end of file diff --git a/src/Test/L0/Sdk/RSWebApi/RunServiceHttpClientL0.cs b/src/Test/L0/Sdk/RSWebApi/RunServiceHttpClientL0.cs new file mode 100644 index 000000000..b4f3e54ac --- /dev/null +++ b/src/Test/L0/Sdk/RSWebApi/RunServiceHttpClientL0.cs @@ -0,0 +1,20 @@ +using GitHub.Actions.RunService.WebApi; +using Xunit; + +namespace GitHub.Actions.RunService.WebApi.Tests; + +public sealed class RunServiceHttpClientL0 +{ + [Fact] + public void Truncate() + { + TestTruncate(string.Empty.PadLeft(199, 'a'), string.Empty.PadLeft(199, 'a')); + TestTruncate(string.Empty.PadLeft(200, 'a'), string.Empty.PadLeft(200, 'a')); + TestTruncate(string.Empty.PadLeft(201, 'a'), string.Empty.PadLeft(200, 'a') + "[truncated]"); + } + + private void TestTruncate(string errorBody, string expected) + { + Assert.Equal(expected, RunServiceHttpClient.Truncate(errorBody)); + } +} diff --git a/src/Test/L0/TestHostContext.cs b/src/Test/L0/TestHostContext.cs index a3e484b14..c1cf69220 100644 --- a/src/Test/L0/TestHostContext.cs +++ b/src/Test/L0/TestHostContext.cs @@ -1,16 +1,15 @@ -using GitHub.Runner.Common.Util; -using System; +using System; using System.Collections.Concurrent; +using System.Collections.Generic; using System.Globalization; using System.IO; +using System.Net.Http.Headers; +using System.Reflection; using System.Runtime.CompilerServices; +using System.Runtime.Loader; using System.Threading; using System.Threading.Tasks; -using System.Runtime.Loader; -using System.Reflection; -using System.Collections.Generic; using GitHub.DistributedTask.Logging; -using System.Net.Http.Headers; using GitHub.Runner.Sdk; namespace GitHub.Runner.Common.Tests @@ -30,9 +29,12 @@ namespace GitHub.Runner.Common.Tests private string _tempDirectoryRoot = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("D")); private StartupType _startupType; public event EventHandler Unloading; + public event EventHandler Delaying; + public event EventHandler AuthMigrationChanged; public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token; public ShutdownReason RunnerShutdownReason { get; private set; } public ISecretMasker SecretMasker => _secretMasker; + public TestHostContext(object testClass, [CallerMemberName] string testName = "") { ArgUtil.NotNull(testClass, nameof(testClass)); @@ -90,9 +92,19 @@ namespace GitHub.Runner.Common.Tests public RunnerWebProxy WebProxy => new(); + public bool AllowAuthMigration { get; set; } + public async Task Delay(TimeSpan delay, CancellationToken token) { - await Task.Delay(TimeSpan.Zero); + // Event callback + EventHandler handler = Delaying; + if (handler != null) + { + handler(this, new DelayEventArgs(delay, token)); + } + + // Delay 10ms + await Task.Delay(TimeSpan.FromMilliseconds(10)); } public T CreateService() where T : class, IRunnerService @@ -246,12 +258,24 @@ namespace GitHub.Runner.Common.Tests ".agent"); break; + case WellKnownConfigFile.MigratedRunner: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".agent_migrated"); + break; + case WellKnownConfigFile.Credentials: path = Path.Combine( 
GetDirectory(WellKnownDirectory.Root), ".credentials"); break; + case WellKnownConfigFile.MigratedCredentials: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".credentials_migrated"); + break; + case WellKnownConfigFile.RSACredentials: path = Path.Combine( GetDirectory(WellKnownDirectory.Root), @@ -360,5 +384,37 @@ namespace GitHub.Runner.Common.Tests Unloading(this, null); } } + + public void LoadDefaultUserAgents() + { + return; + } + + public void EnableAuthMigration(string trace) + { + AllowAuthMigration = true; + AuthMigrationChanged?.Invoke(this, new AuthMigrationEventArgs(trace)); + } + + public void DeferAuthMigration(TimeSpan deferred, string trace) + { + AllowAuthMigration = false; + AuthMigrationChanged?.Invoke(this, new AuthMigrationEventArgs(trace)); + } + } + + public class DelayEventArgs : EventArgs + { + public DelayEventArgs( + TimeSpan delay, + CancellationToken token) + { + Delay = delay; + Token = token; + } + + public TimeSpan Delay { get; } + + public CancellationToken Token { get; } } } diff --git a/src/Test/L0/Util/IOUtilL0.cs b/src/Test/L0/Util/IOUtilL0.cs index 08d3e9773..e9478dff2 100644 --- a/src/Test/L0/Util/IOUtilL0.cs +++ b/src/Test/L0/Util/IOUtilL0.cs @@ -960,6 +960,33 @@ namespace GitHub.Runner.Common.Tests.Util } } + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void ReplaceInvalidFileNameChars() + { + Assert.Equal(string.Empty, IOUtil.ReplaceInvalidFileNameChars(null)); + Assert.Equal(string.Empty, IOUtil.ReplaceInvalidFileNameChars(string.Empty)); + Assert.Equal("hello.txt", IOUtil.ReplaceInvalidFileNameChars("hello.txt")); +#if OS_WINDOWS + // Refer https://github.com/dotnet/runtime/blob/ce84f1d8a3f12711bad678a33efbc37b461f684f/src/libraries/System.Private.CoreLib/src/System/IO/Path.Windows.cs#L15 + Assert.Equal( + "1_ 2_ 3_ 4_ 5_ 6_ 7_ 8_ 9_ 10_ 11_ 12_ 13_ 14_ 15_ 16_ 17_ 18_ 19_ 20_ 21_ 22_ 23_ 24_ 25_ 26_ 27_ 28_ 29_ 30_ 31_ 32_ 33_ 34_ 35_ 36_ 37_ 38_ 39_ 40_ 41_", + IOUtil.ReplaceInvalidFileNameChars($"1\" 2< 3> 4| 5\0 6{(char)1} 7{(char)2} 8{(char)3} 9{(char)4} 10{(char)5} 11{(char)6} 12{(char)7} 13{(char)8} 14{(char)9} 15{(char)10} 16{(char)11} 17{(char)12} 18{(char)13} 19{(char)14} 20{(char)15} 21{(char)16} 22{(char)17} 23{(char)18} 24{(char)19} 25{(char)20} 26{(char)21} 27{(char)22} 28{(char)23} 29{(char)24} 30{(char)25} 31{(char)26} 32{(char)27} 33{(char)28} 34{(char)29} 35{(char)30} 36{(char)31} 37: 38* 39? 
40\\ 41/")); +#else + // Refer https://github.com/dotnet/runtime/blob/ce84f1d8a3f12711bad678a33efbc37b461f684f/src/libraries/System.Private.CoreLib/src/System/IO/Path.Unix.cs#L12 + Assert.Equal("1_ 2_", IOUtil.ReplaceInvalidFileNameChars("1\0 2/")); +#endif + Assert.Equal("_leading", IOUtil.ReplaceInvalidFileNameChars("/leading")); + Assert.Equal("__consecutive leading", IOUtil.ReplaceInvalidFileNameChars("//consecutive leading")); + Assert.Equal("trailing_", IOUtil.ReplaceInvalidFileNameChars("trailing/")); + Assert.Equal("consecutive trailing__", IOUtil.ReplaceInvalidFileNameChars("consecutive trailing//")); + Assert.Equal("middle_middle", IOUtil.ReplaceInvalidFileNameChars("middle/middle")); + Assert.Equal("consecutive middle__consecutive middle", IOUtil.ReplaceInvalidFileNameChars("consecutive middle//consecutive middle")); + Assert.Equal("_leading_middle_trailing_", IOUtil.ReplaceInvalidFileNameChars("/leading/middle/trailing/")); + Assert.Equal("__consecutive leading__consecutive middle__consecutive trailing__", IOUtil.ReplaceInvalidFileNameChars("//consecutive leading//consecutive middle//consecutive trailing//")); + } + private static async Task CreateDirectoryReparsePoint(IHostContext context, string link, string target) { #if OS_WINDOWS diff --git a/src/Test/L0/Util/NodeUtilL0.cs b/src/Test/L0/Util/NodeUtilL0.cs new file mode 100644 index 000000000..599d23514 --- /dev/null +++ b/src/Test/L0/Util/NodeUtilL0.cs @@ -0,0 +1,120 @@ +using System; +using System.Collections.Generic; +using GitHub.Runner.Common; +using GitHub.Runner.Common.Util; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Util +{ + public class NodeUtilL0 + { + // We're testing the logic with feature flags + [Theory] + [InlineData(false, false, false, false, "node20", false)] // Phase 1: No env vars + [InlineData(false, false, false, true, "node20", false)] // Phase 1: Allow unsecure (redundant) + [InlineData(false, false, true, false, "node24", false)] // Phase 1: Force node24 + [InlineData(false, false, true, true, "node20", true)] // Phase 1: Both flags (use phase default + warning) + [InlineData(false, true, false, false, "node24", false)] // Phase 2: No env vars + [InlineData(false, true, false, true, "node20", false)] // Phase 2: Allow unsecure + [InlineData(false, true, true, false, "node24", false)] // Phase 2: Force node24 (redundant) + [InlineData(false, true, true, true, "node24", true)] // Phase 2: Both flags (use phase default + warning) + [InlineData(true, false, false, false, "node24", false)] // Phase 3: Always Node 24 regardless of env vars + [InlineData(true, false, false, true, "node24", false)] // Phase 3: Always Node 24 regardless of env vars + [InlineData(true, false, true, false, "node24", false)] // Phase 3: Always Node 24 regardless of env vars + [InlineData(true, false, true, true, "node24", false)] // Phase 3: Always Node 24 regardless of env vars, no warnings in Phase 3 + public void TestNodeVersionLogic(bool requireNode24, bool useNode24ByDefault, bool forceNode24, bool allowUnsecureNode, string expectedVersion, bool expectWarning) + { + try + { + Environment.SetEnvironmentVariable(Constants.Runner.NodeMigration.ForceNode24Variable, forceNode24 ? "true" : null); + Environment.SetEnvironmentVariable(Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable, allowUnsecureNode ? 
"true" : null); + + // Call the actual method + var (actualVersion, warningMessage) = NodeUtil.DetermineActionsNodeVersion(null, useNode24ByDefault, requireNode24); + + // Assert + Assert.Equal(expectedVersion, actualVersion); + + if (expectWarning) + { + Assert.NotNull(warningMessage); + Assert.Contains("Both", warningMessage); + Assert.Contains("are set to true", warningMessage); + } + else + { + Assert.Null(warningMessage); + } + } + finally + { + // Cleanup + Environment.SetEnvironmentVariable(Constants.Runner.NodeMigration.ForceNode24Variable, null); + Environment.SetEnvironmentVariable(Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable, null); + } + } + + [Theory] + [InlineData(false, false, false, false, false, true, "node20", false)] // Phase 1: System env: none, Workflow env: allow=true + [InlineData(false, false, true, false, false, false, "node24", false)] // Phase 1: System env: force node24, Workflow env: none + [InlineData(false, true, false, false, true, false, "node24", false)] // Phase 1: System env: none, Workflow env: force node24 + [InlineData(false, false, false, true, false, true, "node20", false)] // Phase 1: System env: allow=true, Workflow env: allow=true (workflow takes precedence) + [InlineData(false, false, true, true, false, false, "node20", true)] // Phase 1: System env: both true, Workflow env: none (use phase default + warning) + [InlineData(false, false, false, false, true, true, "node20", true)] // Phase 1: System env: none, Workflow env: both (use phase default + warning) + [InlineData(true, false, false, false, false, false, "node24", false)] // Phase 2: System env: none, Workflow env: none + [InlineData(true, false, false, true, false, false, "node20", false)] // Phase 2: System env: allow=true, Workflow env: none + [InlineData(true, false, false, false, false, true, "node20", false)] // Phase 2: System env: none, Workflow env: allow unsecure + [InlineData(true, false, true, false, false, true, "node20", false)] // Phase 2: System env: force node24, Workflow env: allow unsecure + [InlineData(true, false, true, true, false, false, "node24", true)] // Phase 2: System env: both true, Workflow env: none (use phase default + warning) + [InlineData(true, false, false, false, true, true, "node24", true)] // Phase 2: System env: none, Workflow env: both (phase default + warning) + [InlineData(false, true, false, false, false, true, "node24", false)] // Phase 3: System env: none, Workflow env: allow=true (always Node 24 in Phase 3) + [InlineData(false, true, true, true, false, false, "node24", false)] // Phase 3: System env: both true, Workflow env: none (always Node 24 in Phase 3, no warning) + [InlineData(false, true, false, false, true, true, "node24", false)] // Phase 3: System env: none, Workflow env: both (always Node 24 in Phase 3, no warning) + public void TestNodeVersionLogicWithWorkflowEnvironment(bool useNode24ByDefault, bool requireNode24, + bool systemForceNode24, bool systemAllowUnsecure, + bool workflowForceNode24, bool workflowAllowUnsecure, + string expectedVersion, bool expectWarning) + { + try + { + // Set system environment variables + Environment.SetEnvironmentVariable(Constants.Runner.NodeMigration.ForceNode24Variable, systemForceNode24 ? "true" : null); + Environment.SetEnvironmentVariable(Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable, systemAllowUnsecure ? 
"true" : null); + + // Set workflow environment variables + var workflowEnv = new Dictionary(); + if (workflowForceNode24) + { + workflowEnv[Constants.Runner.NodeMigration.ForceNode24Variable] = "true"; + } + if (workflowAllowUnsecure) + { + workflowEnv[Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable] = "true"; + } + + // Call the actual method with our test parameters + var (actualVersion, warningMessage) = NodeUtil.DetermineActionsNodeVersion(workflowEnv, useNode24ByDefault, requireNode24); + + // Assert + Assert.Equal(expectedVersion, actualVersion); + + if (expectWarning) + { + Assert.NotNull(warningMessage); + Assert.Contains("Both", warningMessage); + Assert.Contains("are set to true", warningMessage); + } + else + { + Assert.Null(warningMessage); + } + } + finally + { + // Cleanup + Environment.SetEnvironmentVariable(Constants.Runner.NodeMigration.ForceNode24Variable, null); + Environment.SetEnvironmentVariable(Constants.Runner.NodeMigration.AllowUnsecureNodeVersionVariable, null); + } + } + } +} diff --git a/src/Test/L0/Util/WhichUtilL0.cs b/src/Test/L0/Util/WhichUtilL0.cs index 90d32c466..9a6443d1f 100644 --- a/src/Test/L0/Util/WhichUtilL0.cs +++ b/src/Test/L0/Util/WhichUtilL0.cs @@ -212,210 +212,5 @@ namespace GitHub.Runner.Common.Tests.Util File.Delete(brokenSymlink); Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue); } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public void UseWhich2FindGit() - { - using (TestHostContext hc = new(this)) - { - //Arrange - Tracing trace = hc.GetTrace(); - - // Act. - string gitPath = WhichUtil.Which2("git", trace: trace); - - trace.Info($"Which(\"git\") returns: {gitPath ?? string.Empty}"); - - // Assert. - Assert.True(!string.IsNullOrEmpty(gitPath) && File.Exists(gitPath), $"Unable to find Git through: {nameof(WhichUtil.Which)}"); - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public void Which2ReturnsNullWhenNotFound() - { - using (TestHostContext hc = new(this)) - { - //Arrange - Tracing trace = hc.GetTrace(); - - // Act. - string nosuch = WhichUtil.Which2("no-such-file-cf7e351f", trace: trace); - - trace.Info($"result: {nosuch ?? string.Empty}"); - - // Assert. - Assert.True(string.IsNullOrEmpty(nosuch), "Path should not be resolved"); - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public void Which2ThrowsWhenRequireAndNotFound() - { - using (TestHostContext hc = new(this)) - { - //Arrange - Tracing trace = hc.GetTrace(); - - // Act. - try - { - WhichUtil.Which2("no-such-file-cf7e351f", require: true, trace: trace); - throw new Exception("which should have thrown"); - } - catch (FileNotFoundException ex) - { - Assert.Equal("no-such-file-cf7e351f", ex.FileName); - } - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public void Which2HandleFullyQualifiedPath() - { - using (TestHostContext hc = new(this)) - { - //Arrange - Tracing trace = hc.GetTrace(); - - // Act. - var gitPath = WhichUtil.Which2("git", require: true, trace: trace); - var gitPath2 = WhichUtil.Which2(gitPath, require: true, trace: trace); - - // Assert. 
- Assert.Equal(gitPath, gitPath2); - } - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public void Which2HandlesSymlinkToTargetFullPath() - { - // Arrange - using TestHostContext hc = new TestHostContext(this); - Tracing trace = hc.GetTrace(); - string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable); -#if OS_WINDOWS - string newValue = oldValue + @$";{Path.GetTempPath()}"; - string symlinkName = $"symlink-{Guid.NewGuid()}"; - string symlink = Path.GetTempPath() + $"{symlinkName}.exe"; - string target = Path.GetTempPath() + $"target-{Guid.NewGuid()}.exe"; -#else - string newValue = oldValue + @$":{Path.GetTempPath()}"; - string symlinkName = $"symlink-{Guid.NewGuid()}"; - string symlink = Path.GetTempPath() + $"{symlinkName}"; - string target = Path.GetTempPath() + $"target-{Guid.NewGuid()}"; -#endif - - Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue); - - - using (File.Create(target)) - { - File.CreateSymbolicLink(symlink, target); - - // Act. - var result = WhichUtil.Which2(symlinkName, require: true, trace: trace); - - // Assert - Assert.True(!string.IsNullOrEmpty(result) && File.Exists(result), $"Unable to find symlink through: {nameof(WhichUtil.Which)}"); - - } - - - // Cleanup - File.Delete(symlink); - File.Delete(target); - Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue); - - } - - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public void Which2HandlesSymlinkToTargetRelativePath() - { - // Arrange - using TestHostContext hc = new TestHostContext(this); - Tracing trace = hc.GetTrace(); - string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable); -#if OS_WINDOWS - string newValue = oldValue + @$";{Path.GetTempPath()}"; - string symlinkName = $"symlink-{Guid.NewGuid()}"; - string symlink = Path.GetTempPath() + $"{symlinkName}.exe"; - string targetName = $"target-{Guid.NewGuid()}.exe"; - string target = Path.GetTempPath() + targetName; -#else - string newValue = oldValue + @$":{Path.GetTempPath()}"; - string symlinkName = $"symlink-{Guid.NewGuid()}"; - string symlink = Path.GetTempPath() + $"{symlinkName}"; - string targetName = $"target-{Guid.NewGuid()}"; - string target = Path.GetTempPath() + targetName; -#endif - Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue); - - - using (File.Create(target)) - { - File.CreateSymbolicLink(symlink, targetName); - - // Act. 
- var result = WhichUtil.Which2(symlinkName, require: true, trace: trace); - - // Assert - Assert.True(!string.IsNullOrEmpty(result) && File.Exists(result), $"Unable to find {symlinkName} through: {nameof(WhichUtil.Which)}"); - } - - // Cleanup - File.Delete(symlink); - File.Delete(target); - Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue); - - } - [Fact] - [Trait("Level", "L0")] - [Trait("Category", "Common")] - public void Which2ThrowsWhenSymlinkBroken() - { - // Arrange - using TestHostContext hc = new TestHostContext(this); - Tracing trace = hc.GetTrace(); - string oldValue = Environment.GetEnvironmentVariable(PathUtil.PathVariable); - -#if OS_WINDOWS - string newValue = oldValue + @$";{Path.GetTempPath()}"; - string brokenSymlinkName = $"broken-symlink-{Guid.NewGuid()}"; - string brokenSymlink = Path.GetTempPath() + $"{brokenSymlinkName}.exe"; -#else - string newValue = oldValue + @$":{Path.GetTempPath()}"; - string brokenSymlinkName = $"broken-symlink-{Guid.NewGuid()}"; - string brokenSymlink = Path.GetTempPath() + $"{brokenSymlinkName}"; -#endif - - - string target = "no-such-file-cf7e351f"; - Environment.SetEnvironmentVariable(PathUtil.PathVariable, newValue); - - File.CreateSymbolicLink(brokenSymlink, target); - - // Act. - var exception = Assert.Throws(() => WhichUtil.Which2(brokenSymlinkName, require: true, trace: trace)); - - // Assert - Assert.Equal(brokenSymlinkName, exception.FileName); - - // Cleanup - File.Delete(brokenSymlink); - Environment.SetEnvironmentVariable(PathUtil.PathVariable, oldValue); - } } } diff --git a/src/Test/L0/Worker/ActionCommandManagerL0.cs b/src/Test/L0/Worker/ActionCommandManagerL0.cs index 693c6f025..3a1f8f70f 100644 --- a/src/Test/L0/Worker/ActionCommandManagerL0.cs +++ b/src/Test/L0/Worker/ActionCommandManagerL0.cs @@ -232,7 +232,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, diff --git a/src/Test/L0/Worker/ActionManagerL0.cs b/src/Test/L0/Worker/ActionManagerL0.cs index c487ea55e..328c5b5f6 100644 --- a/src/Test/L0/Worker/ActionManagerL0.cs +++ b/src/Test/L0/Worker/ActionManagerL0.cs @@ -382,8 +382,6 @@ runs: } }; - _ec.Object.Global.Variables.Set("DistributedTask.UseActionArchiveCache", bool.TrueString); - //Act await _actionManager.PrepareActionsAsync(_ec.Object, actions); @@ -462,7 +460,7 @@ runs: //Act var steps = (await _actionManager.PrepareActionsAsync(_ec.Object, actions)).ContainerSetupSteps; - Assert.True(steps.Count == 0); + Assert.Equal(0, steps.Count); } finally { @@ -917,7 +915,7 @@ runs: var steps = (await _actionManager.PrepareActionsAsync(_ec.Object, actions)).ContainerSetupSteps; // node.js based action doesn't need any extra steps to build/pull containers. 
- Assert.True(steps.Count == 0); + Assert.Equal(0, steps.Count); } finally { @@ -1053,7 +1051,7 @@ runs: var steps = (await _actionManager.PrepareActionsAsync(_ec.Object, actions)).ContainerSetupSteps; // node.js based action doesn't need any extra steps to build/pull containers. - Assert.True(steps.Count == 0); + Assert.Equal(0, steps.Count); var watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "CompositeBasic.completed"); Assert.True(File.Exists(watermarkFile)); // Comes from the composite action @@ -1247,7 +1245,7 @@ runs: // Assert. Assert.NotNull(definition); Assert.NotNull(definition.Data); - Assert.True(definition.Data.Execution.ExecutionType == ActionExecutionType.Script); + Assert.Equal(ActionExecutionType.Script, definition.Data.Execution.ExecutionType); } finally { @@ -1661,6 +1659,76 @@ runs: Teardown(); } } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsNode24ActionDefinition() + { + try + { + // Arrange. + Setup(); + const string Content = @" +# Container action +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'GitHub' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'node24' + main: 'task.js' +"; + Pipelines.ActionStep instance; + string directory; + CreateAction(yamlContent: Content, instance: out instance, directory: out directory); + + // Act. + Definition definition = _actionManager.LoadAction(_ec.Object, instance); + + // Assert. 
+ Assert.NotNull(definition); + Assert.Equal(directory, definition.Directory); + Assert.NotNull(definition.Data); + Assert.NotNull(definition.Data.Inputs); // inputs + Dictionary inputDefaults = new(StringComparer.OrdinalIgnoreCase); + foreach (var input in definition.Data.Inputs) + { + var name = input.Key.AssertString("key").Value; + var value = input.Value.AssertScalar("value").ToString(); + + _hc.GetTrace().Info($"Default: {name} = {value}"); + inputDefaults[name] = value; + } + + Assert.Equal(2, inputDefaults.Count); + Assert.True(inputDefaults.ContainsKey("greeting")); + Assert.Equal("Hello", inputDefaults["greeting"]); + Assert.True(string.IsNullOrEmpty(inputDefaults["entryPoint"])); + Assert.NotNull(definition.Data.Execution); // execution + + Assert.NotNull(definition.Data.Execution as NodeJSActionExecutionData); + Assert.Equal("task.js", (definition.Data.Execution as NodeJSActionExecutionData).Script); + Assert.Equal("node24", (definition.Data.Execution as NodeJSActionExecutionData).NodeVersion); + } + finally + { + Teardown(); + } + } + [Fact] [Trait("Level", "L0")] [Trait("Category", "Worker")] @@ -2375,10 +2443,6 @@ runs: _ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token); _ec.Setup(x => x.Root).Returns(new GitHub.Runner.Worker.ExecutionContext()); var variables = new Dictionary(); - if (enableComposite) - { - variables["DistributedTask.EnableCompositeActions"] = "true"; - } _ec.Object.Global.Variables = new Variables(_hc, variables); _ec.Setup(x => x.ExpressionValues).Returns(new DictionaryContextData()); _ec.Setup(x => x.ExpressionFunctions).Returns(new List()); @@ -2417,8 +2481,8 @@ runs: }); _launchServer = new Mock(); - _launchServer.Setup(x => x.ResolveActionsDownloadInfoAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) - .Returns((Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) => + _launchServer.Setup(x => x.ResolveActionsDownloadInfoAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns((Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken, bool displayHelpfulActionsDownloadErrors) => { var result = new ActionDownloadInfoCollection { Actions = new Dictionary() }; foreach (var action in actions.Actions) diff --git a/src/Test/L0/Worker/ActionManifestManagerL0.cs b/src/Test/L0/Worker/ActionManifestManagerL0.cs index 385ae9463..dae75c8f6 100644 --- a/src/Test/L0/Worker/ActionManifestManagerL0.cs +++ b/src/Test/L0/Worker/ActionManifestManagerL0.cs @@ -502,6 +502,49 @@ namespace GitHub.Runner.Common.Tests.Worker } } + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Load_Node24Action() + { + try + { + //Arrange + Setup(); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + //Act + var result = actionManifest.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "node24action.yml")); + + //Assert + Assert.Equal("Hello World", result.Name); + Assert.Equal("Greet the world and record the time", result.Description); + Assert.Equal(2, result.Inputs.Count); + Assert.Equal("greeting", result.Inputs[0].Key.AssertString("key").Value); + Assert.Equal("Hello", result.Inputs[0].Value.AssertString("value").Value); + Assert.Equal("entryPoint", result.Inputs[1].Key.AssertString("key").Value); + Assert.Equal("", result.Inputs[1].Value.AssertString("value").Value); + Assert.Equal(1, result.Deprecated.Count); + + Assert.True(result.Deprecated.ContainsKey("greeting")); + 
result.Deprecated.TryGetValue("greeting", out string value); + Assert.Equal("This property has been deprecated", value); + + Assert.Equal(ActionExecutionType.NodeJS, result.Execution.ExecutionType); + + var nodeAction = result.Execution as NodeJSActionExecutionData; + + Assert.Equal("main.js", nodeAction.Script); + Assert.Equal("node24", nodeAction.NodeVersion); + } + finally + { + Teardown(); + } + } + [Fact] [Trait("Level", "L0")] [Trait("Category", "Worker")] @@ -757,8 +800,8 @@ namespace GitHub.Runner.Common.Tests.Worker //Assert var err = Assert.Throws(() => actionManifest.Load(_ec.Object, action_path)); - Assert.Contains($"Fail to load {action_path}", err.Message); - _ec.Verify(x => x.AddIssue(It.Is(s => s.Message.Contains("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16' or 'node20'.")), It.IsAny()), Times.Once); + Assert.Contains($"Failed to load {action_path}", err.Message); + _ec.Verify(x => x.AddIssue(It.Is(s => s.Message.Contains("Missing 'using' value. 'using' requires 'composite', 'docker', 'node12', 'node16', 'node20' or 'node24'.")), It.IsAny()), Times.Once); } finally { diff --git a/src/Test/L0/Worker/CreateStepSummaryCommandL0.cs b/src/Test/L0/Worker/CreateStepSummaryCommandL0.cs index 19f956fa8..185f44b38 100644 --- a/src/Test/L0/Worker/CreateStepSummaryCommandL0.cs +++ b/src/Test/L0/Worker/CreateStepSummaryCommandL0.cs @@ -193,7 +193,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "Summary Job"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, diff --git a/src/Test/L0/Worker/ExecutionContextL0.cs b/src/Test/L0/Worker/ExecutionContextL0.cs index 91068d300..2f28f797f 100644 --- a/src/Test/L0/Worker/ExecutionContextL0.cs +++ b/src/Test/L0/Worker/ExecutionContextL0.cs @@ -29,7 +29,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -106,7 +106,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var 
jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -162,7 +162,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -216,7 +216,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -271,7 +271,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -322,7 +322,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = 
new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -373,7 +373,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -471,7 +471,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -555,7 +555,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -610,7 +610,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new 
Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -653,7 +653,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -677,7 +677,7 @@ namespace GitHub.Runner.Common.Tests.Worker ec.InitializeJob(jobRequest, CancellationToken.None); ec.Start(); - ec.StepTelemetry.Type = "node16"; + ec.StepTelemetry.Type = "node20"; ec.StepTelemetry.Action = "actions/checkout"; ec.StepTelemetry.Ref = "v2"; ec.StepTelemetry.IsEmbedded = false; @@ -695,7 +695,7 @@ namespace GitHub.Runner.Common.Tests.Worker // Assert. 
Assert.Equal(1, ec.Global.StepsTelemetry.Count); - Assert.Equal("node16", ec.Global.StepsTelemetry.Single().Type); + Assert.Equal("node20", ec.Global.StepsTelemetry.Single().Type); Assert.Equal("actions/checkout", ec.Global.StepsTelemetry.Single().Action); Assert.Equal("v2", ec.Global.StepsTelemetry.Single().Ref); Assert.Equal(TaskResult.Succeeded, ec.Global.StepsTelemetry.Single().Result); @@ -717,7 +717,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -746,7 +746,7 @@ namespace GitHub.Runner.Common.Tests.Worker var embeddedStep = ec.CreateChild(Guid.NewGuid(), "action_1_pre", "action_1_pre", null, null, ActionRunStage.Main, isEmbedded: true); embeddedStep.Start(); - embeddedStep.StepTelemetry.Type = "node16"; + embeddedStep.StepTelemetry.Type = "node20"; embeddedStep.StepTelemetry.Action = "actions/checkout"; embeddedStep.StepTelemetry.Ref = "v2"; @@ -758,7 +758,7 @@ namespace GitHub.Runner.Common.Tests.Worker // Assert. Assert.Equal(1, ec.Global.StepsTelemetry.Count); - Assert.Equal("node16", ec.Global.StepsTelemetry.Single().Type); + Assert.Equal("node20", ec.Global.StepsTelemetry.Single().Type); Assert.Equal("actions/checkout", ec.Global.StepsTelemetry.Single().Action); Assert.Equal("v2", ec.Global.StepsTelemetry.Single().Ref); Assert.Equal(ActionRunStage.Main.ToString(), ec.Global.StepsTelemetry.Single().Stage); @@ -773,6 +773,82 @@ namespace GitHub.Runner.Common.Tests.Worker [Trait("Level", "L0")] [Trait("Category", "Worker")] public void PublishStepResult_EmbeddedStep() + { + using (TestHostContext hc = CreateTestContext()) + { + // Job request + TaskOrchestrationPlanReference plan = new(); + TimelineReference timeline = new(); + Guid jobId = Guid.NewGuid(); + string jobName = "some job name"; + var variables = new Dictionary() + { + ["RunService.FixEmbeddedIssues"] = new VariableValue("true"), + }; + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, variables, new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); + jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() + { + Alias = Pipelines.PipelineConstants.SelfAlias, + Id = "github", + Version = "sha1" + }); + jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData(); + + // Mocks + var pagingLogger = new Mock(); + var pagingLogger2 = new Mock(); + var jobServerQueue = new Mock(); + jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny(), It.IsAny())); + hc.EnqueueInstance(pagingLogger.Object); + hc.EnqueueInstance(pagingLogger2.Object); + hc.SetSingleton(jobServerQueue.Object); + + // Job 
context + var jobContext = new Runner.Worker.ExecutionContext(); + jobContext.Initialize(hc); + jobContext.InitializeJob(jobRequest, CancellationToken.None); + jobContext.Start(); + + // Step 1 context + var step1 = jobContext.CreateChild(Guid.NewGuid(), "my_step", "my_step", null, null, ActionRunStage.Main); + step1.Start(); + + // Embedded step 1a context + var embeddedStep1a = step1.CreateEmbeddedChild(null, null, Guid.NewGuid(), ActionRunStage.Main); + embeddedStep1a.Start(); + embeddedStep1a.StepTelemetry.Type = "node20"; + embeddedStep1a.StepTelemetry.Action = "actions/checkout"; + embeddedStep1a.StepTelemetry.Ref = "v2"; + embeddedStep1a.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" }, ExecutionContextLogOptions.Default); + embeddedStep1a.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" }, ExecutionContextLogOptions.Default); + embeddedStep1a.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice" }, ExecutionContextLogOptions.Default); + embeddedStep1a.Complete(); + + // Embedded step 1b context + var embeddedStep1b = step1.CreateEmbeddedChild(null, null, Guid.NewGuid(), ActionRunStage.Main); + embeddedStep1b.Start(); + embeddedStep1b.StepTelemetry.Type = "node20"; + embeddedStep1b.StepTelemetry.Action = "actions/checkout"; + embeddedStep1b.StepTelemetry.Ref = "v2"; + embeddedStep1b.AddIssue(new Issue() { Type = IssueType.Error, Message = "error 2" }, ExecutionContextLogOptions.Default); + embeddedStep1b.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning 2" }, ExecutionContextLogOptions.Default); + embeddedStep1b.AddIssue(new Issue() { Type = IssueType.Notice, Message = "notice 2" }, ExecutionContextLogOptions.Default); + embeddedStep1b.Complete(); + + step1.Complete(); + + // Assert + Assert.Equal(3, jobContext.Global.StepsResult.Count); + Assert.Equal(0, jobContext.Global.StepsResult[0].Annotations.Count); + Assert.Equal(0, jobContext.Global.StepsResult[1].Annotations.Count); + Assert.Equal(6, jobContext.Global.StepsResult[2].Annotations.Count); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void PublishStepResult_EmbeddedStep_Legacy() { using (TestHostContext hc = CreateTestContext()) { @@ -781,7 +857,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -807,10 +883,10 @@ namespace GitHub.Runner.Common.Tests.Worker ec.InitializeJob(jobRequest, CancellationToken.None); ec.Start(); - var embeddedStep = ec.CreateChild(Guid.NewGuid(), "action_1_pre", "action_1_pre", null, null, ActionRunStage.Main, isEmbedded: true); + var embeddedStep = ec.CreateEmbeddedChild(null, null, Guid.NewGuid(), ActionRunStage.Main); embeddedStep.Start(); - embeddedStep.StepTelemetry.Type = "node16"; 
+ embeddedStep.StepTelemetry.Type = "node20"; embeddedStep.StepTelemetry.Action = "actions/checkout"; embeddedStep.StepTelemetry.Ref = "v2"; @@ -969,7 +1045,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -1014,7 +1090,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new TimelineReference(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -1057,7 +1133,7 @@ namespace GitHub.Runner.Common.Tests.Worker TimelineReference timeline = new TimelineReference(); Guid jobId = Guid.NewGuid(); string jobName = "some job name"; - var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null); + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary(), new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() { Alias = Pipelines.PipelineConstants.SelfAlias, @@ -1092,6 +1168,77 @@ namespace GitHub.Runner.Common.Tests.Worker } } + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void InitializeJob_HydratesJobContextWithCheckRunId() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange: Create a job request message and make sure the feature flag is enabled + var variables = new Dictionary() + { + [Constants.Runner.Features.AddCheckRunIdToJobContext] = new VariableValue("true"), + }; + var jobRequest = new Pipelines.AgentJobRequestMessage(new TaskOrchestrationPlanReference(), new TimelineReference(), Guid.NewGuid(), "some job name", "some job name", null, null, null, variables, new List(), new Pipelines.JobResources(), new 
Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); + var pagingLogger = new Moq.Mock(); + var jobServerQueue = new Moq.Mock(); + hc.EnqueueInstance(pagingLogger.Object); + hc.SetSingleton(jobServerQueue.Object); + var ec = new Runner.Worker.ExecutionContext(); + ec.Initialize(hc); + + // Arrange: Add check_run_id to the job context + var jobContext = new Pipelines.ContextData.DictionaryContextData(); + jobContext["check_run_id"] = new NumberContextData(123456); + jobRequest.ContextData["job"] = jobContext; + jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData(); + + // Act + ec.InitializeJob(jobRequest, CancellationToken.None); + + // Assert + Assert.NotNull(ec.JobContext); + Assert.Equal(123456, ec.JobContext.CheckRunId); + } + } + + // TODO: this test can be deleted when `AddCheckRunIdToJobContext` is fully rolled out + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void InitializeJob_HydratesJobContextWithCheckRunId_FeatureFlagDisabled() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange: Create a job request message and make sure the feature flag is disabled + var variables = new Dictionary() + { + [Constants.Runner.Features.AddCheckRunIdToJobContext] = new VariableValue("false"), + }; + var jobRequest = new Pipelines.AgentJobRequestMessage(new TaskOrchestrationPlanReference(), new TimelineReference(), Guid.NewGuid(), "some job name", "some job name", null, null, null, variables, new List(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List(), null, null, null, null, null); + var pagingLogger = new Moq.Mock(); + var jobServerQueue = new Moq.Mock(); + hc.EnqueueInstance(pagingLogger.Object); + hc.SetSingleton(jobServerQueue.Object); + var ec = new Runner.Worker.ExecutionContext(); + ec.Initialize(hc); + + // Arrange: Add check_run_id to the job context + var jobContext = new Pipelines.ContextData.DictionaryContextData(); + jobContext["check_run_id"] = new NumberContextData(123456); + jobRequest.ContextData["job"] = jobContext; + jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData(); + + // Act + ec.InitializeJob(jobRequest, CancellationToken.None); + + // Assert + Assert.NotNull(ec.JobContext); + Assert.Null(ec.JobContext.CheckRunId); // with the feature flag disabled we should not have added a CheckRunId to the JobContext + } + } + private bool ExpressionValuesAssertEqual(DictionaryContextData expect, DictionaryContextData actual) { foreach (var key in expect.Keys.ToList()) diff --git a/src/Test/L0/Worker/HandlerFactoryL0.cs b/src/Test/L0/Worker/HandlerFactoryL0.cs index 0a9552d99..37981e46a 100644 --- a/src/Test/L0/Worker/HandlerFactoryL0.cs +++ b/src/Test/L0/Worker/HandlerFactoryL0.cs @@ -30,9 +30,10 @@ namespace GitHub.Runner.Common.Tests.Worker [Theory] [Trait("Level", "L0")] [Trait("Category", "Worker")] - [InlineData("node12", "node16")] - [InlineData("node16", "node16")] + [InlineData("node12", "node20")] + [InlineData("node16", "node20")] [InlineData("node20", "node20")] + [InlineData("node24", "node24")] public void IsNodeVersionUpgraded(string inputVersion, string expectedVersion) { using (TestHostContext hc = CreateTestContext()) @@ -41,7 +42,7 @@ namespace GitHub.Runner.Common.Tests.Worker var hf = new HandlerFactory(); hf.Initialize(hc); - // Server Feature Flag + // Setup variables var variables = new Dictionary(); 
Variables serverVariables = new(hc, variables); @@ -70,7 +71,49 @@ namespace GitHub.Runner.Common.Tests.Worker // Assert. Assert.Equal(expectedVersion, handler.Data.NodeVersion); - Environment.SetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, null); + } + } + + + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Node24ExplicitlyRequested_HonoredByDefault() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var hf = new HandlerFactory(); + hf.Initialize(hc); + + // Basic variables setup + var variables = new Dictionary(); + Variables serverVariables = new(hc, variables); + + _ec.Setup(x => x.Global).Returns(new GlobalContext() + { + Variables = serverVariables, + EnvironmentVariables = new Dictionary() + }); + + // Act - Node 24 explicitly requested in action.yml + var data = new NodeJSActionExecutionData(); + data.NodeVersion = "node24"; + var handler = hf.Create( + _ec.Object, + new ScriptReference(), + new Mock().Object, + data, + new Dictionary(), + new Dictionary(), + new Variables(hc, new Dictionary()), + "", + new List() + ) as INodeScriptActionHandler; + + // Assert - should be node24 as requested + Assert.Equal("node24", handler.Data.NodeVersion); } } } diff --git a/src/Test/L0/Worker/Handlers/NodeHandlerL0.cs b/src/Test/L0/Worker/Handlers/NodeHandlerL0.cs new file mode 100644 index 000000000..78c4053f1 --- /dev/null +++ b/src/Test/L0/Worker/Handlers/NodeHandlerL0.cs @@ -0,0 +1,35 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Sdk; +using GitHub.Runner.Worker; +using GitHub.Runner.Worker.Handlers; +using Moq; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Worker.Handlers +{ + public sealed class NodeHandlerL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void NodeJSActionExecutionDataSupportsNode24() + { + // Create NodeJSActionExecutionData with node24 + var nodeJSData = new NodeJSActionExecutionData + { + NodeVersion = "node24", + Script = "test.js" + }; + + // Act & Assert + Assert.Equal("node24", nodeJSData.NodeVersion); + Assert.Equal(ActionExecutionType.NodeJS, nodeJSData.ExecutionType); + } + } +} diff --git a/src/Test/L0/Worker/IssueMatcherL0.cs b/src/Test/L0/Worker/IssueMatcherL0.cs index 777772a84..177dd6de2 100644 --- a/src/Test/L0/Worker/IssueMatcherL0.cs +++ b/src/Test/L0/Worker/IssueMatcherL0.cs @@ -896,5 +896,173 @@ namespace GitHub.Runner.Common.Tests.Worker Assert.Equal("not-working", match.Message); Assert.Equal("my-project.proj", match.FromPath); } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Matcher_SinglePattern_DefaultFromPath() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""fromPath"": ""subdir/default-project.csproj"", + ""pattern"": [ + { + ""regexp"": ""^file:(.+) line:(.+) column:(.+) severity:(.+) code:(.+) message:(.+)$"", + ""file"": 1, + ""line"": 2, + ""column"": 3, + ""severity"": 4, + ""code"": 5, + ""message"": 6 + } + ] + } + ] +} +"); + config.Validate(); + var matcher = new IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + + var match = matcher.Match("file:my-file.cs line:123 column:45 severity:real-bad code:uh-oh message:not-working"); + Assert.Equal("my-file.cs", match.File); + Assert.Equal("123", match.Line); + Assert.Equal("45", 
match.Column); + Assert.Equal("real-bad", match.Severity); + Assert.Equal("uh-oh", match.Code); + Assert.Equal("not-working", match.Message); + Assert.Equal("subdir/default-project.csproj", match.FromPath); + + // Test that a pattern-specific fromPath overrides the default + config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""fromPath"": ""subdir/default-project.csproj"", + ""pattern"": [ + { + ""regexp"": ""^file:(.+) line:(.+) column:(.+) severity:(.+) code:(.+) message:(.+) fromPath:(.+)$"", + ""file"": 1, + ""line"": 2, + ""column"": 3, + ""severity"": 4, + ""code"": 5, + ""message"": 6, + ""fromPath"": 7 + } + ] + } + ] +} +"); + config.Validate(); + matcher = new IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + + match = matcher.Match("file:my-file.cs line:123 column:45 severity:real-bad code:uh-oh message:not-working fromPath:my-project.proj"); + Assert.Equal("my-file.cs", match.File); + Assert.Equal("123", match.Line); + Assert.Equal("45", match.Column); + Assert.Equal("real-bad", match.Severity); + Assert.Equal("uh-oh", match.Code); + Assert.Equal("not-working", match.Message); + Assert.Equal("my-project.proj", match.FromPath); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Matcher_MultiplePatterns_DefaultFromPath() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""fromPath"": ""subdir/default-project.csproj"", + ""pattern"": [ + { + ""regexp"": ""^file:(.+)$"", + ""file"": 1, + }, + { + ""regexp"": ""^severity:(.+)$"", + ""severity"": 1 + }, + { + ""regexp"": ""^line:(.+) column:(.+) code:(.+) message:(.+)$"", + ""line"": 1, + ""column"": 2, + ""code"": 3, + ""message"": 4 + } + ] + } + ] +} +"); + config.Validate(); + var matcher = new IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + + var match = matcher.Match("file:my-file.cs"); + Assert.Null(match); + match = matcher.Match("severity:real-bad"); + Assert.Null(match); + match = matcher.Match("line:123 column:45 code:uh-oh message:not-working"); + Assert.Equal("my-file.cs", match.File); + Assert.Equal("123", match.Line); + Assert.Equal("45", match.Column); + Assert.Equal("real-bad", match.Severity); + Assert.Equal("uh-oh", match.Code); + Assert.Equal("not-working", match.Message); + Assert.Equal("subdir/default-project.csproj", match.FromPath); + + // Test that pattern-specific fromPath overrides the default + config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""fromPath"": ""subdir/default-project.csproj"", + ""pattern"": [ + { + ""regexp"": ""^file:(.+) fromPath:(.+)$"", + ""file"": 1, + ""fromPath"": 2 + }, + { + ""regexp"": ""^severity:(.+)$"", + ""severity"": 1 + }, + { + ""regexp"": ""^line:(.+) column:(.+) code:(.+) message:(.+)$"", + ""line"": 1, + ""column"": 2, + ""code"": 3, + ""message"": 4 + } + ] + } + ] +} +"); + config.Validate(); + matcher = new IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + + match = matcher.Match("file:my-file.cs fromPath:my-project.proj"); + Assert.Null(match); + match = matcher.Match("severity:real-bad"); + Assert.Null(match); + match = matcher.Match("line:123 column:45 code:uh-oh message:not-working"); + Assert.Equal("my-file.cs", match.File); + Assert.Equal("123", match.Line); + Assert.Equal("45", match.Column); + Assert.Equal("real-bad", match.Severity); + Assert.Equal("uh-oh", match.Code); + Assert.Equal("not-working", match.Message); + 
+            Assert.Equal("my-project.proj", match.FromPath);
+        }
     }
 }
diff --git a/src/Test/L0/Worker/JobContextL0.cs b/src/Test/L0/Worker/JobContextL0.cs
new file mode 100644
index 000000000..87e334379
--- /dev/null
+++ b/src/Test/L0/Worker/JobContextL0.cs
@@ -0,0 +1,38 @@
+using System;
+using GitHub.DistributedTask.Pipelines.ContextData;
+using GitHub.Runner.Worker;
+using Xunit;
+
+namespace GitHub.Runner.Common.Tests.Worker
+{
+    public class JobContextL0
+    {
+        [Fact]
+        public void CheckRunId_SetAndGet_WorksCorrectly()
+        {
+            var ctx = new JobContext();
+            ctx.CheckRunId = 12345;
+            Assert.Equal(12345, ctx.CheckRunId);
+            Assert.True(ctx.TryGetValue("check_run_id", out var value));
+            Assert.IsType<NumberContextData>(value);
+            Assert.Equal(12345, ((NumberContextData)value).Value);
+        }
+
+        [Fact]
+        public void CheckRunId_NotSet_ReturnsNull()
+        {
+            var ctx = new JobContext();
+            Assert.Null(ctx.CheckRunId);
+            Assert.False(ctx.TryGetValue("check_run_id", out var value));
+        }
+
+        [Fact]
+        public void CheckRunId_SetNull_RemovesKey()
+        {
+            var ctx = new JobContext();
+            ctx.CheckRunId = 12345;
+            ctx.CheckRunId = null;
+            Assert.Null(ctx.CheckRunId);
+        }
+    }
+}
diff --git a/src/Test/L0/Worker/JobExtensionL0.cs b/src/Test/L0/Worker/JobExtensionL0.cs
index 3f5e074a8..9ce99070b 100644
--- a/src/Test/L0/Worker/JobExtensionL0.cs
+++ b/src/Test/L0/Worker/JobExtensionL0.cs
@@ -4,6 +4,8 @@ using System.Linq;
 using System.Runtime.CompilerServices;
 using System.Threading;
 using System.Threading.Tasks;
+using GitHub.DistributedTask.ObjectTemplating.Tokens;
+using GitHub.DistributedTask.Pipelines.ObjectTemplating;
 using GitHub.DistributedTask.WebApi;
 using GitHub.Runner.Worker;
 using Moq;
@@ -25,6 +27,9 @@ namespace GitHub.Runner.Common.Tests.Worker
         private Mock<IContainerOperationProvider> _containerProvider;
         private Mock<IDiagnosticLogManager> _diagnosticLogManager;
         private Mock<IJobHookProvider> _jobHookProvider;
+        private Mock<ISnapshotOperationProvider> _snapshotOperationProvider;
+
+        private Pipelines.Snapshot _requestedSnapshot;
         private CancellationTokenSource _tokenSource;

         private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
@@ -41,7 +46,16 @@ namespace GitHub.Runner.Common.Tests.Worker
             _directoryManager.Setup(x => x.PrepareDirectory(It.IsAny<IExecutionContext>(), It.IsAny<Pipelines.WorkspaceOptions>()))
                 .Returns(new TrackingConfig() { PipelineDirectory = "runner", WorkspaceDirectory = "runner/runner" });
             _jobHookProvider = new Mock<IJobHookProvider>();
+            _snapshotOperationProvider = new Mock<ISnapshotOperationProvider>();
+            _requestedSnapshot = null;
+            _snapshotOperationProvider
+                .Setup(p => p.CreateSnapshotRequestAsync(It.IsAny<IExecutionContext>(), It.IsAny<object>()))
+                .Returns((IExecutionContext _, object data) =>
+                {
+                    _requestedSnapshot = data as Pipelines.Snapshot;
+                    return Task.CompletedTask;
+                });
             IActionRunner step1 = new ActionRunner();
             IActionRunner step2 = new ActionRunner();
             IActionRunner step3 = new ActionRunner();
@@ -100,7 +114,7 @@ namespace GitHub.Runner.Common.Tests.Worker
             };

             Guid jobId = Guid.NewGuid();
-            _message = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), steps, null, null, null, null);
+            _message = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, "test", "test", null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), steps, null, null, null, null, null);
             GitHubContext github = new();
             github["repository"] = new Pipelines.ContextData.StringContextData("actions/runner");
             github["secret_source"] = new Pipelines.ContextData.StringContextData("Actions");
@@ -125,6 +139,7 @@ namespace GitHub.Runner.Common.Tests.Worker
             hc.SetSingleton(_directoryManager.Object);
             hc.SetSingleton(_diagnosticLogManager.Object);
             hc.SetSingleton(_jobHookProvider.Object);
+            hc.SetSingleton(_snapshotOperationProvider.Object);
             hc.EnqueueInstance(_logger.Object); // JobExecutionContext
             hc.EnqueueInstance(_logger.Object); // job start hook
             hc.EnqueueInstance(_logger.Object); // Initial Job
@@ -443,5 +458,114 @@ namespace GitHub.Runner.Common.Tests.Worker
                 Assert.Equal(0, _jobEc.PostJobSteps.Count);
             }
         }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public async Task EnsureNoSnapshotPostJobStep()
+        {
+            using (TestHostContext hc = CreateTestContext())
+            {
+                var jobExtension = new JobExtension();
+                jobExtension.Initialize(hc);
+
+                _actionManager.Setup(x => x.PrepareActionsAsync(It.IsAny<IExecutionContext>(), It.IsAny<IEnumerable<Pipelines.JobStep>>(), It.IsAny<Guid>()))
+                    .Returns(Task.FromResult(new PrepareResult(new List<JobExtensionRunner>(), new Dictionary<Guid, IActionRunner>())));
+
+                _message.Snapshot = null;
+                await jobExtension.InitializeJob(_jobEc, _message);
+
+                var postJobSteps = _jobEc.PostJobSteps;
+                Assert.Equal(0, postJobSteps.Count);
+            }
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public Task EnsureSnapshotPostJobStepForStringToken()
+        {
+            var snapshot = new Pipelines.Snapshot("TestImageNameFromStringToken");
+            var imageNameValueStringToken = new StringToken(null, null, null, snapshot.ImageName);
+            return EnsureSnapshotPostJobStepForToken(imageNameValueStringToken, snapshot);
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public Task EnsureSnapshotPostJobStepForMappingToken()
+        {
+            var snapshot = new Pipelines.Snapshot("TestImageNameFromMappingToken");
+            var imageNameValueStringToken = new StringToken(null, null, null, snapshot.ImageName);
+            var mappingToken = new MappingToken(null, null, null)
+            {
+                { new StringToken(null,null,null, PipelineTemplateConstants.ImageName), imageNameValueStringToken }
+            };
+
+            return EnsureSnapshotPostJobStepForToken(mappingToken, snapshot);
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public Task EnsureSnapshotPostJobStepForMappingToken_WithIf_Is_False()
+        {
+            var snapshot = new Pipelines.Snapshot("TestImageNameFromMappingToken", condition: $"{PipelineTemplateConstants.Success}() && 1==0", version: "2.*");
+            var imageNameValueStringToken = new StringToken(null, null, null, snapshot.ImageName);
+            var condition = new StringToken(null, null, null, snapshot.Condition);
+            var version = new StringToken(null, null, null, snapshot.Version);
+
+            var mappingToken = new MappingToken(null, null, null)
+            {
+                { new StringToken(null,null,null, PipelineTemplateConstants.ImageName), imageNameValueStringToken },
+                { new StringToken(null,null,null, PipelineTemplateConstants.If), condition },
+                { new StringToken(null,null,null, PipelineTemplateConstants.CustomImageVersion), version }
+            };
+
+            return EnsureSnapshotPostJobStepForToken(mappingToken, snapshot, skipSnapshotStep: true);
+        }
+
+        private async Task EnsureSnapshotPostJobStepForToken(TemplateToken snapshotToken, Pipelines.Snapshot expectedSnapshot, bool skipSnapshotStep = false)
+        {
+            using (TestHostContext hc = CreateTestContext())
+            {
+                var jobExtension = new JobExtension();
+                jobExtension.Initialize(hc);
+
+                _actionManager.Setup(x => x.PrepareActionsAsync(It.IsAny<IExecutionContext>(), It.IsAny<IEnumerable<Pipelines.JobStep>>(), It.IsAny<Guid>()))
+                    .Returns(Task.FromResult(new PrepareResult(new List<JobExtensionRunner>(), new Dictionary<Guid, IActionRunner>())));
+
+                _message.Snapshot = snapshotToken;
+
+                await jobExtension.InitializeJob(_jobEc, _message);
+
+                var postJobSteps = _jobEc.PostJobSteps;
+
+                Assert.Equal(1, postJobSteps.Count);
+                var snapshotStep = postJobSteps.First();
+                _jobEc.JobSteps.Enqueue(snapshotStep);
+
+                var _stepsRunner = new StepsRunner();
+                _stepsRunner.Initialize(hc);
+                await _stepsRunner.RunAsync(_jobEc);
+
+                Assert.Equal("Create custom image", snapshotStep.DisplayName);
+                Assert.Equal(expectedSnapshot.Condition ?? $"{PipelineTemplateConstants.Success}()", snapshotStep.Condition);
+
+                // Run the mock snapshot step, so we can verify it was executed with the expected snapshot object.
+                // await snapshotStep.RunAsync();
+                if (skipSnapshotStep)
+                {
+                    Assert.Null(_requestedSnapshot);
+                }
+                else
+                {
+                    Assert.NotNull(_requestedSnapshot);
+                    Assert.Equal(expectedSnapshot.ImageName, _requestedSnapshot.ImageName);
+                    Assert.Equal(expectedSnapshot.Condition ?? $"{PipelineTemplateConstants.Success}()", _requestedSnapshot.Condition);
+                    Assert.Equal(expectedSnapshot.Version ?? "1.*", _requestedSnapshot.Version);
+                }
+            }
+        }
     }
 }
diff --git a/src/Test/L0/Worker/JobRunnerL0.cs b/src/Test/L0/Worker/JobRunnerL0.cs
index d4aaf809c..e8011b9b0 100644
--- a/src/Test/L0/Worker/JobRunnerL0.cs
+++ b/src/Test/L0/Worker/JobRunnerL0.cs
@@ -101,6 +101,7 @@ namespace GitHub.Runner.Common.Tests.Worker
                 testName, testName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, new ActionsEnvironmentReference("staging"),
+                null,
                 messageType: messageType);
             message.Variables[Constants.Variables.System.Culture] = "en-US";
             message.Resources.Endpoints.Add(new ServiceEndpoint()
diff --git a/src/Test/L0/Worker/OutputManagerL0.cs b/src/Test/L0/Worker/OutputManagerL0.cs
index 9d7f5d3f2..7005547b5 100644
--- a/src/Test/L0/Worker/OutputManagerL0.cs
+++ b/src/Test/L0/Worker/OutputManagerL0.cs
@@ -937,6 +937,62 @@ namespace GitHub.Runner.Common.Tests.Worker
             }
         }
 
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public async void MatcherDefaultFromPath()
+        {
+            var matchers = new IssueMatchersConfig
+            {
+                Matchers =
+                {
+                    new IssueMatcherConfig
+                    {
+                        Owner = "my-matcher-1",
+                        FromPath = "workflow-repo/some-project/some-project.proj",
+                        Patterns = new[]
+                        {
+                            new IssuePatternConfig
+                            {
+                                Pattern = @"(.+): (.+)",
+                                File = 1,
+                                Message = 2,
+                            },
+                        },
+                    },
+                },
+            };
+            using (var hostContext = Setup(matchers: matchers))
+            using (_outputManager)
+            {
+                // Setup github.workspace, github.repository
+                var workDirectory = hostContext.GetDirectory(WellKnownDirectory.Work);
+                ArgUtil.NotNullOrEmpty(workDirectory, nameof(workDirectory));
+                Directory.CreateDirectory(workDirectory);
+                var workspaceDirectory = Path.Combine(workDirectory, "workspace");
+                Directory.CreateDirectory(workspaceDirectory);
+                _executionContext.Setup(x => x.GetGitHubContext("workspace")).Returns(workspaceDirectory);
+                _executionContext.Setup(x => x.GetGitHubContext("repository")).Returns("my-org/workflow-repo");
+
+                // Setup a git repository
+                var repositoryPath = Path.Combine(workspaceDirectory, "workflow-repo");
+                await CreateRepository(hostContext, repositoryPath, "https://github.com/my-org/workflow-repo");
+
+                // Create a test file
+                var filePath = Path.Combine(repositoryPath, "some-project", "some-directory", "some-file.txt");
+                Directory.CreateDirectory(Path.GetDirectoryName(filePath));
+                File.WriteAllText(filePath, "");
+
+                // Process
+                Process("some-directory/some-file.txt: some error");
+                Assert.Equal(1, _issues.Count);
+                Assert.Equal("some error", _issues[0].Item1.Message);
+                Assert.Equal("some-project/some-directory/some-file.txt", _issues[0].Item1.Data["file"]);
+                Assert.Equal(0, _commands.Count);
+                Assert.Equal(0, _messages.Count);
+            }
+        }
+
         [Fact]
         [Trait("Level", "L0")]
         [Trait("Category", "Worker")]
diff --git a/src/Test/L0/Worker/SnapshotOperationProviderL0.cs b/src/Test/L0/Worker/SnapshotOperationProviderL0.cs
new file mode 100644
index 000000000..4f747ae8e
--- /dev/null
+++ b/src/Test/L0/Worker/SnapshotOperationProviderL0.cs
@@ -0,0 +1,78 @@
+#nullable enable
+using System;
+using System.IO;
+using System.Runtime.CompilerServices;
+using GitHub.DistributedTask.Pipelines;
+using GitHub.Runner.Sdk;
+using GitHub.Runner.Worker;
+using Moq;
+using Xunit;
+
+namespace GitHub.Runner.Common.Tests.Worker;
+
+public class SnapshotOperationProviderL0
+{
+    private Mock<IExecutionContext>? _ec;
+    private SnapshotOperationProvider? _snapshotOperationProvider;
+    private string? _snapshotRequestFilePath;
+    private string? _snapshotRequestDirectoryPath;
+
+    [Theory]
+    [InlineData(true)]
+    [InlineData(false)]
+    [Trait("Level", "L0")]
+    [Trait("Category", "Worker")]
+    public async void CreateSnapshotRequestAsync(bool shouldSnapshotDirectoryAlreadyExist)
+    {
+        using (TestHostContext testHostContext = CreateTestHostContext())
+        {
+            //Arrange
+            Setup(testHostContext, shouldSnapshotDirectoryAlreadyExist);
+            var expectedSnapshot = new Snapshot(Guid.NewGuid().ToString());
+
+            //Act
+            await _snapshotOperationProvider!.CreateSnapshotRequestAsync(_ec!.Object, expectedSnapshot);
+
+            //Assert
+            var actualSnapshot = IOUtil.LoadObject<Snapshot>(_snapshotRequestFilePath);
+            Assert.NotNull(actualSnapshot);
+            Assert.Equal(expectedSnapshot.ImageName, actualSnapshot!.ImageName);
+            _ec.Verify(ec => ec.Write(null, $"Request written to: {_snapshotRequestFilePath}"), Times.Once);
+            _ec.Verify(ec => ec.Write(null, "This request will be processed after the job completes. You will not receive any feedback on the snapshot process within the workflow logs of this job."), Times.Once);
+            _ec.Verify(ec => ec.Write(null, "If the snapshot process is successful, you should see a new image with the requested name in the list of available custom images when creating a new GitHub-hosted Runner."), Times.Once);
+            _ec.VerifyNoOtherCalls();
+        }
+    }
+
+    private void Setup(IHostContext hostContext, bool shouldSnapshotDirectoryAlreadyExist)
+    {
+        _ec = new Mock<IExecutionContext>();
+        _snapshotOperationProvider = new SnapshotOperationProvider();
+        _snapshotOperationProvider.Initialize(hostContext);
+        _snapshotRequestFilePath = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Root), ".snapshot", "request.json");
+        _snapshotRequestDirectoryPath = Path.GetDirectoryName(_snapshotRequestFilePath);
+
+        if (_snapshotRequestDirectoryPath != null)
+        {
+            // Clean up any existing snapshot directory and its contents before starting the test.
+            if (Directory.Exists(_snapshotRequestDirectoryPath))
+            {
+                Directory.Delete(_snapshotRequestDirectoryPath, true);
+            }
+
+            if (shouldSnapshotDirectoryAlreadyExist)
+            {
+                // Create a fresh snapshot directory if it's required for the test.
+                Directory.CreateDirectory(_snapshotRequestDirectoryPath);
+            }
+        }
+    }
+
+    private TestHostContext CreateTestHostContext([CallerMemberName] string testName = "")
+    {
+        var testHostContext = new TestHostContext(this, testName);
+        _ec = new Mock<IExecutionContext>();
+        _ec.Object.Initialize(testHostContext);
+        return testHostContext;
+    }
+}
diff --git a/src/Test/L0/Worker/StepHostL0.cs b/src/Test/L0/Worker/StepHostL0.cs
index f6b58890c..bac7d41d9 100644
--- a/src/Test/L0/Worker/StepHostL0.cs
+++ b/src/Test/L0/Worker/StepHostL0.cs
@@ -75,10 +75,10 @@
                 .ReturnsAsync(0);
 
                 // Act.
-                var nodeVersion = await sh.DetermineNodeRuntimeVersion(_ec.Object, "node16");
+                var nodeVersion = await sh.DetermineNodeRuntimeVersion(_ec.Object, "node20");
 
                 // Assert.
-                Assert.Equal("node16_alpine", nodeVersion);
+                Assert.Equal("node20_alpine", nodeVersion);
             }
         }
@@ -129,10 +129,10 @@
                 .ReturnsAsync(0);
 
                 // Act.
-                var nodeVersion = await sh.DetermineNodeRuntimeVersion(_ec.Object, "node16");
+                var nodeVersion = await sh.DetermineNodeRuntimeVersion(_ec.Object, "node20");
 
                 // Assert.
-                Assert.Equal("node16", nodeVersion);
+                Assert.Equal("node20", nodeVersion);
             }
         }
@@ -162,6 +162,60 @@
                 Assert.Equal("node20", nodeVersion);
             }
         }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public async Task DetermineNode24RuntimeVersionInAlpineContainerAsync()
+        {
+            using (TestHostContext hc = CreateTestContext())
+            {
+                // Arrange.
+                var sh = new ContainerStepHost();
+                sh.Initialize(hc);
+                sh.Container = new ContainerInfo() { ContainerId = "1234abcd" };
+
+                _dc.Setup(d => d.DockerExec(_ec.Object, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<List<string>>()))
+                    .Callback((IExecutionContext ec, string id, string options, string command, List<string> output) =>
+                    {
+                        output.Add("alpine");
+                    })
+                    .ReturnsAsync(0);
+
+                // Act.
+                var nodeVersion = await sh.DetermineNodeRuntimeVersion(_ec.Object, "node24");
+
+                // Assert.
+                Assert.Equal("node24_alpine", nodeVersion);
+            }
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public async Task DetermineNode24RuntimeVersionInUnknownContainerAsync()
+        {
+            using (TestHostContext hc = CreateTestContext())
+            {
+                // Arrange.
+                var sh = new ContainerStepHost();
+                sh.Initialize(hc);
+                sh.Container = new ContainerInfo() { ContainerId = "1234abcd" };
+
+                _dc.Setup(d => d.DockerExec(_ec.Object, It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<List<string>>()))
+                    .Callback((IExecutionContext ec, string id, string options, string command, List<string> output) =>
+                    {
+                        output.Add("github");
+                    })
+                    .ReturnsAsync(0);
+
+                // Act.
+                var nodeVersion = await sh.DetermineNodeRuntimeVersion(_ec.Object, "node24");
+
+                // Assert.
+                Assert.Equal("node24", nodeVersion);
+            }
+        }
 #endif
     }
 }
diff --git a/src/Test/L0/Worker/StepHostNodeVersionL0.cs b/src/Test/L0/Worker/StepHostNodeVersionL0.cs
new file mode 100644
index 000000000..6ba8c9fa4
--- /dev/null
+++ b/src/Test/L0/Worker/StepHostNodeVersionL0.cs
@@ -0,0 +1,63 @@
+using GitHub.Runner.Worker;
+using GitHub.Runner.Worker.Handlers;
+using Moq;
+using System;
+using System.Runtime.InteropServices;
+using Xunit;
+
+namespace GitHub.Runner.Common.Tests.Worker
+{
+    public sealed class StepHostNodeVersionL0
+    {
+        private Mock<IExecutionContext> _ec;
+        private DefaultStepHost _defaultStepHost;
+
+        public StepHostNodeVersionL0()
+        {
+            _ec = new Mock<IExecutionContext>();
+            _defaultStepHost = new DefaultStepHost();
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public void CheckNodeVersionForArm32_Node24OnArm32Linux()
+        {
+            // Test via NodeUtil directly
+            string preferredVersion = "node24";
+            var (nodeVersion, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion);
+
+            // On ARM32 Linux, we should fall back to node20
+            bool isArm32 = RuntimeInformation.ProcessArchitecture == Architecture.Arm ||
+                           Environment.GetEnvironmentVariable("PROCESSOR_ARCHITECTURE")?.Contains("ARM") == true;
+            bool isLinux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
+
+            if (isArm32 && isLinux)
+            {
+                // Should downgrade to node20 on ARM32 Linux
+                Assert.Equal("node20", nodeVersion);
+                Assert.NotNull(warningMessage);
+                Assert.Contains("Node 24 is not supported on Linux ARM32 platforms", warningMessage);
+            }
+            else
+            {
+                // On non-ARM32 platforms, should pass through the version unmodified
+                Assert.Equal("node24", nodeVersion);
+                Assert.Null(warningMessage);
+            }
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public void CheckNodeVersionForArm32_PassThroughNonNode24Versions()
+        {
+            string preferredVersion = "node20";
+            var (nodeVersion, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion);
+
+            // Should never modify the version for non-node24 inputs
+            Assert.Equal("node20", nodeVersion);
+            Assert.Null(warningMessage);
+        }
+    }
+}
diff --git a/src/Test/L0/Worker/WorkerL0.cs b/src/Test/L0/Worker/WorkerL0.cs
index fe1f9c02c..defcc9814 100644
--- a/src/Test/L0/Worker/WorkerL0.cs
+++ b/src/Test/L0/Worker/WorkerL0.cs
@@ -67,7 +67,7 @@
                     new Pipelines.ContextData.DictionaryContextData()
                 },
             };
-            var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, JobId, jobName, jobName, new StringToken(null, null, null, "ubuntu"), sidecarContainers, null, variables, new List<MaskHint>(), resources, context, null, actions, null, null, null, null);
+            var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, JobId, jobName, jobName, new StringToken(null, null, null, "ubuntu"), sidecarContainers, null, variables, new List<MaskHint>(), resources, context, null, actions, null, null, null, null, null);
             return jobRequest;
         }
diff --git a/src/Test/Test.csproj b/src/Test/Test.csproj
index 0f3360130..a5a19aea7 100644
--- a/src/Test/Test.csproj
+++ b/src/Test/Test.csproj
@@ -1,9 +1,9 @@
-    net6.0
+    net8.0
     win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64
     true
-    NU1701;NU1603;NU1603;xUnit2013;
+    NU1701;NU1603;NU1603;xUnit2013;SYSLIB0050;SYSLIB0051
@@ -15,13 +15,12 @@
-
-
-
-
-
+
+
+
+
-
+
diff --git a/src/Test/TestData/node24action.yml b/src/Test/TestData/node24action.yml
new file mode 100644
index 000000000..653e558a0
--- /dev/null
+++ b/src/Test/TestData/node24action.yml
@@ -0,0 +1,20 @@
+name: 'Hello World'
+description: 'Greet the world and record the time'
+author: 'Test Corporation'
+inputs:
+  greeting: # id of input
+    description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout'
+    required: true
+    default: 'Hello'
+    deprecationMessage: 'This property has been deprecated'
+  entryPoint: # id of input
+    description: 'optional docker entrypoint overwrite.'
+    required: false
+outputs:
+  time: # id of output
+    description: 'The time we did the greeting'
+icon: 'hello.svg' # vector art to display in the GitHub Marketplace
+color: 'green' # optional, decorates the entry in the GitHub Marketplace
+runs:
+  using: 'node24'
+  main: 'main.js'
\ No newline at end of file
diff --git a/src/dev.sh b/src/dev.sh
index fa637d116..8457772ab 100755
--- a/src/dev.sh
+++ b/src/dev.sh
@@ -17,7 +17,7 @@ LAYOUT_DIR="$SCRIPT_DIR/../_layout"
 DOWNLOAD_DIR="$SCRIPT_DIR/../_downloads/netcore2x"
 PACKAGE_DIR="$SCRIPT_DIR/../_package"
 DOTNETSDK_ROOT="$SCRIPT_DIR/../_dotnetsdk"
-DOTNETSDK_VERSION="6.0.418"
+DOTNETSDK_VERSION="8.0.413"
 DOTNETSDK_INSTALLDIR="$DOTNETSDK_ROOT/$DOTNETSDK_VERSION"
 RUNNER_VERSION=$(cat runnerversion)
 
@@ -199,6 +199,7 @@ function package ()
     popd > /dev/null
 }
 
+# Install .NET SDK
 if [[ (! -d "${DOTNETSDK_INSTALLDIR}") || (! -e "${DOTNETSDK_INSTALLDIR}/.${DOTNETSDK_VERSION}") || (! -e "${DOTNETSDK_INSTALLDIR}/dotnet") ]]; then
 
     # Download dotnet SDK to ../_dotnetsdk directory
diff --git a/src/global.json b/src/global.json
index b014aea08..1b22b198f 100644
--- a/src/global.json
+++ b/src/global.json
@@ -1,5 +1,5 @@
 {
   "sdk": {
-    "version": "6.0.418"
+    "version": "8.0.413"
   }
 }
diff --git a/src/runnerversion b/src/runnerversion
index 96e7232dc..2a32b8c5c 100644
--- a/src/runnerversion
+++ b/src/runnerversion
@@ -1 +1 @@
-2.312.0
+2.328.0