Compare commits

..

3 Commits

Author / SHA1 / Message / Date

Ferenc Hammerl
c577be6e62  Update releaseVersion  2022-10-04 17:28:24 +02:00

Ferenc Hammerl
3dd27755cf  Release notes for 2.298.1 (#2174)  2022-10-04 17:14:13 +02:00
  * Fix incorrect template vars to show SHA for WIN-ARM64 (#2171)
  * Update releaseNote.md
  * Update runnerversion

Francesco Renzi
3b8cfdae4e  Update releaseVersion to 2.298.0  2022-10-04 12:18:21 +00:00
219 changed files with 5118 additions and 12195 deletions

View File

@@ -1,27 +0,0 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
{
"name": "Actions Runner Devcontainer",
"image": "mcr.microsoft.com/devcontainers/base:focal",
"features": {
"ghcr.io/devcontainers/features/docker-in-docker:1": {},
"ghcr.io/devcontainers/features/dotnet": {
"version": "6.0.405"
},
"ghcr.io/devcontainers/features/node:1": {
"version": "16"
}
},
"customizations": {
"vscode": {
"extensions": [
"ms-azuretools.vscode-docker",
"ms-dotnettools.csharp",
"eamodio.gitlens"
]
}
},
// dotnet restore to install dependencies so OmniSharp works out of the box
// src/Test restores all other projects it references, src/Runner.PluginHost is not one of them
"postCreateCommand": "dotnet restore src/Test && dotnet restore src/Runner.PluginHost",
"remoteUser": "vscode"
}

View File

@@ -1,7 +1,6 @@
# https://editorconfig.org/
[*]
charset = utf-8 # Set default charset to utf-8
insert_final_newline = true # ensure all files end with a single newline
trim_trailing_whitespace = true # attempt to remove trailing whitespace on save

View File

@@ -1,8 +1,5 @@
blank_issues_enabled: false
contact_links:
- name: 🛑 Request a feature in the runner application
url: https://github.com/orgs/community/discussions/categories/actions-and-packages
about: If you have feature requests for GitHub Actions, please use the Actions and Packages section on the Github Product Feedback page.
- name: ✅ Support for GitHub Actions
url: https://github.community/c/code-to-cloud/52
about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum.

View File

@@ -0,0 +1,32 @@
---
name: 🛑 Request a feature in the runner application
about: If you have feature requests for GitHub Actions, please use the "feedback and suggestions for GitHub Actions" link below.
title: ''
labels: enhancement
assignees: ''
---
<!--
👋 You're opening a request for an enhancement in the GitHub Actions **runner application**.
🛑 Please stop if you're not certain that the feature you want is in the runner application - if you have a suggestion for improving GitHub Actions, please see the [GitHub Actions Feedback](https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback) discussion forum which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃
Some additional useful links:
* If you have found a security issue [please submit it here](https://hackerone.com/github)
* If you have questions or issues with the service, writing workflows or actions, then please [visit the GitHub Community Forum's Actions Board](https://github.community/t5/GitHub-Actions/bd-p/actions)
* If you are having an issue or have a question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)
If you have a feature request that is relevant to this repository, the runner, then please include the information below:
-->
**Describe the enhancement**
A clear and concise description of what the features or enhancement you need.
**Code Snippet**
If applicable, add a code snippet.
**Additional information**
Add any other context about the feature here.
NOTE: if the feature request has been agreed upon then the assignee will create an ADR. See docs/adrs/README.md

View File

@@ -27,7 +27,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
@@ -38,4 +38,4 @@ jobs:
working-directory: src
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v1

View File

@@ -1,24 +0,0 @@
name: Lint
on:
pull_request:
branches: [ main ]
jobs:
build:
name: Lint
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
with:
# Ensure full list of changed files within `super-linter`
fetch-depth: 0
- name: Run linters
uses: github/super-linter@v4
env:
DEFAULT_BRANCH: ${{ github.base_ref }}
EDITORCONFIG_FILE_NAME: .editorconfig
LINTER_RULES_PATH: /src/
VALIDATE_ALL_CODEBASE: false
VALIDATE_CSHARP: true

View File

@@ -1,65 +0,0 @@
name: Publish Runner Image
on:
workflow_dispatch:
inputs:
runnerVersion:
type: string
description: Version of the runner being installed
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Compute image version
id: image
uses: actions/github-script@v6
with:
script: |
const fs = require('fs');
const inputRunnerVersion = "${{ github.event.inputs.runnerVersion }}"
if (inputRunnerVersion) {
console.log(`Using input runner version ${inputRunnerVersion}`)
core.setOutput('version', inputRunnerVersion);
return
}
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
console.log(`Using runner version ${runnerVersion}`)
core.setOutput('version', runnerVersion);
- name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
- name: Log into registry ${{ env.REGISTRY }}
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v3
with:
context: ./images
tags: |
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
build-args: |
RUNNER_VERSION=${{ steps.image.outputs.version }}
push: true
labels: |
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
org.opencontainers.image.licenses=MIT
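The workflow above is triggered manually via `workflow_dispatch`, with an optional `runnerVersion` input that falls back to `src/runnerversion` when omitted. As a rough sketch (the workflow file name and repository slug are assumptions, since this compare view does not show file names), it could be dispatched from the GitHub CLI like this:

```bash
# Sketch: dispatch the image publish workflow with an explicit runner version.
# The workflow file name and repo slug are illustrative; the runnerVersion
# input name comes from the workflow definition above.
gh workflow run publish-runner-image.yml \
  --repo actions/runner \
  -f runnerVersion=2.298.1

# Omitting -f runnerVersion=... makes the github-script step fall back to the
# version stored in src/runnerversion.
```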

View File

@@ -131,7 +131,7 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
shell: bash
id: sha
name: Compute SHA256
@@ -140,8 +140,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noexternals
name: Compute SHA256
@@ -150,8 +150,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noruntime
name: Compute SHA256
@@ -160,8 +160,8 @@ jobs:
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "${{matrix.runtime}}-sha256=$sha" >> $GITHUB_OUTPUT
echo "sha256=$sha" >> $GITHUB_OUTPUT
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noruntime_noexternals
name: Compute SHA256
@@ -660,52 +660,3 @@ jobs:
asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
publish-image:
needs: release
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Compute image version
id: image
uses: actions/github-script@v6
with:
script: |
const fs = require('fs');
const runnerVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '')
console.log(`Using runner version ${runnerVersion}`)
core.setOutput('version', runnerVersion);
- name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
- name: Log into registry ${{ env.REGISTRY }}
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v3
with:
context: ./images
tags: |
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
build-args: |
RUNNER_VERSION=${{ steps.image.outputs.version }}
push: true
labels: |
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
org.opencontainers.image.licenses=MIT
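The first hunks in this file swap the deprecated `::set-output` workflow command for the `$GITHUB_OUTPUT` environment file. A minimal side-by-side sketch (the output name mirrors the steps above; the step id is whatever `id:` the step declares):

```bash
# Deprecated: emit a workflow command on stdout.
echo "::set-output name=sha256::$sha"

# Replacement: append a key=value line to the file referenced by $GITHUB_OUTPUT.
echo "sha256=$sha" >> "$GITHUB_OUTPUT"

# Either form is read by later steps as ${{ steps.<step-id>.outputs.sha256 }}.
```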

View File

@@ -22,4 +22,4 @@ Runner releases:
## Contribute
We accept contributions in the form of issues and pull requests. The runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page. [Read more about our guidelines here](docs/contribute.md) before contributing.
We accept contributions in the form of issues and pull requests. [Read more here](docs/contribute.md) before contributing.

View File

@@ -1,65 +0,0 @@
# ADR 2494: Runner Image Tags
**Date**: 2023-03-17
**Status**: Accepted<!-- |Accepted|Rejected|Superceded|Deprecated -->
## Context
Following the [adoption of actions-runner-controller by GitHub](https://github.com/actions/actions-runner-controller/discussions/2072) and the introduction of the new runner scale set autoscaling mode, we needed to provide a basic runner image that could be used off the shelf without much friction.
The [current runner image](https://github.com/actions/runner/pkgs/container/actions-runner) is published to GHCR. Each release of this image is tagged with the runner version and the most recent release is also tagged with `latest`.
While the use of `latest` is common practice, we recommend that users pin a specific version of the runner image for a predictable runtime and improved security posture. However, we still notice that a large number of end users are relying on the `latest` tag & raising issues when they encounter problems.
Add to that, the community actions-runner-controller maintainers have issued a [deprecation notice](https://github.com/actions/actions-runner-controller/issues/2056) of the `latest` tag for the existing runner images (https://github.com/orgs/actions-runner-controller/packages).
## Decision
Proceed with Option 2, keeping the `latest` tag and adding the `NOTES.txt` file to our helm charts with the notice.
### Option 1: Remove the `latest` tag
By removing the `latest` tag, we have to proceed with either of these options:
1. Remove the runner image reference in the `values.yaml` provided with the `gha-runner-scale-set` helm chart and mark these fields as required so that users have to explicitly specify a runner image and a specific tag. This will obviously introduce more friction for users who want to start using actions-runner-controller for the first time.
```yaml
spec:
containers:
- name: runner
image: ""
tag: ""
command: ["/home/runner/run.sh"]
```
1. Pin a specific runner image tag in the `values.yaml` provided with the `gha-runner-scale-set` helm chart. This will reduce friction for users who want to start using actions-runner-controller for the first time but will require us to update the `values.yaml` with every new runner release.
```yaml
spec:
containers:
- name: runner
image: "ghcr.io/actions/actions-runner"
tag: "v2.300.0"
command: ["/home/runner/run.sh"]
```
### Option 2: Keep the `latest` tag
Keeping the `latest` tag is also a reasonable option especially if we don't expect to make any breaking changes to the runner image. We could enhance this by adding a [NOTES.txt](https://helm.sh/docs/chart_template_guide/notes_files/) to the helm chart which will be displayed to the user after a successful helm install/upgrade. This will help users understand the implications of using the `latest` tag and how to pin a specific version of the runner image.
The runner image release workflow will need to be updated so that the image is pushed to GHCR and tagged only when the runner rollout has reached all scale units.
## Consequences
Proceeding with **option 1** means:
1. We will enhance the runtime predictability and security posture of our end users
1. We will have to update the `values.yaml` with every new runner release (that can be automated)
1. We will introduce friction for users who want to start using actions-runner-controller for the first time
Proceeding with **option 2** means:
1. We will have to continue to maintain the `latest` tag
1. We will assume that end users will be able to handle the implications of using the `latest` tag
1. Runner image release workflow needs to be updated
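For illustration, the trade-off between the two tagging styles comes down to what a pull resolves to over time (the image reference and tag values below follow the ADR's own examples and may not match the exact published tags):

```bash
# Floating tag: resolves to whatever the most recent runner release is at pull time.
docker pull ghcr.io/actions/actions-runner:latest

# Pinned tag: resolves to the same image every time, giving a predictable runtime
# and a better security posture, at the cost of manual (or automated) bumps.
docker pull ghcr.io/actions/actions-runner:v2.300.0
```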

View File

@@ -64,4 +64,4 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
## Still not working?
Contact [GitHub Support](https://support.github.com) if you have further questions, or log an issue at https://github.com/actions/runner if you think it's a runner issue.
Contact [GitHub Support](https://support.github.com] if you have further questions, or log an issue at https://github.com/actions/runner if you think it's a runner issue.

View File

@@ -1,6 +1,6 @@
# Contributions
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors.Please note the runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page.
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors.
> IMPORTANT: Building your own runner is critical for the dev inner loop process when contributing changes. However, only runners built and distributed by GitHub (releases) are supported in production. Be aware that workflows and orchestrations run service side with the runner being a remote process to run steps. For that reason, the service can pull the runner forward so customizations can be lost.
@@ -157,12 +157,4 @@ cat (Runner/Worker)_TIMESTAMP.log # view your log file
## Styling
We use the .NET Foundation and CoreCLR style guidelines [located here](
https://github.com/dotnet/runtime/blob/main/docs/coding-guidelines/coding-style.md)
### Format C# Code
To format both staged and unstaged .cs files
```
cd ./src
./dev.(cmd|sh) format
```
https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md)

View File

@@ -35,7 +35,7 @@ All the configs below can be found in `.vscode/launch.json`.
If you launch `Run` or `Run [build]`, it starts a process called `Runner.Listener`.
This process will receive any job queued on this repository if the job runs on matching labels (e.g `runs-on: self-hosted`).
Once a job is received, a `Runner.Listener` starts a new process of `Runner.Worker`.
Since this is a different process, you can't use the same debugger session debug it.
Since this is a diferent process, you can't use the same debugger session debug it.
Instead, a parallel debugging session has to be started, using a different launch config.
Luckily, VS Code supports multiple parallel debugging sessions.

View File

@@ -1,49 +0,0 @@
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0 as build
ARG RUNNER_VERSION
ARG RUNNER_ARCH="x64"
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.3.1
ARG DOCKER_VERSION=20.10.23
RUN apt update -y && apt install curl unzip -y
WORKDIR /actions-runner
RUN curl -f -L -o runner.tar.gz https://github.com/actions/runner/releases/download/v${RUNNER_VERSION}/actions-runner-linux-${RUNNER_ARCH}-${RUNNER_VERSION}.tar.gz \
&& tar xzf ./runner.tar.gz \
&& rm runner.tar.gz
RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-container-hooks/releases/download/v${RUNNER_CONTAINER_HOOKS_VERSION}/actions-runner-hooks-k8s-${RUNNER_CONTAINER_HOOKS_VERSION}.zip \
&& unzip ./runner-container-hooks.zip -d ./k8s \
&& rm runner-container-hooks.zip
RUN export DOCKER_ARCH=x86_64 \
&& if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
&& curl -fLo docker.tgz https://download.docker.com/linux/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \
&& tar zxvf docker.tgz \
&& rm -rf docker.tgz
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0
ENV DEBIAN_FRONTEND=noninteractive
ENV RUNNER_MANUALLY_TRAP_SIG=1
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
RUN apt-get update -y \
&& apt-get install -y --no-install-recommends \
sudo \
&& rm -rf /var/lib/apt/lists/*
RUN adduser --disabled-password --gecos "" --uid 1001 runner \
&& groupadd docker --gid 123 \
&& usermod -aG sudo runner \
&& usermod -aG docker runner \
&& echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers
WORKDIR /home/runner
COPY --chown=runner:docker --from=build /actions-runner .
RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker
USER runner
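Assuming this Dockerfile sits in the `images/` directory that the publish workflows build from, a local build and quick interactive test might look roughly like this (the version, repository URL, and token are placeholders):

```bash
# Build the runner image locally; RUNNER_VERSION selects which release tarball to download.
docker build ./images \
  --build-arg RUNNER_VERSION=2.298.1 \
  -t actions-runner:local

# Start an interactive container as the 'runner' user; config.sh and run.sh come from
# the runner tarball copied into /home/runner in the final stage.
docker run -it --rm actions-runner:local \
  bash -c './config.sh --url https://github.com/<org>/<repo> --token <TOKEN> && ./run.sh'
```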

View File

@@ -1,22 +1,12 @@
## Features
- Runner changes for communication with Results service (#2510, #2531, #2535, #2516)
- Add `*.ghe.localhost` domains to hosted server check (#2536)
- Add `OrchestrationId` to user-agent for better telemetry correlation. (#2568)
- Service containers startup error logs are now included in workflow's logs (#2110)
## Bugs
- Fix JIT configurations on Windows (#2497)
- Guard against NullReference while creating HostContext (#2343)
- Handles broken symlink in `Which` (#2150, #2196)
- Adding curl retry for external tool downloads (#2552, #2557)
- Limit the time we wait for waiting websocket to connect. (#2554)
- Fixed missing SHA for Windows arm64 release archive (#2171)
## Misc
- Bump container hooks version to 0.3.1 in runner image (#2496)
- Runner changes to communicate with vNext services (#2487, #2500, #2505, #2541, #2547)
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
See https://docs.github.com/en/enterprise-cloud@latest/actions/hosting-your-own-runners/adding-self-hosted-runners_
- Added a feature flag to start warning on `save-state` and `set-output` deprecation (#2164)
- Prepare supporting `vars` in workflow templates (#2096)
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.

View File

@@ -1 +1 @@
<Update to ./src/runnerversion when creating release>
2.298.1

View File

@@ -13,7 +13,7 @@ set -e
flags_found=false
while getopts 's:g:n:r:u:l:df' opt; do
while getopts 's:g:n:r:u:l:' opt; do
flags_found=true
case $opt in
@@ -35,12 +35,6 @@ while getopts 's:g:n:r:u:l:df' opt; do
l)
labels=$OPTARG
;;
f)
replace='true'
;;
d)
disableupdate='true'
;;
*)
echo "
Runner Service Installer
@@ -55,9 +49,7 @@ Usage:
-n optional name of the runner, defaults to hostname
-r optional name of the runner group to add the runner to, defaults to the Default group
-u optional user svc will run as, defaults to current
-l optional list of labels (split by comma) applied on the runner
-d optional allow runner to remain on the current version for one month after the release of a newer version
-f optional replace any existing runner with the same name"
-l optional list of labels (split by comma) applied on the runner"
exit 0
;;
esac
@@ -177,8 +169,8 @@ fi
echo
echo "Configuring ${runner_name} @ $runner_url"
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup \"$runner_group\"} ${disableupdate:+--disableupdate}"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN ${replace:+--replace} --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup "$runner_group"} ${disableupdate:+--disableupdate}
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup \"$runner_group\"}"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup "$runner_group"}
#---------------------------------------
# Configuring as a service
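Two small shell idioms do the heavy lifting in this installer: the `getopts` option string (a trailing `:` means the flag takes an argument) and `${var:+word}` expansion, which emits `word` only when the variable is set, so unused flags simply drop out of the final `config.sh` command line. A standalone sketch with illustrative flag names:

```bash
#!/bin/bash
# Sketch of the idioms used by the service installer above.

labels=""
disableupdate=""

# In 'l:d', 'l' takes an argument (trailing colon) and 'd' is a boolean switch.
while getopts 'l:d' opt; do
  case $opt in
    l) labels=$OPTARG ;;
    d) disableupdate='true' ;;
    *) echo "usage: $0 [-l labels] [-d]" >&2; exit 1 ;;
  esac
done

# ${var:+word} expands to 'word' only when var is non-empty, so optional
# flags disappear entirely when their variable was never set.
echo ./config.sh ${labels:+--labels "$labels"} ${disableupdate:+--disableupdate}
```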

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#/bin/bash
set -e
@@ -12,7 +12,7 @@ set -e
#
# Usage:
# export RUNNER_CFG_PAT=<yourPAT>
# ./delete.sh <scope> [<name>]
# ./delete.sh scope name
#
# scope required repo (:owner/:repo) or org (:organization)
# name optional defaults to hostname. name to delete
@@ -26,17 +26,17 @@ set -e
runner_scope=${1}
runner_name=${2}
function fatal()
echo "Deleting runner ${runner_name} @ ${runner_scope}"
function fatal()
{
echo "error: $1" >&2
exit 1
}
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
if [ -z "${runner_name}" ]; then fatal "supply name as argument 2"; fi
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
if [ -z "${runner_name}" ]; then runner_name=`hostname`; fi
echo "Deleting runner ${runner_name} @ ${runner_scope}"
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"

View File

@@ -1 +1 @@
39f2a931565d6a10e695ac8ed14bb9dcbb568151410349b32dbf9c27bae29602
1d709d93e5d3c6c6c656a61aa6c1781050224788a05b0e6ecc4c3c0408bdf89c

View File

@@ -1 +1 @@
29ffb303537d8ba674fbebc7729292c21c4ebd17b3198f91ed593ef4cbbb67b5
b92a47cfeaad02255b1f7a377060651b73ae5e5db22a188dbbcb4183ab03a03d

View File

@@ -1 +1 @@
de6868a836fa3cb9e5ddddbc079da1c25e819aa2d2fc193cc9931c353687c57c
68a9a8ef0843a8bb74241894f6f63fd76241a82295c5337d3cc7a940a314c78e

View File

@@ -1 +1 @@
339d3e1a5fd28450c0fe6cb820cc7aae291f0f9e2d153ac34e1f7b080e35d30e
02c7126ff4d63ee2a0ae390c81434c125630522aadf35903bbeebb1a99d8af99

View File

@@ -1 +1 @@
dcb7f606c1d7d290381e5020ee73e7f16dcbd2f20ac9b431362ccbb5120d449c
c9d5a542f8d765168855a89e83ae0a8970d00869041c4f9a766651c04c72b212

View File

@@ -1 +1 @@
1bbcb0e9a2cf4be4b1fce77458de139b70ac58efcbb415a6db028b9373ae1673
39d0683f0f115a211cb10c473e9574c16549a19d4e9a6c637ded3d7022bf809f

View File

@@ -1 +1 @@
44cd25f3c104d0abb44d262397a80e0b2c4f206465c5d899a22eec043dac0fb3
d94f2fbaf210297162bc9f3add819d73682c3aa6899e321c3872412b924d5504

View File

@@ -1 +1 @@
3807dcbf947e840c33535fb466b096d76bf09e5c0254af8fc8cbbb24c6388222
6ed30a2c1ee403a610d63e82bb230b9ba846a9c25cec9e4ea8672fb6ed4e1a51

View File

@@ -1 +1 @@
ee01eee80cd8a460a4b9780ee13fdd20f25c59e754b4ccd99df55fbba2a85634
711c30c51ec52c9b7a9a2eb399d6ab2ab5ee1dc72de11879f2f36f919f163d78

View File

@@ -1 +1 @@
a9fb9c14e24e79aec97d4da197dd7bfc6364297d6fce573afb2df48cc9a931f8
a49479ca4b4988a06c097e8d22c51fd08a11c13f40807366236213d0e008cf6a

View File

@@ -1 +1 @@
a4e0e8fc62eba0967a39c7d693dcd0aeb8b2bed0765f9c38df80d42884f65341
cc4708962a80325de0baa5ae8484e0cb9ae976ac6a4178c1c0d448b8c52bd7f7

View File

@@ -1 +1 @@
17ac17fbe785b3d6fa2868d8d17185ebfe0c90b4b0ddf6b67eac70e42bcd989b
8e97df75230b843462a9b4c578ccec604ee4b4a1066120c85b04374317fa372b

View File

@@ -1 +1 @@
89f24657a550f1e818b0e9975e5b80edcf4dd22b7d4bccbb9e48e37f45d30fb1
e5dace2d41cc0682d096dcce4970079ad48ec7107e46195970eecfdb3df2acef

View File

@@ -1 +1 @@
24fd131b5dce33ef16038b771407bc0507da8682a72fb3b7780607235f76db0b
f75a671e5a188c76680739689aa75331a2c09d483dce9c80023518c48fd67a18

View File

@@ -14,7 +14,7 @@
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^5.15.0",
"@vercel/ncc": "^0.36.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^8.11.0",
"eslint-plugin-github": "^4.3.5",
"prettier": "^1.19.1",
@@ -346,10 +346,11 @@
"url": "https://opencollective.com/typescript-eslint"
}
},
"node_modules/@vercel/ncc": {
"version": "0.36.0",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.36.0.tgz",
"integrity": "sha512-/ZTUJ/ZkRt694k7KJNimgmHjtQcRuVwsST2Z6XfYveQIuBbHR+EqkTc1jfgPkQmMyk/vtpxo3nVxe8CNuau86A==",
"node_modules/@zeit/ncc": {
"version": "0.20.5",
"resolved": "https://registry.npmjs.org/@zeit/ncc/-/ncc-0.20.5.tgz",
"integrity": "sha512-XU6uzwvv95DqxciQx+aOLhbyBx/13ky+RK1y88Age9Du3BlA4mMPCy13BGjayOrrumOzlq1XV3SD/BWiZENXlw==",
"deprecated": "@zeit/ncc is no longer maintained. Please use @vercel/ncc instead.",
"dev": true,
"bin": {
"ncc": "dist/ncc/cli.js"
@@ -1721,9 +1722,9 @@
"dev": true
},
"node_modules/json5": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz",
"integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==",
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz",
"integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==",
"dev": true,
"dependencies": {
"minimist": "^1.2.0"
@@ -1823,9 +1824,9 @@
}
},
"node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
@@ -2746,10 +2747,10 @@
"eslint-visitor-keys": "^3.0.0"
}
},
"@vercel/ncc": {
"version": "0.36.0",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.36.0.tgz",
"integrity": "sha512-/ZTUJ/ZkRt694k7KJNimgmHjtQcRuVwsST2Z6XfYveQIuBbHR+EqkTc1jfgPkQmMyk/vtpxo3nVxe8CNuau86A==",
"@zeit/ncc": {
"version": "0.20.5",
"resolved": "https://registry.npmjs.org/@zeit/ncc/-/ncc-0.20.5.tgz",
"integrity": "sha512-XU6uzwvv95DqxciQx+aOLhbyBx/13ky+RK1y88Age9Du3BlA4mMPCy13BGjayOrrumOzlq1XV3SD/BWiZENXlw==",
"dev": true
},
"acorn": {
@@ -3755,9 +3756,9 @@
"dev": true
},
"json5": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz",
"integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==",
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz",
"integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==",
"dev": true,
"requires": {
"minimist": "^1.2.0"
@@ -3839,9 +3840,9 @@
}
},
"minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"requires": {
"brace-expansion": "^1.1.7"
}

View File

@@ -26,7 +26,7 @@
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^5.15.0",
"@vercel/ncc": "^0.36.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^8.11.0",
"eslint-plugin-github": "^4.3.5",
"prettier": "^1.19.1",

View File

@@ -5,7 +5,7 @@ PRECACHE=$2
NODE_URL=https://nodejs.org/dist
UNOFFICIAL_NODE_URL=https://unofficial-builds.nodejs.org/download/release
NODE12_VERSION="12.22.7"
NODE16_VERSION="16.16.0"
NODE16_VERSION="16.13.0"
get_abs_path() {
# exploits the fact that pwd will print abs path when no args
@@ -55,23 +55,12 @@ function acquireExternalTool() {
# Download from source to the partial file.
echo "Downloading $download_source"
mkdir -p "$(dirname "$download_target")" || checkRC 'mkdir'
CURL_VERSION=$(curl --version | awk 'NR==1{print $2}')
echo "Curl version: $CURL_VERSION"
# curl -f Fail silently (no output at all) on HTTP errors (H)
# -k Allow connections to SSL sites without certs (H)
# -S Show error. With -s, make curl show errors when they occur
# -L Follow redirects (H)
# -o FILE Write to FILE instead of stdout
# --retry 3 Retries transient errors 3 times (timeouts, 5xx)
if [[ "$(printf '%s\n' "7.71.0" "$CURL_VERSION" | sort -V | head -n1)" != "7.71.0" ]]; then
# Curl version is less than or equal to 7.71.0, skipping retry-all-errors flag
curl -fkSL --retry 3 -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
else
# Curl version is greater than 7.71.0, running curl with --retry-all-errors flag
curl -fkSL --retry 3 --retry-all-errors -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
fi
curl -fkSL -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl'
# Move the partial file to the download target.
mv "$partial_target" "$download_target" || checkRC 'mv'

View File

@@ -24,8 +24,7 @@ if (exitServiceAfterNFailures <= 0) {
exitServiceAfterNFailures = NaN;
}
var unknownFailureRetryCount = 0;
var retriableFailureRetryCount = 0;
var consecutiveFailureCount = 0;
var gracefulShutdown = function () {
console.log("Shutting down runner listener");
@@ -63,8 +62,7 @@ var runService = function () {
listener.stdout.on("data", (data) => {
if (data.toString("utf8").includes("Listening for Jobs")) {
unknownFailureRetryCount = 0;
retriableFailureRetryCount = 0;
consecutiveFailureCount = 0;
}
process.stdout.write(data.toString("utf8"));
});
@@ -94,38 +92,24 @@ var runService = function () {
console.log(
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
);
unknownFailureRetryCount = 0;
retriableFailureRetryCount++;
if (retriableFailureRetryCount >= 10) {
console.error(
"Stopping the runner after 10 consecutive re-tryable failures"
);
stopping = true;
}
consecutiveFailureCount = 0;
} else if (code === 3 || code === 4) {
console.log(
"Runner listener exit because of updating, re-launch runner in 5 seconds."
);
unknownFailureRetryCount = 0;
retriableFailureRetryCount++;
if (retriableFailureRetryCount >= 10) {
console.error(
"Stopping the runner after 10 consecutive re-tryable failures"
);
stopping = true;
}
consecutiveFailureCount = 0;
} else {
var messagePrefix = "Runner listener exit with undefined return code";
unknownFailureRetryCount++;
retriableFailureRetryCount = 0;
consecutiveFailureCount++;
if (
!isNaN(exitServiceAfterNFailures) &&
unknownFailureRetryCount >= exitServiceAfterNFailures
consecutiveFailureCount >= exitServiceAfterNFailures
) {
console.error(
`${messagePrefix}, exiting service after ${unknownFailureRetryCount} consecutive failures`
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
);
stopping = true
gracefulShutdown();
return;
} else {
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
}

File diff suppressed because it is too large

View File

@@ -18,20 +18,6 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
# Wait for docker to start
if [ ! -z "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS" ]; then
if [ "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS" -gt 0 ]; then
echo "Waiting for docker to be ready."
for i in $(seq "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS"); do
if docker ps > /dev/null 2>&1; then
echo "Docker is ready."
break
fi
"$DIR"/safe_sleep.sh 1
done
fi
fi
updateFile="update.finished"
"$DIR"/bin/Runner.Listener run $*

View File

@@ -9,79 +9,16 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
run() {
# run the helper process which keep the listener alive
while :;
do
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
"$DIR"/run-helper.sh $*
returnCode=$?
if [[ $returnCode -eq 2 ]]; then
echo "Restarting runner..."
else
echo "Exiting runner..."
exit 0
fi
done
}
runWithManualTrap() {
# Set job control
set -m
trap 'kill -INT -$PID' INT TERM
# run the helper process which keep the listener alive
while :;
do
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
"$DIR"/run-helper.sh $* &
PID=$!
wait -f $PID
returnCode=$?
if [[ $returnCode -eq 2 ]]; then
echo "Restarting runner..."
else
echo "Exiting runner..."
# Unregister signal handling before exit
trap - INT TERM
# wait for last parts to be logged
wait $PID
exit $returnCode
fi
done
}
function updateCerts() {
local sudo_prefix=""
local user_id=`id -u`
if [ $user_id -ne 0 ]; then
if [[ ! -x "$(command -v sudo)" ]]; then
echo "Warning: failed to update certificate store: sudo is required but not found"
return 1
else
sudo_prefix="sudo"
fi
fi
if [[ -x "$(command -v update-ca-certificates)" ]]; then
eval $sudo_prefix "update-ca-certificates"
elif [[ -x "$(command -v update-ca-trust)" ]]; then
eval $sudo_prefix "update-ca-trust"
# run the helper process which keep the listener alive
while :;
do
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
"$DIR"/run-helper.sh $*
returnCode=$?
if [[ $returnCode -eq 2 ]]; then
echo "Restarting runner..."
else
echo "Warning: failed to update certificate store: update-ca-certificates or update-ca-trust not found. This can happen if you're using a different runner base image."
return 1
echo "Exiting runner..."
exit 0
fi
}
if [[ ! -z "$RUNNER_UPDATE_CA_CERTS" ]]; then
updateCerts
fi
if [[ -z "$RUNNER_MANUALLY_TRAP_SIG" ]]; then
run $*
else
runWithManualTrap $*
fi
done
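The `runWithManualTrap` path above enables job control, runs the helper in the background, and forwards INT/TERM to the child's process group so the listener can exit cleanly before the wrapper does. A minimal sketch of that pattern (the child command is a stand-in for `run-helper.sh`):

```bash
#!/bin/bash
# Sketch of the manual signal-forwarding pattern used by run.sh above.
set -m                           # job control: the child gets its own process group

long_running_child() {           # stand-in for "$DIR"/run-helper.sh
  sleep 600
}

long_running_child &
PID=$!

# Forward Ctrl-C / termination to the child's whole process group (note the -$PID).
trap 'kill -INT -$PID' INT TERM

wait $PID
returnCode=$?

trap - INT TERM                  # unregister handlers before exiting
exit $returnCode
```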

View File

@@ -74,7 +74,6 @@ Microsoft.Win32.Registry.dll
mscordaccore.dll
mscordaccore_amd64_amd64_6.0.522.21309.dll
mscordaccore_arm64_arm64_6.0.522.21309.dll
mscordaccore_amd64_amd64_6.0.1322.58009.dll
mscordbi.dll
mscorlib.dll
mscorrc.debug.dll

View File

@@ -31,7 +31,7 @@ namespace GitHub.Runner.Common
new EscapeMapping(token: "%", replacement: "%25"),
};
private readonly Dictionary<string, string> _properties = new(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, string> _properties = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
public const string Prefix = "##[";
public const string _commandKey = "::";

View File

@@ -1,51 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(ActionsRunServer))]
public interface IActionsRunServer : IRunnerService
{
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);
}
public sealed class ActionsRunServer : RunnerService, IActionsRunServer
{
private bool _hasConnection;
private VssConnection _connection;
private TaskAgentHttpClient _taskAgentClient;
public async Task ConnectAsync(Uri serverUrl, VssCredentials credentials)
{
_connection = await EstablishVssConnection(serverUrl, credentials, TimeSpan.FromSeconds(100));
_taskAgentClient = _connection.GetClient<TaskAgentHttpClient>();
_hasConnection = true;
}
private void CheckConnection()
{
if (!_hasConnection)
{
throw new InvalidOperationException($"SetConnection");
}
}
public Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken cancellationToken)
{
CheckConnection();
var jobMessage = RetryRequest<AgentJobRequestMessage>(async () =>
{
return await _taskAgentClient.GetJobMessageAsync(id, cancellationToken);
}, cancellationToken);
return jobMessage;
}
}
}

View File

@@ -1,56 +0,0 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi.RawClient;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(BrokerServer))]
public interface IBrokerServer : IRunnerService
{
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken token, TaskAgentStatus status, string version);
}
public sealed class BrokerServer : RunnerService, IBrokerServer
{
private bool _hasConnection;
private Uri _brokerUri;
private RawConnection _connection;
private BrokerHttpClient _brokerHttpClient;
public async Task ConnectAsync(Uri serverUri, VssCredentials credentials)
{
_brokerUri = serverUri;
_connection = VssUtil.CreateRawConnection(serverUri, credentials);
_brokerHttpClient = await _connection.GetClientAsync<BrokerHttpClient>();
_hasConnection = true;
}
private void CheckConnection()
{
if (!_hasConnection)
{
throw new InvalidOperationException($"SetConnection");
}
}
public Task<TaskAgentMessage> GetRunnerMessageAsync(CancellationToken cancellationToken, TaskAgentStatus status, string version)
{
CheckConnection();
var jobMessage = RetryRequest<TaskAgentMessage>(
async () => await _brokerHttpClient.GetRunnerMessageAsync(version, status, cancellationToken), cancellationToken);
return jobMessage;
}
}
}

View File

@@ -50,12 +50,6 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)]
public string MonitorSocketAddress { get; set; }
[DataMember(EmitDefaultValue = false)]
public bool UseV2Flow { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ServerUrlV2 { get; set; }
[IgnoreDataMember]
public bool IsHostedServer
{
@@ -80,18 +74,17 @@ namespace GitHub.Runner.Common
{
get
{
Uri accountUri = new(this.ServerUrl);
Uri accountUri = new Uri(this.ServerUrl);
string repoOrOrgName = string.Empty;
if (accountUri.Host.EndsWith(".githubusercontent.com", StringComparison.OrdinalIgnoreCase) && !string.IsNullOrEmpty(this.GitHubUrl))
if (accountUri.Host.EndsWith(".githubusercontent.com", StringComparison.OrdinalIgnoreCase))
{
Uri gitHubUrl = new(this.GitHubUrl);
Uri gitHubUrl = new Uri(this.GitHubUrl);
// Use the "NWO part" from the GitHub URL path
repoOrOrgName = gitHubUrl.AbsolutePath.Trim('/');
}
if (string.IsNullOrEmpty(repoOrOrgName))
else
{
repoOrOrgName = accountUri.AbsolutePath.Split('/', StringSplitOptions.RemoveEmptyEntries).FirstOrDefault();
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
namespace GitHub.Runner.Common
{
@@ -90,6 +90,7 @@ namespace GitHub.Runner.Common
public static class Args
{
public static readonly string Auth = "auth";
public static readonly string JitConfig = "jitconfig";
public static readonly string Labels = "labels";
public static readonly string MonitorSocketAddress = "monitorsocketaddress";
public static readonly string Name = "name";
@@ -104,13 +105,11 @@ namespace GitHub.Runner.Common
public static readonly string Token = "token";
public static readonly string PAT = "pat";
public static readonly string WindowsLogonPassword = "windowslogonpassword";
public static readonly string JitConfig = "jitconfig";
public static string[] Secrets => new[]
{
PAT,
Token,
WindowsLogonPassword,
JitConfig,
};
}
@@ -129,10 +128,7 @@ namespace GitHub.Runner.Common
public static readonly string Check = "check";
public static readonly string Commit = "commit";
public static readonly string Ephemeral = "ephemeral";
public static readonly string GenerateServiceConfig = "generateServiceConfig";
public static readonly string Help = "help";
public static readonly string Local = "local";
public static readonly string NoDefaultLabels = "no-default-labels";
public static readonly string Replace = "replace";
public static readonly string DisableUpdate = "disableupdate";
public static readonly string Once = "once"; // Keep this around since customers still relies on it
@@ -160,17 +156,14 @@ namespace GitHub.Runner.Common
}
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
public static readonly Guid TelemetryRecordId = new Guid("11111111-1111-1111-1111-111111111111");
public static readonly string WorkerCrash = "WORKER_CRASH";
public static readonly string LowDiskSpace = "LOW_DISK_SPACE";
public static readonly string UnsupportedCommand = "UNSUPPORTED_COMMAND";
public static readonly string ResultsUploadFailure = "RESULTS_UPLOAD_FAILURE";
public static readonly string UnsupportedCommandMessage = "The `{0}` command is deprecated and will be disabled soon. Please upgrade to using Environment Files. For more information see: https://github.blog/changelog/2022-10-11-github-actions-deprecating-save-state-and-set-output-commands/";
public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/";
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
public static readonly string Node12DetectedAfterEndOfLife = "Node.js 12 actions are deprecated. Please update the following actions to use Node.js 16: {0}. For more information see: https://github.blog/changelog/2022-09-22-github-actions-all-actions-will-begin-running-on-node16-instead-of-node12/.";
public static readonly string Node12DetectedAfterEndOfLife = "Node.js 12 actions are deprecated. For more information see: https://github.blog/changelog/2022-09-22-github-actions-all-actions-will-begin-running-on-node16-instead-of-node12/. Please update the following actions to use Node.js 16: {0}";
}
public static class RunnerEvent
@@ -251,7 +244,6 @@ namespace GitHub.Runner.Common
// Set this env var to "node12" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";
}
public static class System
@@ -262,8 +254,6 @@ namespace GitHub.Runner.Common
public static readonly string AccessToken = "system.accessToken";
public static readonly string Culture = "system.culture";
public static readonly string PhaseDisplayName = "system.phaseDisplayName";
public static readonly string JobRequestType = "system.jobRequestType";
public static readonly string OrchestrationId = "system.orchestrationId";
}
}

View File

@@ -14,7 +14,7 @@ namespace GitHub.Runner.Common
public sealed class ExtensionManager : RunnerService, IExtensionManager
{
private readonly ConcurrentDictionary<Type, List<IExtension>> _cache = new();
private readonly ConcurrentDictionary<Type, List<IExtension>> _cache = new ConcurrentDictionary<Type, List<IExtension>>();
public List<T> GetExtensions<T>() where T : class, IExtension
{

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
@@ -51,12 +51,12 @@ namespace GitHub.Runner.Common
private static int _defaultLogRetentionDays = 30;
private static int[] _vssHttpMethodEventIds = new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 24 };
private static int[] _vssHttpCredentialEventIds = new int[] { 11, 13, 14, 15, 16, 17, 18, 20, 21, 22, 27, 29 };
private readonly ConcurrentDictionary<Type, object> _serviceInstances = new();
private readonly ConcurrentDictionary<Type, Type> _serviceTypes = new();
private readonly ConcurrentDictionary<Type, object> _serviceInstances = new ConcurrentDictionary<Type, object>();
private readonly ConcurrentDictionary<Type, Type> _serviceTypes = new ConcurrentDictionary<Type, Type>();
private readonly ISecretMasker _secretMasker = new SecretMasker();
private readonly List<ProductInfoHeaderValue> _userAgents = new() { new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version) };
private CancellationTokenSource _runnerShutdownTokenSource = new();
private object _perfLock = new();
private readonly List<ProductInfoHeaderValue> _userAgents = new List<ProductInfoHeaderValue>() { new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version) };
private CancellationTokenSource _runnerShutdownTokenSource = new CancellationTokenSource();
private object _perfLock = new object();
private Tracing _trace;
private Tracing _actionsHttpTrace;
private Tracing _netcoreHttpTrace;
@@ -66,7 +66,7 @@ namespace GitHub.Runner.Common
private IDisposable _diagListenerSubscription;
private StartupType _startupType;
private string _perfFile;
private RunnerWebProxy _webProxy = new();
private RunnerWebProxy _webProxy = new RunnerWebProxy();
public event EventHandler Unloading;
public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token;
@@ -94,13 +94,6 @@ namespace GitHub.Runner.Common
this.SecretMasker.AddValueEncoder(ValueEncoders.PowerShellPreAmpersandEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.PowerShellPostAmpersandEscape);
// Create StdoutTraceListener if ENV is set
StdoutTraceListener stdoutTraceListener = null;
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Agent.PrintLogToStdout)))
{
stdoutTraceListener = new StdoutTraceListener(hostType);
}
// Create the trace manager.
if (string.IsNullOrEmpty(logFile))
{
@@ -120,11 +113,11 @@ namespace GitHub.Runner.Common
// this should give us _diag folder under runner root directory
string diagLogDirectory = Path.Combine(new DirectoryInfo(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location)).Parent.FullName, Constants.Path.DiagDirectory);
_traceManager = new TraceManager(new HostTraceListener(diagLogDirectory, hostType, logPageSize, logRetentionDays), stdoutTraceListener, this.SecretMasker);
_traceManager = new TraceManager(new HostTraceListener(diagLogDirectory, hostType, logPageSize, logRetentionDays), this.SecretMasker);
}
else
{
_traceManager = new TraceManager(new HostTraceListener(logFile), stdoutTraceListener, this.SecretMasker);
_traceManager = new TraceManager(new HostTraceListener(logFile), this.SecretMasker);
}
_trace = GetTrace(nameof(HostContext));
@@ -220,26 +213,12 @@ namespace GitHub.Runner.Common
var runnerFile = GetConfigFile(WellKnownConfigFile.Runner);
if (File.Exists(runnerFile))
{
var runnerSettings = IOUtil.LoadObject<RunnerSettings>(runnerFile, true);
var runnerSettings = IOUtil.LoadObject<RunnerSettings>(runnerFile);
_userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture)));
_userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture)));
}
_userAgents.Add(new ProductInfoHeaderValue("CommitSHA", BuildConstants.Source.CommitHash));
var extraUserAgent = Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_EXTRA_USER_AGENT");
if (!string.IsNullOrEmpty(extraUserAgent))
{
var extraUserAgentSplit = extraUserAgent.Split('/', StringSplitOptions.RemoveEmptyEntries);
if (extraUserAgentSplit.Length != 2)
{
_trace.Error($"GITHUB_ACTIONS_RUNNER_EXTRA_USER_AGENT is not in the format of 'name/version'.");
}
var extraUserAgentHeader = new ProductInfoHeaderValue(extraUserAgentSplit[0], extraUserAgentSplit[1]);
_trace.Info($"Adding extra user agent '{extraUserAgentHeader}' to all HTTP requests.");
_userAgents.Add(extraUserAgentHeader);
}
}
public string GetDirectory(WellKnownDirectory directory)

View File

@@ -164,7 +164,7 @@ namespace GitHub.Runner.Common
{
if (_enableLogRetention)
{
DirectoryInfo diags = new(_logFileDirectory);
DirectoryInfo diags = new DirectoryInfo(_logFileDirectory);
var logs = diags.GetFiles($"{_logFilePrefix}*.log");
foreach (var log in logs)
{

View File

@@ -11,8 +11,6 @@ using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.Results.Client;
using GitHub.Services.WebApi;
using GitHub.Services.WebApi.Utilities.Internal;
@@ -199,15 +197,13 @@ namespace GitHub.Runner.Common
{
Trace.Info($"Attempting to start websocket client with delay {delay}.");
await Task.Delay(delay);
using var connectTimeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30));
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), connectTimeoutTokenSource.Token);
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), default(CancellationToken));
Trace.Info($"Successfully started websocket client.");
}
catch (Exception ex)
{
Trace.Info("Exception caught during websocket client connect, fallback of HTTP would be used now instead of websocket.");
Trace.Error(ex);
this._websocketClient = null;
}
}
@@ -254,7 +250,7 @@ namespace GitHub.Runner.Common
{
failedAttemptsToPostBatchedLinesByWebsocket++;
Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
Trace.Verbose(ex.ToString());
Trace.Error(ex);
if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
{
// let's consider failure percentage
@@ -309,7 +305,6 @@ namespace GitHub.Runner.Common
return _taskClient.CreateAttachmentAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, type, name, uploadStream, cancellationToken: cancellationToken);
}
public Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken)
{
CheckConnection();

View File

@@ -17,10 +17,9 @@ namespace GitHub.Runner.Common
TaskCompletionSource<int> JobRecordUpdated { get; }
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
Task ShutdownAsync();
void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false);
void Start(Pipelines.AgentJobRequestMessage jobRequest);
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord);
}
@@ -31,7 +30,6 @@ namespace GitHub.Runner.Common
private static readonly TimeSpan _delayForWebConsoleLineDequeue = TimeSpan.FromMilliseconds(500);
private static readonly TimeSpan _delayForTimelineUpdateDequeue = TimeSpan.FromMilliseconds(500);
private static readonly TimeSpan _delayForFileUploadDequeue = TimeSpan.FromMilliseconds(1000);
private static readonly TimeSpan _delayForResultsUploadDequeue = TimeSpan.FromMilliseconds(1000);
// Job message information
private Guid _scopeIdentifier;
@@ -41,36 +39,31 @@ namespace GitHub.Runner.Common
private Guid _jobTimelineRecordId;
// queue for web console line
private readonly ConcurrentQueue<ConsoleLineInfo> _webConsoleLineQueue = new();
private readonly ConcurrentQueue<ConsoleLineInfo> _webConsoleLineQueue = new ConcurrentQueue<ConsoleLineInfo>();
// queue for file upload (log file or attachment)
private readonly ConcurrentQueue<UploadFileInfo> _fileUploadQueue = new();
private readonly ConcurrentQueue<ResultsUploadFileInfo> _resultsFileUploadQueue = new();
private readonly ConcurrentQueue<UploadFileInfo> _fileUploadQueue = new ConcurrentQueue<UploadFileInfo>();
// queue for timeline or timeline record update (one queue per timeline)
private readonly ConcurrentDictionary<Guid, ConcurrentQueue<TimelineRecord>> _timelineUpdateQueue = new();
private readonly ConcurrentDictionary<Guid, ConcurrentQueue<TimelineRecord>> _timelineUpdateQueue = new ConcurrentDictionary<Guid, ConcurrentQueue<TimelineRecord>>();
// indicate how many timelines we have, we will process _timelineUpdateQueue base on the order of timeline in this list
private readonly List<Guid> _allTimelines = new();
private readonly List<Guid> _allTimelines = new List<Guid>();
// bufferd timeline records that fail to update
private readonly Dictionary<Guid, List<TimelineRecord>> _bufferedRetryRecords = new();
private readonly Dictionary<Guid, List<TimelineRecord>> _bufferedRetryRecords = new Dictionary<Guid, List<TimelineRecord>>();
// Task for each queue's dequeue process
private Task _webConsoleLineDequeueTask;
private Task _fileUploadDequeueTask;
private Task _resultsUploadDequeueTask;
private Task _timelineUpdateDequeueTask;
// common
private IJobServer _jobServer;
private IResultsServer _resultsServer;
private Task[] _allDequeueTasks;
private readonly TaskCompletionSource<int> _jobCompletionSource = new();
private readonly TaskCompletionSource<int> _jobRecordUpdated = new();
private readonly TaskCompletionSource<int> _jobCompletionSource = new TaskCompletionSource<int>();
private readonly TaskCompletionSource<int> _jobRecordUpdated = new TaskCompletionSource<int>();
private bool _queueInProcess = false;
private bool _resultsServiceOnly = false;
public TaskCompletionSource<int> JobRecordUpdated => _jobRecordUpdated;
@@ -86,49 +79,19 @@ namespace GitHub.Runner.Common
private bool _webConsoleLineAggressiveDequeue = true;
private bool _firstConsoleOutputs = true;
private bool _resultsClientInitiated = false;
private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_jobServer = hostContext.GetService<IJobServer>();
_resultsServer = hostContext.GetService<IResultsServer>();
}
public void Start(Pipelines.AgentJobRequestMessage jobRequest, bool resultServiceOnly = false)
public void Start(Pipelines.AgentJobRequestMessage jobRequest)
{
Trace.Entering();
_resultsServiceOnly = resultServiceOnly;
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (!resultServiceOnly)
{
_jobServer.InitializeWebsocketClient(serviceEndPoint);
}
// This code is usually wrapped by an instance of IExecutionContext which isn't available here.
jobRequest.Variables.TryGetValue("system.github.results_endpoint", out VariableValue resultsEndpointVariable);
var resultsReceiverEndpoint = resultsEndpointVariable?.Value;
if (serviceEndPoint?.Authorization != null &&
serviceEndPoint.Authorization.Parameters.TryGetValue("AccessToken", out var accessToken) &&
!string.IsNullOrEmpty(accessToken) &&
!string.IsNullOrEmpty(resultsReceiverEndpoint))
{
string liveConsoleFeedUrl = null;
Trace.Info("Initializing results client");
if (resultServiceOnly
&& serviceEndPoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl)
&& !string.IsNullOrEmpty(feedStreamUrl))
{
liveConsoleFeedUrl = feedStreamUrl;
}
_resultsServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), liveConsoleFeedUrl, accessToken);
_resultsClientInitiated = true;
}
_jobServer.InitializeWebsocketClient(serviceEndPoint);
if (_queueInProcess)
{
@@ -157,13 +120,10 @@ namespace GitHub.Runner.Common
Trace.Info("Start process file upload queue.");
_fileUploadDequeueTask = ProcessFilesUploadQueueAsync();
Trace.Info("Start results file upload queue.");
_resultsUploadDequeueTask = ProcessResultsUploadQueueAsync();
Trace.Info("Start process timeline update queue.");
_timelineUpdateDequeueTask = ProcessTimelinesUpdateQueueAsync();
_allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask, _resultsUploadDequeueTask };
_allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask };
_queueInProcess = true;
}
@@ -194,10 +154,6 @@ namespace GitHub.Runner.Common
await ProcessFilesUploadQueueAsync(runOnce: true);
Trace.Info("File upload queue drained.");
Trace.Verbose("Draining results upload queue.");
await ProcessResultsUploadQueueAsync(runOnce: true);
Trace.Info("Results upload queue drained.");
// ProcessTimelinesUpdateQueueAsync() will throw an exception during shutdown
// if any timeline records that failed to update contain output variables.
Trace.Verbose("Draining timeline update queue.");
@@ -207,9 +163,6 @@ namespace GitHub.Runner.Common
Trace.Info($"Disposing job server ...");
await _jobServer.DisposeAsync();
Trace.Info($"Disposing results server ...");
await _resultsServer.DisposeAsync();
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
}
@@ -251,45 +204,6 @@ namespace GitHub.Runner.Common
_fileUploadQueue.Enqueue(newFile);
}
public void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines)
{
if (!_resultsClientInitiated)
{
Trace.Verbose("Skipping results upload");
try
{
if (deleteSource)
{
File.Delete(path);
}
}
catch (Exception ex)
{
Trace.Info("Catch exception during delete skipped results upload file.");
Trace.Error(ex);
}
return;
}
// all parameters are non-null and the file path exists.
var newFile = new ResultsUploadFileInfo()
{
Name = name,
Path = path,
Type = type,
PlanId = _planId.ToString(),
JobId = _jobTimelineRecordId.ToString(),
RecordId = timelineRecordId,
DeleteSource = deleteSource,
Finalize = finalize,
FirstBlock = firstBlock,
TotalLines = totalLines,
};
Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to job {1} step {2}", newFile.Path, _jobTimelineRecordId, timelineRecordId);
_resultsFileUploadQueue.Enqueue(newFile);
}
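For orientation, a hedged usage sketch of the enqueue call above, roughly as a producer like the paging logger would invoke it; the names jobServerQueue, stepRecordId, blockPath and lineCount are illustrative and not taken from this diff:

// Enqueue one finished log block for a step; the queue decides whether it
// actually reaches the Results service based on _resultsClientInitiated.
jobServerQueue.QueueResultsUpload(
    stepRecordId,
    name: "ResultsLog",
    path: blockPath,
    type: "Results.Core.Log",
    deleteSource: true,
    finalize: false,
    firstBlock: blockPath.EndsWith(".1"),
    totalLines: lineCount);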
public void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord)
{
ArgUtil.NotEmpty(timelineId, nameof(timelineId));
@@ -323,8 +237,8 @@ namespace GitHub.Runner.Common
}
// Group console lines by the timeline record of each step
Dictionary<Guid, List<TimelineRecordLogLine>> stepsConsoleLines = new();
List<Guid> stepRecordIds = new(); // We need to keep lines in order
Dictionary<Guid, List<TimelineRecordLogLine>> stepsConsoleLines = new Dictionary<Guid, List<TimelineRecordLogLine>>();
List<Guid> stepRecordIds = new List<Guid>(); // We need to keep lines in order
int linesCounter = 0;
ConsoleLineInfo lineInfo;
while (_webConsoleLineQueue.TryDequeue(out lineInfo))
@@ -350,7 +264,7 @@ namespace GitHub.Runner.Common
{
// Split console lines into batches; each batch will contain at most 100 lines.
int batchCounter = 0;
List<List<TimelineRecordLogLine>> batchedLines = new();
List<List<TimelineRecordLogLine>> batchedLines = new List<List<TimelineRecordLogLine>>();
foreach (var line in stepsConsoleLines[stepRecordId])
{
var currentBatch = batchedLines.ElementAtOrDefault(batchCounter);
@@ -385,17 +299,10 @@ namespace GitHub.Runner.Common
{
try
{
// Give at most 60s for each request.
using (var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(60)))
{
if (_resultsServiceOnly)
{
await _resultsServer.AppendLiveConsoleFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, timeoutTokenSource.Token);
}
else
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, timeoutTokenSource.Token);
}
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, timeoutTokenSource.Token);
}
if (_firstConsoleOutputs)
@@ -431,7 +338,7 @@ namespace GitHub.Runner.Common
{
while (!_jobCompletionSource.Task.IsCompleted || runOnce)
{
List<UploadFileInfo> filesToUpload = new();
List<UploadFileInfo> filesToUpload = new List<UploadFileInfo>();
UploadFileInfo dequeueFile;
while (_fileUploadQueue.TryDequeue(out dequeueFile))
{
@@ -487,105 +394,17 @@ namespace GitHub.Runner.Common
}
}
private async Task ProcessResultsUploadQueueAsync(bool runOnce = false)
{
Trace.Info("Starting results-based upload queue...");
while (!_jobCompletionSource.Task.IsCompleted || runOnce)
{
List<ResultsUploadFileInfo> filesToUpload = new();
ResultsUploadFileInfo dequeueFile;
while (_resultsFileUploadQueue.TryDequeue(out dequeueFile))
{
filesToUpload.Add(dequeueFile);
// process at most 10 file uploads.
if (!runOnce && filesToUpload.Count > 10)
{
break;
}
}
if (filesToUpload.Count > 0)
{
if (runOnce)
{
Trace.Info($"Uploading {filesToUpload.Count} file(s) in one shot through results service.");
}
int errorCount = 0;
foreach (var file in filesToUpload)
{
try
{
if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
{
await UploadSummaryFile(file);
}
else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase))
{
if (file.RecordId != _jobTimelineRecordId)
{
Trace.Info($"Got a step log file to send to results service.");
await UploadResultsStepLogFile(file);
}
else if (file.RecordId == _jobTimelineRecordId)
{
Trace.Info($"Got a job log file to send to results service.");
await UploadResultsJobLogFile(file);
}
}
}
catch (Exception ex)
{
Trace.Info("Catch exception during file upload to results, keep going since the process is best effort.");
Trace.Error(ex);
errorCount++;
// If we hit any exceptions uploading to Results, let's skip any additional uploads to Results
_resultsClientInitiated = false;
SendResultsTelemetry(ex);
}
}
Trace.Info("Tried to upload {0} file(s) to results, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
}
if (runOnce)
{
break;
}
else
{
await Task.Delay(_delayForResultsUploadDequeue);
}
}
}
private void SendResultsTelemetry(Exception ex)
{
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception with results. {ex.Message}" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure;
var telemetryRecord = new TimelineRecord()
{
Id = Constants.Runner.TelemetryRecordId,
};
telemetryRecord.Issues.Add(issue);
QueueTimelineRecordUpdate(_jobTimelineId, telemetryRecord);
}
private async Task ProcessTimelinesUpdateQueueAsync(bool runOnce = false)
{
while (!_jobCompletionSource.Task.IsCompleted || runOnce)
{
List<PendingTimelineRecord> pendingUpdates = new();
List<PendingTimelineRecord> pendingUpdates = new List<PendingTimelineRecord>();
foreach (var timeline in _allTimelines)
{
ConcurrentQueue<TimelineRecord> recordQueue;
if (_timelineUpdateQueue.TryGetValue(timeline, out recordQueue))
{
List<TimelineRecord> records = new();
List<TimelineRecord> records = new List<TimelineRecord>();
TimelineRecord record;
while (recordQueue.TryDequeue(out record))
{
@@ -607,7 +426,7 @@ namespace GitHub.Runner.Common
// we need to track whether a new sub-timeline was created on the last run.
// if so, we need to continue updating timeline records even on the last run.
bool pendingSubtimelineUpdate = false;
List<Exception> mainTimelineRecordsUpdateErrors = new();
List<Exception> mainTimelineRecordsUpdateErrors = new List<Exception>();
if (pendingUpdates.Count > 0)
{
foreach (var update in pendingUpdates)
@@ -622,7 +441,7 @@ namespace GitHub.Runner.Common
foreach (var detailTimeline in update.PendingRecords.Where(r => r.Details != null))
{
if (!_resultsServiceOnly && !_allTimelines.Contains(detailTimeline.Details.Id))
if (!_allTimelines.Contains(detailTimeline.Details.Id))
{
try
{
@@ -644,27 +463,7 @@ namespace GitHub.Runner.Common
try
{
if (!_resultsServiceOnly)
{
await _jobServer.UpdateTimelineRecordsAsync(_scopeIdentifier, _hubName, _planId, update.TimelineId, update.PendingRecords, default(CancellationToken));
}
try
{
if (_resultsClientInitiated)
{
await _resultsServer.UpdateResultsWorkflowStepsAsync(_scopeIdentifier, _hubName, _planId, update.TimelineId, update.PendingRecords, default(CancellationToken));
}
}
catch (Exception e)
{
Trace.Info("Catch exception during update steps, skip update Results.");
Trace.Error(e);
_resultsClientInitiated = false;
SendResultsTelemetry(e);
}
await _jobServer.UpdateTimelineRecordsAsync(_scopeIdentifier, _hubName, _planId, update.TimelineId, update.PendingRecords, default(CancellationToken));
if (_bufferedRetryRecords.Remove(update.TimelineId))
{
Trace.Verbose("Cleanup buffered timeline record for timeline: {0}.", update.TimelineId);
@@ -730,7 +529,7 @@ namespace GitHub.Runner.Common
return timelineRecords;
}
Dictionary<Guid, TimelineRecord> dict = new();
Dictionary<Guid, TimelineRecord> dict = new Dictionary<Guid, TimelineRecord>();
foreach (TimelineRecord rec in timelineRecords)
{
if (rec == null)
@@ -756,17 +555,17 @@ namespace GitHub.Runner.Common
timelineRecord.State = rec.State ?? timelineRecord.State;
timelineRecord.WorkerName = rec.WorkerName ?? timelineRecord.WorkerName;
if (rec.ErrorCount > 0)
if (rec.ErrorCount != null && rec.ErrorCount > 0)
{
timelineRecord.ErrorCount = rec.ErrorCount;
}
if (rec.WarningCount > 0)
if (rec.WarningCount != null && rec.WarningCount > 0)
{
timelineRecord.WarningCount = rec.WarningCount;
}
if (rec.NoticeCount > 0)
if (rec.NoticeCount != null && rec.NoticeCount > 0)
{
timelineRecord.NoticeCount = rec.NoticeCount;
}
@@ -797,7 +596,7 @@ namespace GitHub.Runner.Common
foreach (var record in mergedRecords)
{
Trace.Verbose($" Record: t={record.RecordType}, n={record.Name}, s={record.State}, st={record.StartTime}, {record.PercentComplete}%, ft={record.FinishTime}, r={record.Result}: {record.CurrentOperation}");
if (record.Issues != null)
if (record.Issues != null && record.Issues.Count > 0)
{
foreach (var issue in record.Issues)
{
@@ -807,7 +606,7 @@ namespace GitHub.Runner.Common
}
}
if (record.Variables != null)
if (record.Variables != null && record.Variables.Count > 0)
{
foreach (var variable in record.Variables)
{
@@ -818,35 +617,33 @@ namespace GitHub.Runner.Common
return mergedRecords;
}
private async Task UploadFile(UploadFileInfo file)
{
bool uploadSucceed = false;
try
{
if (!_resultsServiceOnly)
if (String.Equals(file.Type, CoreAttachmentType.Log, StringComparison.OrdinalIgnoreCase))
{
if (String.Equals(file.Type, CoreAttachmentType.Log, StringComparison.OrdinalIgnoreCase))
// Create the log
var taskLog = await _jobServer.CreateLogAsync(_scopeIdentifier, _hubName, _planId, new TaskLog(String.Format(@"logs\{0:D}", file.TimelineRecordId)), default(CancellationToken));
// Upload the contents
using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
// Create the log
var taskLog = await _jobServer.CreateLogAsync(_scopeIdentifier, _hubName, _planId, new TaskLog(String.Format(@"logs\{0:D}", file.TimelineRecordId)), default(CancellationToken));
// Upload the contents
using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await _jobServer.AppendLogContentAsync(_scopeIdentifier, _hubName, _planId, taskLog.Id, fs, default(CancellationToken));
}
// Create a new record and only set the Log field
var attachmentUpdataRecord = new TimelineRecord() { Id = file.TimelineRecordId, Log = taskLog };
QueueTimelineRecordUpdate(file.TimelineId, attachmentUpdataRecord);
var logUploaded = await _jobServer.AppendLogContentAsync(_scopeIdentifier, _hubName, _planId, taskLog.Id, fs, default(CancellationToken));
}
else
// Create a new record and only set the Log field
var attachmentUpdataRecord = new TimelineRecord() { Id = file.TimelineRecordId, Log = taskLog };
QueueTimelineRecordUpdate(file.TimelineId, attachmentUpdataRecord);
}
else
{
// Create attachment
using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
// Create attachment
using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var result = await _jobServer.CreateAttachmentAsync(_scopeIdentifier, _hubName, _planId, file.TimelineId, file.TimelineRecordId, file.Type, file.Name, fs, default(CancellationToken));
}
var result = await _jobServer.CreateAttachmentAsync(_scopeIdentifier, _hubName, _planId, file.TimelineId, file.TimelineRecordId, file.Type, file.Name, fs, default(CancellationToken));
}
}
@@ -868,69 +665,6 @@ namespace GitHub.Runner.Common
}
}
}
private async Task UploadSummaryFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler summaryHandler = async (file) =>
{
await _resultsServer.CreateResultsStepSummaryAsync(file.PlanId, file.JobId, file.RecordId, file.Path, CancellationToken.None);
};
await UploadResultsFile(file, summaryHandler);
}
private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler stepLogHandler = async (file) =>
{
await _resultsServer.CreateResultsStepLogAsync(file.PlanId, file.JobId, file.RecordId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
};
await UploadResultsFile(file, stepLogHandler);
}
private async Task UploadResultsJobLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of job log file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler jobLogHandler = async (file) =>
{
await _resultsServer.CreateResultsJobLogAsync(file.PlanId, file.JobId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
};
await UploadResultsFile(file, jobLogHandler);
}
private async Task UploadResultsFile(ResultsUploadFileInfo file, ResultsFileUploadHandler uploadHandler)
{
if (!_resultsClientInitiated)
{
return;
}
bool uploadSucceed = false;
try
{
await uploadHandler(file);
uploadSucceed = true;
}
finally
{
if (uploadSucceed && file.DeleteSource)
{
try
{
File.Delete(file.Path);
}
catch (Exception ex)
{
Trace.Info("Exception encountered during deletion of a temporary file that was already successfully uploaded to results.");
Trace.Error(ex);
}
}
}
}
}
internal class PendingTimelineRecord
@@ -949,19 +683,6 @@ namespace GitHub.Runner.Common
public bool DeleteSource { get; set; }
}
internal class ResultsUploadFileInfo
{
public string Name { get; set; }
public string Type { get; set; }
public string Path { get; set; }
public string PlanId { get; set; }
public string JobId { get; set; }
public Guid RecordId { get; set; }
public bool DeleteSource { get; set; }
public bool Finalize { get; set; }
public bool FirstBlock { get; set; }
public long TotalLines { get; set; }
}
internal class ConsoleLineInfo
{

@@ -1,42 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Launch.Client;
using GitHub.Services.WebApi;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(LaunchServer))]
public interface ILaunchServer : IRunnerService
{
void InitializeLaunchClient(Uri uri, string token);
Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList, CancellationToken cancellationToken);
}
public sealed class LaunchServer : RunnerService, ILaunchServer
{
private LaunchHttpClient _launchClient;
public void InitializeLaunchClient(Uri uri, string token)
{
var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._launchClient = new LaunchHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
}
public Task<ActionDownloadInfoCollection> ResolveActionsDownloadInfoAsync(Guid planId, Guid jobId, ActionReferenceList actionReferenceList,
CancellationToken cancellationToken)
{
if (_launchClient != null)
{
return _launchClient.GetResolveActionsDownloadInfoAsync(planId, jobId, actionReferenceList,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Launch client is not initialized.");
}
}
}
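A hedged sketch of how a consumer might use the ILaunchServer service declared above; the URL, token and actionList values are placeholders, not values from this diff:

// Resolve where the requested actions should be downloaded from.
var launchServer = HostContext.GetService<ILaunchServer>();
launchServer.InitializeLaunchClient(new Uri("https://launch.example.invalid"), token);
var downloadInfo = await launchServer.ResolveActionsDownloadInfoAsync(
    planId, jobId, actionList, CancellationToken.None);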

@@ -21,12 +21,6 @@ namespace GitHub.Runner.Common
// 8 MB
public const int PageSize = 8 * 1024 * 1024;
// For Results
public static string BlocksFolder = "blocks";
// 2 MB
public const int BlockSize = 2 * 1024 * 1024;
private Guid _timelineId;
private Guid _timelineRecordId;
private FileStream _pageData;
@@ -38,13 +32,6 @@ namespace GitHub.Runner.Common
private string _pagesFolder;
private IJobServerQueue _jobServerQueue;
private string _resultsDataFileName;
private FileStream _resultsBlockData;
private StreamWriter _resultsBlockWriter;
private string _resultsBlockFolder;
private int _blockByteCount;
private int _blockCount;
public long TotalLines => _totalLines;
public override void Initialize(IHostContext hostContext)
@@ -52,10 +39,8 @@ namespace GitHub.Runner.Common
base.Initialize(hostContext);
_totalLines = 0;
_pagesFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), PagingFolder);
Directory.CreateDirectory(_pagesFolder);
_resultsBlockFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), BlocksFolder);
Directory.CreateDirectory(_resultsBlockFolder);
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
Directory.CreateDirectory(_pagesFolder);
}
public void Setup(Guid timelineId, Guid timelineRecordId)
@@ -75,17 +60,11 @@ namespace GitHub.Runner.Common
// lazy creation on write
if (_pageWriter == null)
{
NewPage();
}
if (_resultsBlockWriter == null)
{
NewBlock();
Create();
}
string line = $"{DateTime.UtcNow.ToString("O")} {message}";
_pageWriter.WriteLine(line);
_resultsBlockWriter.WriteLine(line);
_totalLines++;
if (line.IndexOf('\n') != -1)
@@ -99,25 +78,21 @@ namespace GitHub.Runner.Common
}
}
var bytes = System.Text.Encoding.UTF8.GetByteCount(line);
_byteCount += bytes;
_blockByteCount += bytes;
_byteCount += System.Text.Encoding.UTF8.GetByteCount(line);
if (_byteCount >= PageSize)
{
NewPage();
}
if (_blockByteCount >= BlockSize)
{
NewBlock();
}
}
public void End()
{
EndPage();
EndBlock(true);
}
private void Create()
{
NewPage();
}
private void NewPage()
@@ -142,27 +117,5 @@ namespace GitHub.Runner.Common
_jobServerQueue.QueueFileUpload(_timelineId, _timelineRecordId, "DistributedTask.Core.Log", "CustomToolLog", _dataFileName, true);
}
}
private void NewBlock()
{
EndBlock(false);
_blockByteCount = 0;
_resultsDataFileName = Path.Combine(_resultsBlockFolder, $"{_timelineId}_{_timelineRecordId}.{++_blockCount}");
_resultsBlockData = new FileStream(_resultsDataFileName, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite);
_resultsBlockWriter = new StreamWriter(_resultsBlockData, System.Text.Encoding.UTF8);
}
private void EndBlock(bool finalize)
{
if (_resultsBlockWriter != null)
{
_resultsBlockWriter.Flush();
_resultsBlockData.Flush();
_resultsBlockWriter.Dispose();
_resultsBlockWriter = null;
_resultsBlockData = null;
_jobServerQueue.QueueResultsUpload(_timelineRecordId, "ResultsLog", _resultsDataFileName, "Results.Core.Log", deleteSource: true, finalize, firstBlock: _resultsDataFileName.EndsWith(".1"), totalLines: _totalLines);
}
}
}
}
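To make the block scheme above concrete (illustrative ids): the third block of a step log is written to blocks/<timelineId>_<timelineRecordId>.3, firstBlock is true only for the file whose name ends in ".1", and finalize is passed as true only from End(), so the Results service can distinguish intermediate blocks from the closing one.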

@@ -76,7 +76,7 @@ namespace GitHub.Runner.Common
public async Task<WorkerMessage> ReceiveAsync(CancellationToken cancellationToken)
{
WorkerMessage result = new(MessageType.NotInitialized, string.Empty);
WorkerMessage result = new WorkerMessage(MessageType.NotInitialized, string.Empty);
result.MessageType = (MessageType)await _readStream.ReadInt32Async(cancellationToken);
result.Body = await _readStream.ReadStringAsync(cancellationToken);
Trace.Info($"Receiving message of length {result.Body.Length}, with hash '{IOUtil.GetSha256Hash(result.Body)}'");

@@ -291,7 +291,7 @@ namespace GitHub.Runner.Common
public static string GetEnvironmentVariable(this Process process, IHostContext hostContext, string variable)
{
var trace = hostContext.GetTrace(nameof(LinuxProcessExtensions));
Dictionary<string, string> env = new();
Dictionary<string, string> env = new Dictionary<string, string>();
if (Directory.Exists("/proc"))
{
@@ -322,8 +322,8 @@ namespace GitHub.Runner.Common
// It doesn't escape '=' or ' ', so we can't parse the output into a dictionary of all envs.
// So we only look for the env you request, in the format of variable=value. (it won't work if your variable contains = or space)
trace.Info($"Read env from output of `ps e -p {process.Id} -o command`");
List<string> psOut = new();
object outputLock = new();
List<string> psOut = new List<string>();
object outputLock = new object();
using (var p = hostContext.CreateService<IProcessInvoker>())
{
p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)

@@ -1,262 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http.Headers;
using System.Net.WebSockets;
using System.Security;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.Results.Client;
using GitHub.Services.WebApi.Utilities.Internal;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(ResultServer))]
public interface IResultsServer : IRunnerService, IAsyncDisposable
{
void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token);
Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
// logging and console
Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
CancellationToken cancellationToken);
Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize,
bool firstBlock, long lineCount, CancellationToken cancellationToken);
Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock,
long lineCount, CancellationToken cancellationToken);
Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
}
public sealed class ResultServer : RunnerService, IResultsServer
{
private ResultsHttpClient _resultsClient;
private ClientWebSocket _websocketClient;
private DateTime? _lastConnectionFailure;
private static readonly TimeSpan MinDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(100);
private static readonly TimeSpan MaxDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(500);
private Task _websocketConnectTask;
private String _liveConsoleFeedUrl;
private string _token;
public void InitializeResultsClient(Uri uri, string liveConsoleFeedUrl, string token)
{
var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
_token = token;
if (!string.IsNullOrEmpty(liveConsoleFeedUrl))
{
_liveConsoleFeedUrl = liveConsoleFeedUrl;
InitializeWebsocketClient(liveConsoleFeedUrl, token, TimeSpan.Zero, retryConnection: true);
}
}
public Task CreateResultsStepSummaryAsync(string planId, string jobId, Guid stepId, string file,
CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadStepSummaryAsync(planId, jobId, stepId, file,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize,
bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsStepLogAsync(planId, jobId, stepId, file, finalize, firstBlock,
lineCount, cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock,
long lineCount, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsJobLogAsync(planId, jobId, file, finalize, firstBlock, lineCount,
cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task UpdateResultsWorkflowStepsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId,
IEnumerable<TimelineRecord> records, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
try
{
var timelineRecords = records.ToList();
return _resultsClient.UpdateWorkflowStepsAsync(planId, new List<TimelineRecord>(timelineRecords),
cancellationToken: cancellationToken);
}
catch (Exception ex)
{
// Log error, but continue as this call is best-effort
Trace.Info($"Failed to update steps status due to {ex.GetType().Name}");
Trace.Error(ex);
}
}
throw new InvalidOperationException("Results client is not initialized.");
}
public ValueTask DisposeAsync()
{
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
GC.SuppressFinalize(this);
return ValueTask.CompletedTask;
}
private void InitializeWebsocketClient(string liveConsoleFeedUrl, string accessToken, TimeSpan delay, bool retryConnection = false)
{
if (string.IsNullOrEmpty(accessToken))
{
Trace.Info($"No access token from server");
return;
}
if (string.IsNullOrEmpty(liveConsoleFeedUrl))
{
Trace.Info($"No live console feed url from server");
return;
}
Trace.Info($"Creating websocket client ..." + liveConsoleFeedUrl);
this._websocketClient = new ClientWebSocket();
this._websocketClient.Options.SetRequestHeader("Authorization", $"Bearer {accessToken}");
var userAgentValues = new List<ProductInfoHeaderValue>();
userAgentValues.AddRange(UserAgentUtility.GetDefaultRestUserAgent());
userAgentValues.AddRange(HostContext.UserAgents);
this._websocketClient.Options.SetRequestHeader("User-Agent", string.Join(" ", userAgentValues.Select(x => x.ToString())));
// during initialization, retry up to 3 times to set up the connection
this._websocketConnectTask = ConnectWebSocketClient(liveConsoleFeedUrl, delay, retryConnection);
}
private async Task ConnectWebSocketClient(string feedStreamUrl, TimeSpan delay, bool retryConnection = false)
{
bool connected = false;
int retries = 0;
do
{
try
{
Trace.Info($"Attempting to start websocket client with delay {delay}.");
await Task.Delay(delay);
using var connectTimeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(30));
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), connectTimeoutTokenSource.Token);
Trace.Info($"Successfully started websocket client.");
connected = true;
}
catch (Exception ex)
{
Trace.Info("Exception caught during websocket client connect, retry connection.");
Trace.Error(ex);
retries++;
this._websocketClient = null;
_lastConnectionFailure = DateTime.Now;
}
} while (retryConnection && !connected && retries < 3);
}
public async Task<bool> AppendLiveConsoleFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken)
{
if (_websocketConnectTask != null)
{
await _websocketConnectTask;
}
bool delivered = false;
int retries = 0;
// "_websocketClient != null" implies either: We have a successful connection OR we have to attempt sending again and then reconnect
// ...in other words, if websocket client is null, we will skip sending to websocket
if (_websocketClient != null)
{
var linesWrapper = startLine.HasValue
? new TimelineRecordFeedLinesWrapper(stepId, lines, startLine.Value)
: new TimelineRecordFeedLinesWrapper(stepId, lines);
var jsonData = StringUtil.ConvertToJson(linesWrapper);
var jsonDataBytes = Encoding.UTF8.GetBytes(jsonData);
// break the message into chunks of 1024 bytes
for (var i = 0; i < jsonDataBytes.Length; i += 1 * 1024)
{
var lastChunk = i + (1 * 1024) >= jsonDataBytes.Length;
var chunk = new ArraySegment<byte>(jsonDataBytes, i, Math.Min(1 * 1024, jsonDataBytes.Length - i));
delivered = false;
while (!delivered && retries < 3)
{
try
{
if (_websocketClient != null)
{
await _websocketClient.SendAsync(chunk, WebSocketMessageType.Text, endOfMessage: lastChunk, cancellationToken);
delivered = true;
}
}
catch (Exception ex)
{
var delay = BackoffTimerHelper.GetRandomBackoff(MinDelayForWebsocketReconnect, MaxDelayForWebsocketReconnect);
Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms.");
Trace.Verbose(ex.ToString());
retries++;
InitializeWebsocketClient(_liveConsoleFeedUrl, _token, delay);
}
}
}
}
if (!delivered)
{
// Giving up for now, so next invocation of this method won't attempt to reconnect
_websocketClient = null;
// however, if 10 minutes have already passed, let's try to reestablish the connection
if (_lastConnectionFailure.HasValue && DateTime.Now > _lastConnectionFailure.Value.AddMinutes(10))
{
// Some minutes passed since we retried last time, try connection again
InitializeWebsocketClient(_liveConsoleFeedUrl, _token, TimeSpan.Zero);
}
}
return delivered;
}
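To make the chunk loop above concrete, a worked example assuming jsonDataBytes.Length is 2500: the loop yields segments at offsets 0, 1024 and 2048 with lengths 1024, 1024 and 452, and only the last segment is sent with endOfMessage set to true, so the receiver reassembles the three frames into a single JSON message.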
private void CloseWebSocket(WebSocketCloseStatus closeStatus, CancellationToken cancellationToken)
{
try
{
_websocketClient?.CloseOutputAsync(closeStatus, "Closing websocket", cancellationToken);
}
catch (Exception websocketEx)
{
// In some cases this might be okay since the websocket might not be open yet, so just close and don't trace exceptions
Trace.Info($"Failed to close websocket gracefully {websocketEx.GetType().Name}");
}
}
}
}

@@ -1,14 +1,11 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi.RawClient;
using GitHub.Services.WebApi;
namespace GitHub.Runner.Common
{
@@ -18,35 +15,47 @@ namespace GitHub.Runner.Common
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);
Task CompleteJobAsync(
Guid planId,
Guid jobId,
TaskResult result,
Dictionary<String, VariableValue> outputs,
IList<StepResult> stepResults,
IList<Annotation> jobAnnotations,
CancellationToken token);
Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
}
public sealed class RunServer : RunnerService, IRunServer
{
private bool _hasConnection;
private Uri requestUri;
private RawConnection _connection;
private RunServiceHttpClient _runServiceHttpClient;
private VssConnection _connection;
private TaskAgentHttpClient _taskAgentClient;
public async Task ConnectAsync(Uri serverUri, VssCredentials credentials)
public async Task ConnectAsync(Uri serverUrl, VssCredentials credentials)
{
requestUri = serverUri;
_connection = VssUtil.CreateRawConnection(serverUri, credentials);
_runServiceHttpClient = await _connection.GetClientAsync<RunServiceHttpClient>();
_connection = await EstablishVssConnection(serverUrl, credentials, TimeSpan.FromSeconds(100));
_taskAgentClient = _connection.GetClient<TaskAgentHttpClient>();
_hasConnection = true;
}
private async Task<VssConnection> EstablishVssConnection(Uri serverUrl, VssCredentials credentials, TimeSpan timeout)
{
Trace.Info($"EstablishVssConnection");
Trace.Info($"Establish connection with {timeout.TotalSeconds} seconds timeout.");
int attemptCount = 5;
while (attemptCount-- > 0)
{
var connection = VssUtil.CreateConnection(serverUrl, credentials, timeout: timeout);
try
{
await connection.ConnectAsync();
return connection;
}
catch (Exception ex) when (attemptCount > 0)
{
Trace.Info($"Catch exception during connect. {attemptCount} attempt left.");
Trace.Error(ex);
await HostContext.Delay(TimeSpan.FromMilliseconds(100), CancellationToken.None);
}
}
// should never reach here.
throw new InvalidOperationException(nameof(EstablishVssConnection));
}
private void CheckConnection()
{
if (!_hasConnection)
@@ -58,30 +67,37 @@ namespace GitHub.Runner.Common
public Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken cancellationToken)
{
CheckConnection();
return RetryRequest<AgentJobRequestMessage>(
async () => await _runServiceHttpClient.GetJobMessageAsync(requestUri, id, cancellationToken), cancellationToken,
shouldRetry: ex => ex is not TaskOrchestrationJobAlreadyAcquiredException);
var jobMessage = RetryRequest<AgentJobRequestMessage>(async () =>
{
return await _taskAgentClient.GetJobMessageAsync(id, cancellationToken);
}, cancellationToken);
return jobMessage;
}
public Task CompleteJobAsync(
Guid planId,
Guid jobId,
TaskResult result,
Dictionary<String, VariableValue> outputs,
IList<StepResult> stepResults,
IList<Annotation> jobAnnotations,
CancellationToken cancellationToken)
private async Task<T> RetryRequest<T>(Func<Task<T>> func,
CancellationToken cancellationToken,
int maxRetryAttemptsCount = 5
)
{
CheckConnection();
return RetryRequest(
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, jobAnnotations, cancellationToken), cancellationToken);
}
public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
{
CheckConnection();
return RetryRequest<RenewJobResponse>(
async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken);
var retryCount = 0;
while (true)
{
retryCount++;
cancellationToken.ThrowIfCancellationRequested();
try
{
return await func();
}
// TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
catch (Exception ex) when (retryCount < maxRetryAttemptsCount)
{
Trace.Error("Catch exception during get full job message");
Trace.Error(ex);
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxRetryAttemptsCount - retryCount} attempt left.");
await Task.Delay(backOff, cancellationToken);
}
}
}
}
}

@@ -1,237 +0,0 @@
using GitHub.DistributedTask.WebApi;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.WebApi;
using GitHub.Services.Common;
using GitHub.Runner.Sdk;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Linq;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(RunnerDotcomServer))]
public interface IRunnerDotcomServer : IRunnerService
{
Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName);
Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);
string GetGitHubRequestId(HttpResponseHeaders headers);
}
public enum RequestType
{
Get,
Post,
Patch,
Delete
}
public class RunnerDotcomServer : RunnerService, IRunnerDotcomServer
{
private ITerminal _term;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = hostContext.GetService<ITerminal>();
}
public async Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName = null)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
}
}
else if (path.Length == 2)
{
// repo or enterprise runner.
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}
var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
var agents = runnersList.ToTaskAgents();
if (string.IsNullOrEmpty(agentName))
{
return agents;
}
return agents.Where(x => string.Equals(x.Name, agentName, StringComparison.OrdinalIgnoreCase)).ToList();
}
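For illustration (hypothetical values): with githubUrl = https://github.com/my-org and runnerGroupId = 3, the hosted branch above builds https://api.github.com/orgs/my-org/actions/runner-groups/3/runners, while a GHES install configured as https://ghes.example.com/my-org builds https://ghes.example.com/api/v3/orgs/my-org/actions/runner-groups/3/runners; a two-segment path is only accepted when its first segment is "enterprises".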
public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups";
}
}
else if (path.Length == 2)
{
// repo or enterprise runner.
if (!string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or enterprise.");
}
var agentPools = await RetryRequest<RunnerGroupList>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
return agentPools?.ToAgentPoolList();
}
public async Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
{
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
string githubApiUrl;
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runners/register";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runners/register";
}
var bodyObject = new Dictionary<string, Object>()
{
{"url", githubUrl},
{"group_id", runnerGroupId},
{"name", agent.Name},
{"version", agent.Version},
{"updates_disabled", agent.DisableUpdate},
{"ephemeral", agent.Ephemeral},
{"labels", agent.Labels},
{"public_key", publicKey}
};
var body = new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json");
return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
}
private async Task<T> RetryRequest<T>(string githubApiUrl, string githubToken, RequestType requestType, int maxRetryAttemptsCount = 5, string errorMessage = null, StringContent body = null)
{
int retry = 0;
while (true)
{
retry++;
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
var responseStatus = System.Net.HttpStatusCode.OK;
try
{
HttpResponseMessage response = null;
if (requestType == RequestType.Get)
{
response = await httpClient.GetAsync(githubApiUrl);
}
else
{
response = await httpClient.PostAsync(githubApiUrl, body);
}
if (response != null)
{
responseStatus = response.StatusCode;
var githubRequestId = GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from '{requestType.ToString()} {githubApiUrl}' ({githubRequestId})");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<T>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from '{requestType.ToString()} {githubApiUrl}' (Request Id: {githubRequestId})");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
}
catch (Exception ex) when (retry < maxRetryAttemptsCount && responseStatus != System.Net.HttpStatusCode.NotFound)
{
Trace.Error($"{errorMessage} -- Atempt: {retry}");
Trace.Error(ex);
}
}
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
Trace.Info($"Retrying in {backOff.Seconds} seconds");
await Task.Delay(backOff);
}
}
public string GetGitHubRequestId(HttpResponseHeaders headers)
{
if (headers.TryGetValues("x-github-request-id", out var headerValues))
{
return headerValues.FirstOrDefault();
}
return string.Empty;
}
}
}

@@ -38,7 +38,7 @@ namespace GitHub.Runner.Common
Task<TaskAgentSession> CreateAgentSessionAsync(Int32 poolId, TaskAgentSession session, CancellationToken cancellationToken);
Task DeleteAgentMessageAsync(Int32 poolId, Int64 messageId, Guid sessionId, CancellationToken cancellationToken);
Task DeleteAgentSessionAsync(Int32 poolId, Guid sessionId, CancellationToken cancellationToken);
Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken);
Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, CancellationToken cancellationToken);
// job request
Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
@@ -179,6 +179,31 @@ namespace GitHub.Runner.Common
}
}
private async Task<VssConnection> EstablishVssConnection(Uri serverUrl, VssCredentials credentials, TimeSpan timeout)
{
Trace.Info($"Establish connection with {timeout.TotalSeconds} seconds timeout.");
int attemptCount = 5;
while (attemptCount-- > 0)
{
var connection = VssUtil.CreateConnection(serverUrl, credentials, timeout: timeout);
try
{
await connection.ConnectAsync();
return connection;
}
catch (Exception ex) when (attemptCount > 0)
{
Trace.Info($"Catch exception during connect. {attemptCount} attempt left.");
Trace.Error(ex);
await HostContext.Delay(TimeSpan.FromMilliseconds(100), CancellationToken.None);
}
}
// should never reach here.
throw new InvalidOperationException(nameof(EstablishVssConnection));
}
private void CheckConnection(RunnerConnectionType connectionType)
{
switch (connectionType)
@@ -272,10 +297,10 @@ namespace GitHub.Runner.Common
return _messageTaskAgentClient.DeleteAgentSessionAsync(poolId, sessionId, cancellationToken: cancellationToken);
}
public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken)
public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, CancellationToken cancellationToken)
{
CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, cancellationToken: cancellationToken);
return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, cancellationToken: cancellationToken);
}
//-----------------------------------------------------------------

@@ -1,10 +1,4 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using Sdk.WebApi.WebApi.RawClient;
using System;
namespace GitHub.Runner.Common
{
@@ -27,9 +21,9 @@ namespace GitHub.Runner.Common
protected IHostContext HostContext { get; private set; }
protected Tracing Trace { get; private set; }
public string TraceName
{
get
{
return GetType().Name;
}
@@ -41,71 +35,5 @@ namespace GitHub.Runner.Common
Trace = HostContext.GetTrace(TraceName);
Trace.Entering();
}
protected async Task<VssConnection> EstablishVssConnection(Uri serverUrl, VssCredentials credentials, TimeSpan timeout)
{
Trace.Info($"EstablishVssConnection");
Trace.Info($"Establish connection with {timeout.TotalSeconds} seconds timeout.");
int attemptCount = 5;
while (attemptCount-- > 0)
{
var connection = VssUtil.CreateConnection(serverUrl, credentials, timeout: timeout);
try
{
await connection.ConnectAsync();
return connection;
}
catch (Exception ex) when (attemptCount > 0)
{
Trace.Info($"Catch exception during connect. {attemptCount} attempt left.");
Trace.Error(ex);
await HostContext.Delay(TimeSpan.FromMilliseconds(100), CancellationToken.None);
}
}
// should never reach here.
throw new InvalidOperationException(nameof(EstablishVssConnection));
}
protected async Task RetryRequest(Func<Task> func,
CancellationToken cancellationToken,
int maxRetryAttemptsCount = 5
)
{
async Task<Unit> wrappedFunc()
{
await func();
return Unit.Value;
}
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxRetryAttemptsCount);
}
protected async Task<T> RetryRequest<T>(Func<Task<T>> func,
CancellationToken cancellationToken,
int maxRetryAttemptsCount = 5,
Func<Exception, bool> shouldRetry = null
)
{
var retryCount = 0;
while (true)
{
retryCount++;
cancellationToken.ThrowIfCancellationRequested();
try
{
return await func();
}
// TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
catch (Exception ex) when (retryCount < maxRetryAttemptsCount && (shouldRetry == null || shouldRetry(ex)))
{
Trace.Error("Catch exception during request");
Trace.Error(ex);
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxRetryAttemptsCount - retryCount} attempt left.");
await Task.Delay(backOff, cancellationToken);
}
}
}
}
}
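A hedged sketch of calling the retry helpers above from a derived service; DoCleanupAsync and FetchMessageAsync are illustrative placeholders, while the shouldRetry predicate mirrors the one used by RunServer in this diff:

// Non-generic overload: the Func<Task> is wrapped into a Task<Unit> internally.
await RetryRequest(async () => await DoCleanupAsync(cancellationToken), cancellationToken, maxRetryAttemptsCount: 3);

// Generic overload with a predicate that refuses to retry a known-fatal exception.
var message = await RetryRequest<AgentJobRequestMessage>(
    async () => await FetchMessageAsync(cancellationToken),
    cancellationToken,
    shouldRetry: ex => ex is not TaskOrchestrationJobAlreadyAcquiredException);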

@@ -1,96 +0,0 @@
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
{
public sealed class StdoutTraceListener : ConsoleTraceListener
{
private readonly string _hostType;
public StdoutTraceListener(string hostType)
{
this._hostType = hostType;
}
// Copied and modified slightly from .Net Core source code. Modification was required to make it compile.
// There must be some TraceFilter extension class that is missing in this source code.
public override void TraceEvent(TraceEventCache eventCache, string source, TraceEventType eventType, int id, string message)
{
if (Filter != null && !Filter.ShouldTrace(eventCache, source, eventType, id, message, null, null, null))
{
return;
}
if (!string.IsNullOrEmpty(message))
{
var messageLines = message.Split(Environment.NewLine);
foreach (var messageLine in messageLines)
{
WriteHeader(source, eventType, id);
WriteLine(messageLine);
WriteFooter(eventCache);
}
}
}
internal bool IsEnabled(TraceOptions opts)
{
return (opts & TraceOutputOptions) != 0;
}
// Altered from the original .Net Core implementation.
private void WriteHeader(string source, TraceEventType eventType, int id)
{
string type = null;
switch (eventType)
{
case TraceEventType.Critical:
type = "CRIT";
break;
case TraceEventType.Error:
type = "ERR ";
break;
case TraceEventType.Warning:
type = "WARN";
break;
case TraceEventType.Information:
type = "INFO";
break;
case TraceEventType.Verbose:
type = "VERB";
break;
default:
type = eventType.ToString();
break;
}
Write(StringUtil.Format("[{0} {1:u} {2} {3}] ", _hostType.ToUpperInvariant(), DateTime.UtcNow, type, source));
}
// Copied and modified slightly from .Net Core source code to make it compile. The original code
// accesses a private indentLevel field. In this code it has been modified to use the getter/setter.
private void WriteFooter(TraceEventCache eventCache)
{
if (eventCache == null)
return;
IndentLevel++;
if (IsEnabled(TraceOptions.ProcessId))
WriteLine("ProcessId=" + eventCache.ProcessId);
if (IsEnabled(TraceOptions.ThreadId))
WriteLine("ThreadId=" + eventCache.ThreadId);
if (IsEnabled(TraceOptions.DateTime))
WriteLine("DateTime=" + eventCache.DateTime.ToString("o", CultureInfo.InvariantCulture));
if (IsEnabled(TraceOptions.Timestamp))
WriteLine("Timestamp=" + eventCache.Timestamp);
IndentLevel--;
}
}
}
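With the header format above, an INFO event raised in the Worker host from the JobServerQueue trace source would render on stdout roughly as the following single line (timestamp illustrative):

[WORKER 2022-10-04 17:28:24Z INFO JobServerQueue] Start process timeline update queue.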

@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common
string ReadSecret();
void Write(string message, ConsoleColor? colorCode = null);
void WriteLine();
void WriteLine(string line, ConsoleColor? colorCode = null, bool skipTracing = false);
void WriteLine(string line, ConsoleColor? colorCode = null);
void WriteError(Exception ex);
void WriteError(string line);
void WriteSection(string message);
@@ -81,7 +81,7 @@ namespace GitHub.Runner.Common
}
// Trace whether a value was entered.
string val = new(chars.ToArray());
string val = new String(chars.ToArray());
if (!string.IsNullOrEmpty(val))
{
HostContext.SecretMasker.AddValue(val);
@@ -116,12 +116,9 @@ namespace GitHub.Runner.Common
// Do not add a format string overload. Terminal messages are user facing and therefore
// should be localized. Use the Loc method in the StringUtil class.
public void WriteLine(string line, ConsoleColor? colorCode = null, bool skipTracing = false)
public void WriteLine(string line, ConsoleColor? colorCode = null)
{
if (!skipTracing)
{
Trace.Info($"WRITE LINE: {line}");
}
Trace.Info($"WRITE LINE: {line}");
if (!Silent)
{
if (colorCode != null)

@@ -14,25 +14,23 @@ namespace GitHub.Runner.Common
public sealed class TraceManager : ITraceManager
{
private readonly ConcurrentDictionary<string, Tracing> _sources = new(StringComparer.OrdinalIgnoreCase);
private readonly ConcurrentDictionary<string, Tracing> _sources = new ConcurrentDictionary<string, Tracing>(StringComparer.OrdinalIgnoreCase);
private readonly HostTraceListener _hostTraceListener;
private readonly StdoutTraceListener _stdoutTraceListener;
private TraceSetting _traceSetting;
private ISecretMasker _secretMasker;
public TraceManager(HostTraceListener traceListener, StdoutTraceListener stdoutTraceListener, ISecretMasker secretMasker)
: this(traceListener, stdoutTraceListener, new TraceSetting(), secretMasker)
public TraceManager(HostTraceListener traceListener, ISecretMasker secretMasker)
: this(traceListener, new TraceSetting(), secretMasker)
{
}
public TraceManager(HostTraceListener traceListener, StdoutTraceListener stdoutTraceListener, TraceSetting traceSetting, ISecretMasker secretMasker)
public TraceManager(HostTraceListener traceListener, TraceSetting traceSetting, ISecretMasker secretMasker)
{
// Validate and store params.
ArgUtil.NotNull(traceListener, nameof(traceListener));
ArgUtil.NotNull(traceSetting, nameof(traceSetting));
ArgUtil.NotNull(secretMasker, nameof(secretMasker));
_hostTraceListener = traceListener;
_stdoutTraceListener = stdoutTraceListener;
_traceSetting = traceSetting;
_secretMasker = secretMasker;
@@ -83,7 +81,7 @@ namespace GitHub.Runner.Common
Level = sourceTraceLevel.ToSourceLevels()
};
}
return new Tracing(name, _secretMasker, sourceSwitch, _hostTraceListener, _stdoutTraceListener);
return new Tracing(name, _secretMasker, sourceSwitch, _hostTraceListener);
}
}
}

@@ -12,7 +12,7 @@ namespace GitHub.Runner.Common
private ISecretMasker _secretMasker;
private TraceSource _traceSource;
public Tracing(string name, ISecretMasker secretMasker, SourceSwitch sourceSwitch, HostTraceListener traceListener, StdoutTraceListener stdoutTraceListener = null)
public Tracing(string name, ISecretMasker secretMasker, SourceSwitch sourceSwitch, HostTraceListener traceListener)
{
ArgUtil.NotNull(secretMasker, nameof(secretMasker));
_secretMasker = secretMasker;
@@ -27,10 +27,6 @@ namespace GitHub.Runner.Common
}
_traceSource.Listeners.Add(traceListener);
if (stdoutTraceListener != null)
{
_traceSource.Listeners.Add(stdoutTraceListener);
}
}
public void Info(string message)

@@ -1,8 +0,0 @@
// Represents absence of value.
namespace GitHub.Runner.Common
{
public readonly struct Unit
{
public static readonly Unit Value = default;
}
}

@@ -1,14 +0,0 @@
namespace GitHub.Runner.Common.Util
{
using System;
using GitHub.DistributedTask.WebApi;
public static class MessageUtil
{
public static bool IsRunServiceJob(string messageType)
{
return string.Equals(messageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase);
}
}
}
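A hedged usage sketch of the helper above; the message variable and the dispatch comment are illustrative:

// Route run-service job messages to the run-service code path; every other
// message type stays on the existing pipelines-based handling.
if (MessageUtil.IsRunServiceJob(message.MessageType))
{
    // dispatch via IRunServer instead of IJobServer
}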

@@ -1,209 +0,0 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Runner.Common.Util;
using GitHub.Services.OAuth;
namespace GitHub.Runner.Listener
{
public sealed class BrokerMessageListener : RunnerService, IMessageListener
{
private RunnerSettings _settings;
private ITerminal _term;
private TimeSpan _getNextMessageRetryInterval;
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
private IBrokerServer _brokerServer;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = HostContext.GetService<ITerminal>();
_brokerServer = HostContext.GetService<IBrokerServer>();
}
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
{
await RefreshBrokerConnection();
return await Task.FromResult(true);
}
public async Task DeleteSessionAsync()
{
await Task.CompletedTask;
}
public void OnJobStatus(object sender, JobStatusEventArgs e)
{
Trace.Info("Received job status event. JobState: {0}", e.Status);
runnerStatus = e.Status;
try
{
_getMessagesTokenSource?.Cancel();
}
catch (ObjectDisposedException)
{
Trace.Info("_getMessagesTokenSource is already disposed.");
}
}
public async Task<TaskAgentMessage> GetNextMessageAsync(CancellationToken token)
{
bool encounteringError = false;
int continuousError = 0;
Stopwatch heartbeat = new();
heartbeat.Restart();
var maxRetryCount = 10;
while (true)
{
TaskAgentMessage message = null;
_getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
try
{
message = await _brokerServer.GetRunnerMessageAsync(_getMessagesTokenSource.Token, runnerStatus, BuildConstants.RunnerPackage.Version);
if (message == null)
{
continue;
}
return message;
}
catch (OperationCanceledException) when (_getMessagesTokenSource.Token.IsCancellationRequested && !token.IsCancellationRequested)
{
Trace.Info("Get messages has been cancelled using local token source. Continue to get messages with new status.");
continue;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info("Get next message has been cancelled.");
throw;
}
catch (TaskAgentAccessTokenExpiredException)
{
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
throw;
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during get next message.");
Trace.Error(ex);
if (!IsGetNextMessageExceptionRetriable(ex))
{
throw;
}
else
{
continuousError++;
//retry after a random backoff to avoid service throttling
//in case a service error happened and all runners got kicked off the long poll and try to reconnect at the same time.
if (continuousError <= 5)
{
// random backoff [15, 30]
_getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30), _getNextMessageRetryInterval);
}
else if (continuousError >= maxRetryCount)
{
throw;
}
else
{
// more aggressive backoff [30, 60]
_getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(30), TimeSpan.FromSeconds(60), _getNextMessageRetryInterval);
}
if (!encounteringError)
{
//print error only on the first consecutive error
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
encounteringError = true;
}
// re-create VssConnection before next retry
await RefreshBrokerConnection();
Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds);
await HostContext.Delay(_getNextMessageRetryInterval, token);
}
}
finally
{
_getMessagesTokenSource.Dispose();
}
if (message == null)
{
if (heartbeat.Elapsed > TimeSpan.FromMinutes(30))
{
Trace.Info($"No message retrieved within last 30 minutes.");
heartbeat.Restart();
}
else
{
Trace.Verbose($"No message retrieved.");
}
continue;
}
Trace.Info($"Message '{message.MessageId}' received.");
}
}
public async Task DeleteMessageAsync(TaskAgentMessage message)
{
await Task.CompletedTask;
}
private bool IsGetNextMessageExceptionRetriable(Exception ex)
{
if (ex is TaskAgentNotFoundException ||
ex is TaskAgentPoolNotFoundException ||
ex is TaskAgentSessionExpiredException ||
ex is AccessDeniedException ||
ex is VssUnauthorizedException)
{
Trace.Info($"Non-retriable exception: {ex.Message}");
return false;
}
else
{
Trace.Info($"Retriable exception: {ex.Message}");
return true;
}
}
private async Task RefreshBrokerConnection()
{
var configManager = HostContext.GetService<IConfigurationManager>();
_settings = configManager.LoadSettings();
if (_settings.ServerUrlV2 == null)
{
throw new InvalidOperationException("ServerUrlV2 is not set");
}
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials creds = credMgr.LoadCredentials();
await _brokerServer.ConnectAsync(new Uri(_settings.ServerUrlV2), creds);
}
}
}
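
The GetNextMessageAsync loop above retries long-poll failures with a randomized, escalating backoff and gives up after ten consecutive errors. A stripped-down sketch of that pattern using only BCL types; BackoffTimerHelper and the broker client are replaced with illustrative stand-ins:

using System;
using System.Threading;
using System.Threading.Tasks;

static class BackoffRetrySketch
{
    static readonly Random Rng = new Random();

    // Stand-in for BackoffTimerHelper.GetRandomBackoff: random delay in [min, max].
    static TimeSpan RandomBackoff(TimeSpan min, TimeSpan max) =>
        min + TimeSpan.FromSeconds(Rng.NextDouble() * (max - min).TotalSeconds);

    public static async Task<T> PollAsync<T>(Func<CancellationToken, Task<T>> poll, CancellationToken token)
        where T : class
    {
        int continuousError = 0;
        const int maxRetryCount = 10;
        while (true)
        {
            try
            {
                var result = await poll(token);
                continuousError = 0;                    // reset on success
                if (result != null) return result;      // null means "no message yet", keep polling
            }
            catch (OperationCanceledException) when (token.IsCancellationRequested)
            {
                throw;                                  // shutting down, do not retry
            }
            catch (Exception)
            {
                if (++continuousError >= maxRetryCount) throw;

                // gentle backoff [15,30]s for the first few errors, then [30,60]s
                var delay = continuousError <= 5
                    ? RandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30))
                    : RandomBackoff(TimeSpan.FromSeconds(30), TimeSpan.FromSeconds(60));
                await Task.Delay(delay, token);
            }
        }
    }
}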

View File

@@ -347,8 +347,8 @@ namespace GitHub.Runner.Listener.Check
public sealed class HttpEventSourceListener : EventListener
{
private readonly List<string> _logs;
private readonly object _lock = new();
private readonly Dictionary<string, HashSet<string>> _ignoredEvent = new()
private readonly object _lock = new object();
private readonly Dictionary<string, HashSet<string>> _ignoredEvent = new Dictionary<string, HashSet<string>>
{
{
"Microsoft-System-Net-Http",

View File

@@ -86,7 +86,7 @@ namespace GitHub.Runner.Listener.Check
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
// Request to github.com or ghes server
Uri requestUrl = new(url);
Uri requestUrl = new Uri(url);
var env = new Dictionary<string, string>()
{
{ "HOSTNAME", requestUrl.Host },

View File

@@ -11,7 +11,7 @@ namespace GitHub.Runner.Listener
{
public sealed class CommandSettings
{
private readonly Dictionary<string, string> _envArgs = new(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, string> _envArgs = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
private readonly CommandLineParser _parser;
private readonly IPromptManager _promptManager;
private readonly Tracing _trace;
@@ -26,19 +26,17 @@ namespace GitHub.Runner.Listener
};
// Valid flags and args for specific command - key: command, value: array of valid flags and args
private readonly Dictionary<string, string[]> validOptions = new()
private readonly Dictionary<string, string[]> validOptions = new Dictionary<string, string[]>
{
// Valid configure flags and args
[Constants.Runner.CommandLine.Commands.Configure] =
new string[]
[Constants.Runner.CommandLine.Commands.Configure] =
new string[]
{
Constants.Runner.CommandLine.Flags.DisableUpdate,
Constants.Runner.CommandLine.Flags.Ephemeral,
Constants.Runner.CommandLine.Flags.GenerateServiceConfig,
Constants.Runner.CommandLine.Flags.Replace,
Constants.Runner.CommandLine.Flags.RunAsService,
Constants.Runner.CommandLine.Flags.Unattended,
Constants.Runner.CommandLine.Flags.NoDefaultLabels,
Constants.Runner.CommandLine.Args.Auth,
Constants.Runner.CommandLine.Args.Labels,
Constants.Runner.CommandLine.Args.MonitorSocketAddress,
@@ -57,8 +55,7 @@ namespace GitHub.Runner.Listener
new string[]
{
Constants.Runner.CommandLine.Args.Token,
Constants.Runner.CommandLine.Args.PAT,
Constants.Runner.CommandLine.Flags.Local
Constants.Runner.CommandLine.Args.PAT
},
// Valid run flags and args
[Constants.Runner.CommandLine.Commands.Run] =
@@ -82,14 +79,11 @@ namespace GitHub.Runner.Listener
// Flags.
public bool Check => TestFlag(Constants.Runner.CommandLine.Flags.Check);
public bool Commit => TestFlag(Constants.Runner.CommandLine.Flags.Commit);
public bool DisableUpdate => TestFlag(Constants.Runner.CommandLine.Flags.DisableUpdate);
public bool Ephemeral => TestFlag(Constants.Runner.CommandLine.Flags.Ephemeral);
public bool GenerateServiceConfig => TestFlag(Constants.Runner.CommandLine.Flags.GenerateServiceConfig);
public bool Help => TestFlag(Constants.Runner.CommandLine.Flags.Help);
public bool NoDefaultLabels => TestFlag(Constants.Runner.CommandLine.Flags.NoDefaultLabels);
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version);
public bool RemoveLocalConfig => TestFlag(Constants.Runner.CommandLine.Flags.Local);
public bool Ephemeral => TestFlag(Constants.Runner.CommandLine.Flags.Ephemeral);
public bool DisableUpdate => TestFlag(Constants.Runner.CommandLine.Flags.DisableUpdate);
// Keep this around since customers still rely on it
public bool RunOnce => TestFlag(Constants.Runner.CommandLine.Flags.Once);
@@ -143,7 +137,7 @@ namespace GitHub.Runner.Listener
// Validate commandline parser result
public List<string> Validate()
{
List<string> unknowns = new();
List<string> unknowns = new List<string>();
// detect unknown commands
unknowns.AddRange(_parser.Commands.Where(x => !validOptions.Keys.Contains(x, StringComparer.OrdinalIgnoreCase)));
@@ -184,7 +178,7 @@ namespace GitHub.Runner.Listener
{
command = Constants.Runner.CommandLine.Commands.Warmup;
}
return command;
}
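
Many hunks in this file, and in the ones above, only swap C# 9 target-typed new() expressions back to the older explicit form. The two spellings are equivalent; a minimal sketch (field names are illustrative):

using System;
using System.Collections.Generic;

class TargetTypedNewSketch
{
    // C# 9+: the constructed type is inferred from the field's declared type.
    private readonly Dictionary<string, string> _argsNew = new(StringComparer.OrdinalIgnoreCase);

    // Pre-C# 9 equivalent: the type is repeated on the right-hand side.
    private readonly Dictionary<string, string> _argsOld =
        new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
}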

View File

@@ -1,3 +1,10 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.Common.Internal;
using GitHub.Services.OAuth;
using System;
using System.Collections.Generic;
using System.Linq;
@@ -7,13 +14,6 @@ using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.Common.Internal;
using GitHub.Services.OAuth;
namespace GitHub.Runner.Listener.Configuration
{
@@ -31,14 +31,12 @@ namespace GitHub.Runner.Listener.Configuration
{
private IConfigurationStore _store;
private IRunnerServer _runnerServer;
private IRunnerDotcomServer _dotcomServer;
private ITerminal _term;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_runnerServer = HostContext.GetService<IRunnerServer>();
_dotcomServer = HostContext.GetService<IRunnerDotcomServer>();
Trace.Verbose("Creating _store");
_store = hostContext.GetService<IConfigurationStore>();
Trace.Verbose("store created");
@@ -83,39 +81,17 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteLine("--------------------------------------------------------------------------------");
Trace.Info(nameof(ConfigureAsync));
if (command.GenerateServiceConfig)
{
#if OS_LINUX
if (!IsConfigured())
{
throw new InvalidOperationException("--generateServiceConfig requires that the runner is already configured. For configuring a new runner as a service, run './config.sh'.");
}
RunnerSettings settings = _store.GetSettings();
Trace.Info($"generate service config for runner: {settings.AgentId}");
var controlManager = HostContext.GetService<ILinuxServiceControlManager>();
controlManager.GenerateScripts(settings);
return;
#else
throw new NotSupportedException("--generateServiceConfig is only supported on Linux.");
#endif
}
if (IsConfigured())
{
throw new InvalidOperationException("Cannot configure the runner because it is already configured. To reconfigure the runner, run 'config.cmd remove' or './config.sh remove' first.");
}
RunnerSettings runnerSettings = new();
RunnerSettings runnerSettings = new RunnerSettings();
// Loop getting url and creds until you can connect
ICredentialProvider credProvider = null;
VssCredentials creds = null;
_term.WriteSection("Authentication");
string registerToken = string.Empty;
while (true)
{
// When testing against a dev deployment of Actions Service, set this environment variable
@@ -133,11 +109,9 @@ namespace GitHub.Runner.Listener.Configuration
else
{
runnerSettings.GitHubUrl = inputUrl;
registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
var registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
runnerSettings.ServerUrl = authResult.TenantUrl;
runnerSettings.UseV2Flow = authResult.UseV2Flow;
_term.WriteLine($"Using V2 flow: {runnerSettings.UseV2Flow}");
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
}
@@ -181,11 +155,9 @@ namespace GitHub.Runner.Listener.Configuration
// We want to use the native CSP of the platform for storage, so we use the RSACSP directly
RSAParameters publicKey;
var keyManager = HostContext.GetService<IRSAKeyManager>();
string publicKeyXML;
using (var rsa = keyManager.CreateKey())
{
publicKey = rsa.ExportParameters(false);
publicKeyXML = rsa.ToXmlString(includePrivateParameters: false);
}
_term.WriteSection("Runner Registration");
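
The removed publicKeyXML line exports the same RSA public key in a second format. As a reference, a minimal sketch of exporting a public key both as RSAParameters and as XML with plain System.Security.Cryptography; the runner's IRSAKeyManager is replaced by RSA.Create here:

using System;
using System.Security.Cryptography;

class RsaExportSketch
{
    static void Main()
    {
        using RSA rsa = RSA.Create(2048);

        // Structured form: modulus + exponent only (no private parameters).
        RSAParameters publicKey = rsa.ExportParameters(includePrivateParameters: false);

        // XML form of the same public key, e.g. for sending to a registration endpoint.
        string publicKeyXml = rsa.ToXmlString(includePrivateParameters: false);

        Console.WriteLine($"modulus bytes: {publicKey.Modulus.Length}, xml length: {publicKeyXml.Length}");
    }
}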
@@ -193,17 +165,9 @@ namespace GitHub.Runner.Listener.Configuration
// If we have more than one runner group available, allow the user to specify which one to be added into
string poolName = null;
TaskAgentPool agentPool = null;
List<TaskAgentPool> agentPools;
if (runnerSettings.UseV2Flow)
{
agentPools = await _dotcomServer.GetRunnerGroupsAsync(runnerSettings.GitHubUrl, registerToken);
}
else
{
agentPools = await _runnerServer.GetAgentPoolsAsync();
}
List<TaskAgentPool> agentPools = await _runnerServer.GetAgentPoolsAsync();
TaskAgentPool defaultPool = agentPools?.Where(x => x.IsInternal).FirstOrDefault();
if (agentPools?.Where(x => !x.IsHosted).Count() > 0)
{
poolName = command.GetRunnerGroupName(defaultPool?.Name);
@@ -241,16 +205,8 @@ namespace GitHub.Runner.Listener.Configuration
var userLabels = command.GetLabels();
_term.WriteLine();
List<TaskAgent> agents;
if (runnerSettings.UseV2Flow)
{
agents = await _dotcomServer.GetRunnersAsync(runnerSettings.PoolId, runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
}
else
{
agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
}
var agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
Trace.Verbose("Returns {0} agents", agents.Count);
agent = agents.FirstOrDefault();
if (agent != null)
@@ -259,7 +215,7 @@ namespace GitHub.Runner.Listener.Configuration
if (command.GetReplace())
{
// Update existing agent with new PublicKey, agent version.
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate, command.NoDefaultLabels);
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
try
{
@@ -293,27 +249,11 @@ namespace GitHub.Runner.Listener.Configuration
else
{
// Create a new agent.
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate, command.NoDefaultLabels);
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
try
{
if (runnerSettings.UseV2Flow)
{
var runner = await _dotcomServer.AddRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
agent.Id = runner.Id;
agent.Authorization = new TaskAgentAuthorization()
{
AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
ClientId = new Guid(runner.RunnerAuthorization.ClientId)
};
}
else
{
agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
}
agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
if (command.DisableUpdate &&
command.DisableUpdate != agent.DisableUpdate)
{
@@ -364,28 +304,24 @@ namespace GitHub.Runner.Listener.Configuration
}
// Test the agent connection to detect any potential connection issue, like local clock skew that causes the OAuth token to appear expired.
if (!runnerSettings.UseV2Flow)
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials credential = credMgr.LoadCredentials();
try
{
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials credential = credMgr.LoadCredentials();
try
{
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
// ConnectAsync() hits _apis/connectionData which is an anonymous endpoint
// Need to hit an authenticate endpoint to trigger OAuth token exchange.
await _runnerServer.GetAgentPoolsAsync();
_term.WriteSuccessMessage("Runner connection is good");
}
catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is"))
{
// there are two exception messages the server sends that indicate clock skew.
// 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
Trace.Error("Catch exception during test agent connection.");
Trace.Error(ex);
throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
}
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
// ConnectAsync() hits _apis/connectionData which is an anonymous endpoint
// Need to hit an authenticate endpoint to trigger OAuth token exchange.
await _runnerServer.GetAgentPoolsAsync();
_term.WriteSuccessMessage("Runner connection is good");
}
catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is"))
{
// there are two exception messages the server sends that indicate clock skew.
// 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
Trace.Error("Catch exception during test agent connection.");
Trace.Error(ex);
throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
}
_term.WriteSection("Runner settings");
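
The hunk above re-indents the connection test; its key trick is the C# exception filter that recognizes clock skew purely from the server's error text. A generic sketch of that shape (the exception type and message check are placeholders for the Vss-specific ones):

using System;

class ClockSkewFilterSketch
{
    static void TestConnection(Action connect)
    {
        try
        {
            connect();
        }
        catch (Exception ex) when (ex.Message.Contains("Current server time is"))
        {
            // `when` filters run before the stack unwinds, so unrelated failures
            // still propagate with their original type and stack trace.
            throw new Exception(
                "The local machine's clock may be out of sync with the server time " +
                "by more than five minutes. Please sync your clock and try again.", ex);
        }
    }
}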
@@ -554,7 +490,7 @@ namespace GitHub.Runner.Listener.Configuration
}
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate, bool noDefaultLabels)
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
{
ArgUtil.NotNull(agent, nameof(agent));
agent.Authorization = new TaskAgentAuthorization
@@ -571,16 +507,9 @@ namespace GitHub.Runner.Listener.Configuration
agent.Labels.Clear();
if (!noDefaultLabels)
{
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
}
else if (userLabels.Count == 0)
{
throw new NotSupportedException("Disabling default labels via --no-default-labels without specifying --labels is not supported");
}
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
foreach (var userLabel in userLabels)
{
@@ -590,9 +519,9 @@ namespace GitHub.Runner.Listener.Configuration
return agent;
}
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate, bool noDefaultLabels)
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
{
TaskAgent agent = new(agentName)
TaskAgent agent = new TaskAgent(agentName)
{
Authorization = new TaskAgentAuthorization
{
@@ -605,16 +534,9 @@ namespace GitHub.Runner.Listener.Configuration
DisableUpdate = disableUpdate
};
if (!noDefaultLabels)
{
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
}
else if (userLabels.Count == 0)
{
throw new NotSupportedException("Disabling default labels via --no-default-labels without specifying --labels is not supported");
}
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
foreach (var userLabel in userLabels)
{
@@ -693,7 +615,7 @@ namespace GitHub.Runner.Listener.Configuration
}
int retryCount = 0;
while (retryCount < 3)
while(retryCount < 3)
{
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
@@ -703,29 +625,28 @@ namespace GitHub.Runner.Listener.Configuration
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
var responseStatus = System.Net.HttpStatusCode.OK;
try
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
responseStatus = response.StatusCode;
var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}' ({githubRequestId})");
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}' (Request Id: {githubRequestId})");
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
catch(Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
{
retryCount++;
Trace.Error($"Failed to get JIT runner token -- Atempt: {retryCount}");
@@ -772,23 +693,22 @@ namespace GitHub.Runner.Listener.Configuration
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
responseStatus = response.StatusCode;
var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
if(response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}' ({githubRequestId})");
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<GitHubAuthResult>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}' (Request Id: {githubRequestId})");
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
catch(Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
{
retryCount++;
Trace.Error($"Failed to get tenant credentials -- Atempt: {retryCount}");

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Runner.Common;
@@ -18,10 +18,10 @@ namespace GitHub.Runner.Listener.Configuration
public class CredentialManager : RunnerService, ICredentialManager
{
public static readonly Dictionary<string, Type> CredentialTypes = new(StringComparer.OrdinalIgnoreCase)
public static readonly Dictionary<string, Type> CredentialTypes = new Dictionary<string, Type>(StringComparer.OrdinalIgnoreCase)
{
{ Constants.Configuration.OAuth, typeof(OAuthCredential) },
{ Constants.Configuration.OAuthAccessToken, typeof(OAuthAccessTokenCredential) },
{ Constants.Configuration.OAuth, typeof(OAuthCredential)},
{ Constants.Configuration.OAuthAccessToken, typeof(OAuthAccessTokenCredential)},
};
public ICredentialProvider GetCredentialProvider(string credType)
@@ -93,9 +93,6 @@ namespace GitHub.Runner.Listener.Configuration
[DataMember(Name = "token")]
public string Token { get; set; }
[DataMember(Name = "use_v2_flow")]
public bool UseV2Flow { get; set; }
public VssCredentials ToVssCredentials()
{
ArgUtil.NotNullOrEmpty(TokenSchema, nameof(TokenSchema));

View File

@@ -48,7 +48,7 @@ namespace GitHub.Runner.Listener.Configuration
ArgUtil.NotNullOrEmpty(token, nameof(token));
trace.Info("token retrieved: {0} chars", token.Length);
VssCredentials creds = new(new VssOAuthAccessTokenCredential(token), CredentialPromptType.DoNotPrompt);
VssCredentials creds = new VssCredentials(new VssOAuthAccessTokenCredential(token), CredentialPromptType.DoNotPrompt);
trace.Info("cred created");
return creds;

View File

@@ -44,7 +44,7 @@ namespace GitHub.Runner.Listener.Configuration
}
// For the service name, replace any characters outside of the alpha-numeric set and ".", "_", "-" with "-"
Regex regex = new(@"[^0-9a-zA-Z._\-]");
Regex regex = new Regex(@"[^0-9a-zA-Z._\-]");
string repoOrOrgName = regex.Replace(settings.RepoOrOrgName, "-");
serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgName, settings.AgentName);
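
For reference, the sanitization above can be exercised on its own: every character outside [0-9a-zA-Z._-] is collapsed to a dash before the name is formatted into the service name pattern. A small sketch (the inputs and the pattern string are illustrative):

using System;
using System.Text.RegularExpressions;

class ServiceNameSketch
{
    static void Main()
    {
        string repoOrOrgName = "octo org/runner repo";
        string agentName = "builder-01";

        // Replace every character outside the allowed set with "-".
        var regex = new Regex(@"[^0-9a-zA-Z._\-]");
        string safeRepo = regex.Replace(repoOrOrgName, "-");

        // e.g. "actions.runner.octo-org-runner-repo.builder-01.service"
        string serviceName = string.Format("actions.runner.{0}.{1}.service", safeRepo, agentName);
        Console.WriteLine(serviceName);
    }
}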

View File

@@ -1,3 +0,0 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]

View File

@@ -7,7 +7,6 @@ using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
@@ -15,7 +14,6 @@ using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using GitHub.Services.WebApi.Jwt;
using Sdk.RSWebApi.Contracts;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Listener
@@ -39,8 +37,8 @@ namespace GitHub.Runner.Listener
// and the server will not send another job while this one is still running.
public sealed class JobDispatcher : RunnerService, IJobDispatcher
{
private static Regex _invalidJsonRegex = new(@"invalid\ Json\ at\ position\ '(\d+)':", RegexOptions.Compiled | RegexOptions.IgnoreCase);
private readonly Lazy<Dictionary<long, TaskResult>> _localRunJobResult = new();
private static Regex _invalidJsonRegex = new Regex(@"invalid\ Json\ at\ position\ '(\d+)':", RegexOptions.Compiled | RegexOptions.IgnoreCase);
private readonly Lazy<Dictionary<long, TaskResult>> _localRunJobResult = new Lazy<Dictionary<long, TaskResult>>();
private int _poolId;
IConfigurationStore _configurationStore;
@@ -49,19 +47,17 @@ namespace GitHub.Runner.Listener
private static readonly string _workerProcessName = $"Runner.Worker{IOUtil.ExeExtension}";
// this is not thread-safe
private readonly Queue<Guid> _jobDispatchedQueue = new();
private readonly ConcurrentDictionary<Guid, WorkerDispatcher> _jobInfos = new();
private readonly Queue<Guid> _jobDispatchedQueue = new Queue<Guid>();
private readonly ConcurrentDictionary<Guid, WorkerDispatcher> _jobInfos = new ConcurrentDictionary<Guid, WorkerDispatcher>();
// allow up to 30sec for any data to be transmitted over the process channel
// timeout limit can be overwritten by environment GITHUB_ACTIONS_RUNNER_CHANNEL_TIMEOUT
private TimeSpan _channelTimeout;
private TaskCompletionSource<bool> _runOnceJobCompleted = new();
private TaskCompletionSource<bool> _runOnceJobCompleted = new TaskCompletionSource<bool>();
public event EventHandler<JobStatusEventArgs> JobStatus;
private bool _isRunServiceJob;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -90,8 +86,6 @@ namespace GitHub.Runner.Listener
{
Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
_isRunServiceJob = MessageUtil.IsRunServiceJob(jobRequestMessage.MessageType);
WorkerDispatcher currentDispatch = null;
if (_jobDispatchedQueue.Count > 0)
{
@@ -117,7 +111,7 @@ namespace GitHub.Runner.Listener
}
}
WorkerDispatcher newDispatch = new(jobRequestMessage.JobId, jobRequestMessage.RequestId);
WorkerDispatcher newDispatch = new WorkerDispatcher(jobRequestMessage.JobId, jobRequestMessage.RequestId);
if (runOnce)
{
Trace.Info("Start dispatcher for one time used runner.");
@@ -245,13 +239,6 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
Trace.Error($"We are not yet checking the state of jobrequest {jobDispatch.JobId} status. Cancel running worker right away.");
jobDispatch.WorkerCancellationTokenSource.Cancel();
return;
}
// based on the current design, server will only send one job for a given runner at a time.
// if the runner received a new job request while a previous job request is still running, this typically indicates two situations
// 1. a runner bug caused a server and runner mismatch on the state of the job request, e.g. the runner didn't renew the jobrequest
@@ -370,11 +357,9 @@ namespace GitHub.Runner.Listener
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
// first job request renew succeed.
TaskCompletionSource<int> firstJobRequestRenewed = new();
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
var notification = HostContext.GetService<IJobNotification>();
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
// lock renew cancellation token.
using (var lockRenewalTokenSource = new CancellationTokenSource())
using (var workerProcessCancelTokenSource = new CancellationTokenSource())
@@ -384,7 +369,7 @@ namespace GitHub.Runner.Listener
// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(message, systemConnection, _poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// wait till the first renew succeeds or the job request is cancelled
// don't even start the worker if the first renew fails
@@ -406,16 +391,15 @@ namespace GitHub.Runner.Listener
await renewJobRequest;
// complete job request with result Cancelled
await CompleteJobRequestAsync(_poolId, message, systemConnection, lockToken, TaskResult.Canceled);
await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
return;
}
HostContext.WritePerfCounter($"JobRequestRenewed_{requestId.ToString()}");
Task<int> workerProcessTask = null;
object _outputLock = new();
List<string> workerOutput = new();
bool printToStdout = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Agent.PrintLogToStdout));
object _outputLock = new object();
List<string> workerOutput = new List<string>();
using (var processChannel = HostContext.CreateService<IProcessChannel>())
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
@@ -437,15 +421,7 @@ namespace GitHub.Runner.Listener
{
lock (_outputLock)
{
if (!stdout.Data.StartsWith("[WORKER"))
{
workerOutput.Add(stdout.Data);
}
if (printToStdout)
{
term.WriteLine(stdout.Data, skipTracing: true);
}
workerOutput.Add(stdout.Data);
}
}
};
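
The handler above buffers worker stdout under a lock and, in the newer code, optionally echoes it to the terminal while filtering internal "[WORKER" lines. A generic sketch of that pattern with System.Diagnostics.Process; the child process launched here is just a stand-in:

using System;
using System.Collections.Generic;
using System.Diagnostics;

class WorkerOutputCaptureSketch
{
    static void Main()
    {
        var outputLock = new object();
        var workerOutput = new List<string>();
        bool printToStdout = Environment.GetEnvironmentVariable("PRINT_WORKER_LOG") == "1";

        var psi = new ProcessStartInfo("dotnet", "--info")
        {
            RedirectStandardOutput = true,
            UseShellExecute = false,
        };
        using var process = new Process { StartInfo = psi };
        process.OutputDataReceived += (sender, e) =>
        {
            if (e.Data == null) return;            // null signals end of stream
            lock (outputLock)
            {
                if (!e.Data.StartsWith("[WORKER")) // keep internal IPC chatter out of the crash-log buffer
                {
                    workerOutput.Add(e.Data);
                }
                if (printToStdout)
                {
                    Console.WriteLine(e.Data);
                }
            }
        };

        process.Start();
        process.BeginOutputReadLine();
        process.WaitForExit();
        Console.WriteLine($"captured {workerOutput.Count} lines");
    }
}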
@@ -523,6 +499,7 @@ namespace GitHub.Runner.Listener
// we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
@@ -545,14 +522,18 @@ namespace GitHub.Runner.Listener
detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
var jobServer = await InitializeJobServerAsync(systemConnection);
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
await LogWorkerProcessUnhandledException(jobServer, message, detailInfo);
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
if (detailInfo.Contains(typeof(System.IO.IOException).ToString(), StringComparison.OrdinalIgnoreCase))
{
Trace.Info($"Finish job with result 'Failed' due to IOException.");
await ForceFailJob(jobServer, message, detailInfo);
await ForceFailJob(jobServer, message);
}
}
@@ -567,7 +548,7 @@ namespace GitHub.Runner.Listener
await renewJobRequest;
// complete job request
await CompleteJobRequestAsync(_poolId, message, systemConnection, lockToken, result, detailInfo);
await CompleteJobRequestAsync(_poolId, message, lockToken, result, detailInfo);
// print out any unhandled exception that happened in the worker after we complete the job request.
// when we run out of disk space, reporting back to the server has higher priority.
@@ -664,7 +645,7 @@ namespace GitHub.Runner.Listener
await renewJobRequest;
// complete job request
await CompleteJobRequestAsync(_poolId, message, systemConnection, lockToken, resultOnAbandonOrCancel);
await CompleteJobRequestAsync(_poolId, message, lockToken, resultOnAbandonOrCancel);
}
finally
{
@@ -677,7 +658,7 @@ namespace GitHub.Runner.Listener
finally
{
Busy = false;
if (JobStatus != null)
{
JobStatus(this, new JobStatusEventArgs(TaskAgentStatus.Online));
@@ -685,128 +666,9 @@ namespace GitHub.Runner.Listener
}
}
internal async Task RenewJobRequestAsync(Pipelines.AgentJobRequestMessage message, ServiceEndpoint systemConnection, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
if (this._isRunServiceJob)
{
var runServer = await GetRunServerAsync(systemConnection);
await RenewJobRequestAsync(runServer, message.Plan.PlanId, message.JobId, firstJobRequestRenewed, token);
}
else
{
var runnerServer = HostContext.GetService<IRunnerServer>();
await RenewJobRequestAsync(runnerServer, poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, token);
}
}
private async Task RenewJobRequestAsync(IRunServer runServer, Guid planId, Guid jobId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
TaskAgentJobRequest request = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
// renew lock during job running.
// stop renew only if cancellation token for lock renew task been signal or exception still happen after retry.
while (!token.IsCancellationRequested)
{
try
{
var renewResponse = await runServer.RenewJobAsync(planId, jobId, token);
Trace.Info($"Successfully renew job {jobId}, job is valid till {renewResponse.LockedUntil}");
if (!firstJobRequestRenewed.Task.IsCompleted)
{
// fire first renew succeed event.
firstJobRequestRenewed.TrySetResult(0);
}
if (encounteringError > 0)
{
encounteringError = 0;
HostContext.WritePerfCounter("JobRenewRecovered");
}
// renew again after 60 sec delay
await HostContext.Delay(TimeSpan.FromSeconds(60), token);
}
catch (TaskOrchestrationJobNotFoundException)
{
// no need for retry. the job is not valid anymore.
Trace.Info($"TaskAgentJobNotFoundException received when renew job {jobId}, job is no longer valid, stop renew job request.");
return;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
// OperationCanceledException may be caused by an http timeout or _lockRenewalTokenSource.Cancel();
// Stop renewing only when the cancellation token has fired.
Trace.Info($"job renew has been cancelled, stop renew job {jobId}.");
return;
}
catch (Exception ex)
{
Trace.Error($"Catch exception during renew runner job {jobId}.");
Trace.Error(ex);
encounteringError++;
// retry
TimeSpan remainingTime = TimeSpan.Zero;
if (!firstJobRequestRenewed.Task.IsCompleted)
{
// retry 5 times every 10 sec for the first renew
if (firstRenewRetryLimit-- > 0)
{
remainingTime = TimeSpan.FromSeconds(10);
}
}
else
{
// retry till reach lockeduntil + 5 mins extra buffer.
remainingTime = request.LockedUntil.Value + TimeSpan.FromMinutes(5) - DateTime.UtcNow;
}
if (remainingTime > TimeSpan.Zero)
{
TimeSpan delayTime;
if (!firstJobRequestRenewed.Task.IsCompleted)
{
Trace.Info($"Retrying lock renewal for job {jobId}. The first job renew request has failed.");
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10));
}
else
{
Trace.Info($"Retrying lock renewal for job {jobId}. Job is valid until {request.LockedUntil.Value}.");
if (encounteringError > 5)
{
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30));
}
else
{
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
}
}
try
{
// back-off before next retry.
await HostContext.Delay(delayTime, token);
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info($"job renew has been cancelled, stop renew job {jobId}.");
}
}
else
{
Trace.Info($"Lock renewal has run out of retry, stop renew lock for job {jobId}.");
HostContext.WritePerfCounter("JobRenewReachLimit");
return;
}
}
}
}
private async Task RenewJobRequestAsync(IRunnerServer runnerServer, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
var runnerServer = HostContext.GetService<IRunnerServer>();
TaskAgentJobRequest request = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
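
Both renew variants follow the same keep-alive shape: signal a TaskCompletionSource once the first renewal succeeds (so the worker can start), then renew on a fixed cadence until cancelled. A condensed sketch with the service call abstracted away; the renewJob delegate and the 60-second interval are illustrative:

using System;
using System.Threading;
using System.Threading.Tasks;

static class RenewLoopSketch
{
    public static async Task RenewUntilCancelledAsync(
        Func<CancellationToken, Task> renewJob,
        TaskCompletionSource<int> firstRenewSucceeded,
        CancellationToken token)
    {
        while (!token.IsCancellationRequested)
        {
            try
            {
                await renewJob(token);

                // Unblock whoever is waiting to start the worker after the first successful renew.
                firstRenewSucceeded.TrySetResult(0);

                // Renew again after a fixed delay; cancellation breaks the wait immediately.
                await Task.Delay(TimeSpan.FromSeconds(60), token);
            }
            catch (OperationCanceledException) when (token.IsCancellationRequested)
            {
                return;   // lock renewal was cancelled because the job finished
            }
        }
    }
}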
@@ -969,93 +831,90 @@ namespace GitHub.Runner.Listener
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection));
ArgUtil.NotNull(systemConnection, nameof(systemConnection));
var server = await InitializeJobServerAsync(systemConnection);
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
if (server is IJobServer jobServer)
await jobServer.ConnectAsync(jobConnection);
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
var updatedRecords = new List<TimelineRecord>();
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
var logRecords = new Dictionary<Guid, TimelineRecord>();
foreach (var log in logs)
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
var updatedRecords = new List<TimelineRecord>();
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
var logRecords = new Dictionary<Guid, TimelineRecord>();
foreach (var log in logs)
var logName = Path.GetFileNameWithoutExtension(log);
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
if (logNameParts.Length != 3)
{
var logName = Path.GetFileNameWithoutExtension(log);
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
if (logNameParts.Length != 3)
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var logPageSeperator = logName.IndexOf('_');
var logRecordId = Guid.Empty;
var pageNumber = 0;
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var logPageSeperator = logName.IndexOf('_');
var logRecordId = Guid.Empty;
var pageNumber = 0;
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
{
Trace.Warning($"log file '{log}' is not belongs to current job");
continue;
}
if (!Guid.TryParse(logNameParts[1], out logRecordId))
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
if (!int.TryParse(logNameParts[2], out pageNumber))
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
if (record != null)
{
if (!logPages.ContainsKey(record.Id))
{
logPages[record.Id] = new Dictionary<int, string>();
logRecords[record.Id] = record;
}
logPages[record.Id][pageNumber] = log;
}
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
{
Trace.Warning($"log file '{log}' is not belongs to current job");
continue;
}
foreach (var pages in logPages)
if (!Guid.TryParse(logNameParts[1], out logRecordId))
{
var record = logRecords[pages.Key];
if (record.Log == null)
{
// Create the log
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
// Need to post timeline record updates to reflect the log creation
updatedRecords.Add(record.Clone());
}
for (var i = 1; i <= pages.Value.Count; i++)
{
var logFile = pages.Value[i];
// Upload the contents
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
}
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
IOUtil.DeleteFile(logFile);
}
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
if (updatedRecords.Count > 0)
if (!int.TryParse(logNameParts[2], out pageNumber))
{
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
if (record != null)
{
if (!logPages.ContainsKey(record.Id))
{
logPages[record.Id] = new Dictionary<int, string>();
logRecords[record.Id] = record;
}
logPages[record.Id][pageNumber] = log;
}
}
else
foreach (var pages in logPages)
{
Trace.Info("Job server does not support log upload yet.");
var record = logRecords[pages.Key];
if (record.Log == null)
{
// Create the log
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
// Need to post timeline record updates to reflect the log creation
updatedRecords.Add(record.Clone());
}
for (var i = 1; i <= pages.Value.Count; i++)
{
var logFile = pages.Value[i];
// Upload the contents
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
}
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
IOUtil.DeleteFile(logFile);
}
}
if (updatedRecords.Count > 0)
{
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
}
}
catch (Exception ex)
@@ -1065,7 +924,7 @@ namespace GitHub.Runner.Listener
}
}
private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, ServiceEndpoint systemConnection, Guid lockToken, TaskResult result, string detailInfo = null)
private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, Guid lockToken, TaskResult result, string detailInfo = null)
{
Trace.Entering();
@@ -1075,31 +934,9 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
var runServer = await GetRunServerAsync(systemConnection);
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
var unhandledAnnotation = unhandledExceptionIssue.ToAnnotation();
var jobAnnotations = new List<Annotation>();
if (unhandledAnnotation.HasValue)
{
jobAnnotations.Add(unhandledAnnotation.Value);
}
try
{
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
}
return;
}
var runnerServer = HostContext.GetService<IRunnerServer>();
int completeJobRequestRetryLimit = 5;
List<Exception> exceptions = new();
List<Exception> exceptions = new List<Exception>();
while (completeJobRequestRetryLimit-- > 0)
{
try
@@ -1133,102 +970,66 @@ namespace GitHub.Runner.Listener
}
// log an error issue to job level timeline record
private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
private async Task LogWorkerProcessUnhandledException(IJobServer jobServer, Pipelines.AgentJobRequestMessage message, string errorMessage)
{
if (server is IJobServer jobServer)
try
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
try
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
if (!string.IsNullOrEmpty(errorMessage) &&
message.Variables.TryGetValue("DistributedTask.EnableRunnerIPCDebug", out var enableRunnerIPCDebug) &&
StringUtil.ConvertToBoolean(enableRunnerIPCDebug.Value))
{
// the trace should be best effort and not affect any job result
var match = _invalidJsonRegex.Match(errorMessage);
if (match.Success &&
match.Groups.Count == 2)
{
var jsonPosition = int.Parse(match.Groups[1].Value);
var serializedJobMessage = JsonUtility.ToString(message);
var originalJson = serializedJobMessage.Substring(jsonPosition - 10, 20);
errorMessage = $"Runner sent Json at position '{jsonPosition}': {originalJson} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(originalJson))})\n{errorMessage}";
}
}
}
catch (Exception ex)
{
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
errorMessage = $"Fail to check json IPC error: {ex.Message}\n{errorMessage}";
}
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
else
catch (Exception ex)
{
Trace.Info("Job server does not support handling unhandled exception yet, error message: {0}", detailInfo);
return;
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
}
}
// raise job completed event to fail the job.
private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message, string detailInfo)
private async Task ForceFailJob(IJobServer jobServer, Pipelines.AgentJobRequestMessage message)
{
if (server is IJobServer jobServer)
try
{
try
{
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
else if (server is IRunServer runServer)
catch (Exception ex)
{
try
{
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = detailInfo };
var unhandledAnnotation = unhandledExceptionIssue.ToAnnotation();
var jobAnnotations = new List<Annotation>();
if (unhandledAnnotation.HasValue)
{
jobAnnotations.Add(unhandledAnnotation.Value);
}
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, jobAnnotations: jobAnnotations, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
}
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
else
{
throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported.");
}
}
private async Task<IRunnerService> InitializeJobServerAsync(ServiceEndpoint systemConnection)
{
if (this._isRunServiceJob)
{
return await GetRunServerAsync(systemConnection);
}
else
{
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
return jobServer;
}
}
private async Task<IRunServer> GetRunServerAsync(ServiceEndpoint systemConnection)
{
var runServer = HostContext.GetService<IRunServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
await runServer.ConnectAsync(systemConnection.Url, jobServerCredential);
return runServer;
}
private class WorkerDispatcher : IDisposable
@@ -1238,7 +1039,7 @@ namespace GitHub.Runner.Listener
public Task WorkerDispatch { get; set; }
public CancellationTokenSource WorkerCancellationTokenSource { get; private set; }
public CancellationTokenSource WorkerCancelTimeoutKillTokenSource { get; private set; }
private readonly object _lock = new();
private readonly object _lock = new object();
public WorkerDispatcher(Guid jobId, long requestId)
{
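
The unfinished-log upload in this file relies on a file naming convention of timelineId_recordId_pageNumber. A small parsing sketch with Guid.TryParse and int.TryParse (the file name below is invented):

using System;
using System.IO;

class LogNameParseSketch
{
    static void Main()
    {
        string log = "/tmp/_diag/pages/0df85c36-0f47-4e3e-9c2a-2b3f1a1b2c3d_7c2f8f55-1111-2222-3333-444455556666_3.log";

        // Expected shape: GUID_GUID_INT (timeline id, record id, page number).
        var parts = Path.GetFileNameWithoutExtension(log).Split('_', StringSplitOptions.RemoveEmptyEntries);
        if (parts.Length != 3 ||
            !Guid.TryParse(parts[0], out Guid timelineId) ||
            !Guid.TryParse(parts[1], out Guid recordId) ||
            !int.TryParse(parts[2], out int pageNumber))
        {
            Console.WriteLine($"'{log}' doesn't follow the naming convention 'GUID_GUID_INT'.");
            return;
        }

        Console.WriteLine($"timeline {timelineId}, record {recordId}, page {pageNumber}");
    }
}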

View File

@@ -38,7 +38,7 @@ namespace GitHub.Runner.Listener
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new();
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new Dictionary<string, int>();
private TaskAgentStatus runnerStatus = TaskAgentStatus.Online;
private CancellationTokenSource _getMessagesTokenSource;
@@ -182,7 +182,7 @@ namespace GitHub.Runner.Listener
try
{
_getMessagesTokenSource?.Cancel();
}
}
catch (ObjectDisposedException)
{
Trace.Info("_getMessagesTokenSource is already disposed.");
@@ -198,7 +198,7 @@ namespace GitHub.Runner.Listener
bool encounteringError = false;
int continuousError = 0;
string errorMessage = string.Empty;
Stopwatch heartbeat = new();
Stopwatch heartbeat = new Stopwatch();
heartbeat.Restart();
while (true)
{
@@ -211,7 +211,6 @@ namespace GitHub.Runner.Listener
_session.SessionId,
_lastMessageId,
runnerStatus,
BuildConstants.RunnerPackage.Version,
_getMessagesTokenSource.Token);
// Decrypt the message body if the session is using encryption
@@ -245,10 +244,6 @@ namespace GitHub.Runner.Listener
_accessTokenRevoked = true;
throw;
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
throw;
}
catch (Exception ex)
{
Trace.Error("Catch exception during get next message.");
@@ -293,7 +288,7 @@ namespace GitHub.Runner.Listener
await HostContext.Delay(_getNextMessageRetryInterval, token);
}
}
finally
finally
{
_getMessagesTokenSource.Dispose();
}
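
OnJobStatus in this listener cancels only the inner token source so the long poll restarts with the new runner status, while the outer token still controls shutdown. A minimal sketch of that linked-token pattern; the getMessage delegate is a stand-in for the actual long-poll call:

using System;
using System.Threading;
using System.Threading.Tasks;

class LinkedTokenPollSketch
{
    private CancellationTokenSource _getMessagesTokenSource;

    // Called from an event handler when the runner's status changes.
    public void InterruptPoll()
    {
        try { _getMessagesTokenSource?.Cancel(); }
        catch (ObjectDisposedException) { /* poll already finished */ }
    }

    public async Task PollLoopAsync(Func<CancellationToken, Task> getMessage, CancellationToken shutdown)
    {
        while (!shutdown.IsCancellationRequested)
        {
            // Inner source is linked to the shutdown token: cancelling either one stops the wait.
            _getMessagesTokenSource = CancellationTokenSource.CreateLinkedTokenSource(shutdown);
            try
            {
                await getMessage(_getMessagesTokenSource.Token);
            }
            catch (OperationCanceledException) when (!shutdown.IsCancellationRequested)
            {
                continue;   // only the inner token fired: go around and poll again with fresh state
            }
            finally
            {
                _getMessagesTokenSource.Dispose();
            }
        }
    }
}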

View File

@@ -6,7 +6,6 @@ using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Listener
{
@@ -17,7 +16,7 @@ namespace GitHub.Runner.Listener
// Add environment variables from .env file
LoadAndSetEnv();
using (HostContext context = new("Runner"))
using (HostContext context = new HostContext("Runner"))
{
return MainAsync(context, args).GetAwaiter().GetResult();
}
@@ -59,7 +58,7 @@ namespace GitHub.Runner.Listener
terminal.WriteLine("This runner version is built for Windows. Please install a correct build for your OS.");
return Constants.Runner.ReturnCode.TerminatedError;
}
#if ARM64
#if ARM64
// A little hacky, but windows gives no way to differentiate between windows 10 and 11.
// By default only 11 supports native x64 app emulation on arm, so we only want to support windows 11
// https://docs.microsoft.com/en-us/windows/arm/overview#build-windows-apps-that-run-on-arm
@@ -70,7 +69,7 @@ namespace GitHub.Runner.Listener
terminal.WriteLine("Win-arm64 runners require windows 11 or later. Please upgrade your operating system.");
return Constants.Runner.ReturnCode.TerminatedError;
}
#endif
#endif
break;
default:
terminal.WriteLine($"Running the runner on this platform is not supported. The current platform is {RuntimeInformation.OSDescription} and it was built for {Constants.Runner.Platform.ToString()}.");
@@ -138,12 +137,6 @@ namespace GitHub.Runner.Listener
}
}
catch (AccessDeniedException e) when (e.InnerException is InvalidTaskAgentVersionException)
{
terminal.WriteError($"An error occured: {e.Message}");
trace.Error(e);
return Constants.Runner.ReturnCode.TerminatedError;
}
catch (Exception e)
{
terminal.WriteError($"An error occurred: {e.Message}");

View File

@@ -4,13 +4,11 @@ using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Listener.Check;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
@@ -30,7 +28,7 @@ namespace GitHub.Runner.Listener
private IMessageListener _listener;
private ITerminal _term;
private bool _inConfigStage;
private ManualResetEvent _completedCommand = new(false);
private ManualResetEvent _completedCommand = new ManualResetEvent(false);
public override void Initialize(IHostContext hostContext)
{
@@ -137,12 +135,6 @@ namespace GitHub.Runner.Listener
// remove config files, remove service, and exit
if (command.Remove)
{
// only remove local config files and exit
if (command.RemoveLocalConfig)
{
configManager.DeleteLocalRunnerConfig();
return Constants.Runner.ReturnCode.Success;
}
try
{
await configManager.UnconfigureAsync(command);
@@ -211,16 +203,10 @@ namespace GitHub.Runner.Listener
foreach (var config in jitConfig)
{
var configFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), config.Key);
var configContent = Convert.FromBase64String(config.Value);
#if OS_WINDOWS
if (configFile == HostContext.GetConfigFile(WellKnownConfigFile.RSACredentials))
{
configContent = ProtectedData.Protect(configContent, null, DataProtectionScope.LocalMachine);
}
#endif
File.WriteAllBytes(configFile, configContent);
var configContent = Encoding.UTF8.GetString(Convert.FromBase64String(config.Value));
File.WriteAllText(configFile, configContent, Encoding.UTF8);
File.SetAttributes(configFile, File.GetAttributes(configFile) | FileAttributes.Hidden);
Trace.Info($"Saved {configContent.Length} bytes to '{configFile}'.");
Trace.Info($"Save {configContent.Length} chars to '{configFile}'.");
}
}
catch (Exception ex)
@@ -339,26 +325,13 @@ namespace GitHub.Runner.Listener
}
}
private IMessageListener GetMesageListener(RunnerSettings settings)
{
if (settings.UseV2Flow)
{
Trace.Info($"Using BrokerMessageListener");
var brokerListener = new BrokerMessageListener();
brokerListener.Initialize(HostContext);
return brokerListener;
}
return HostContext.GetService<IMessageListener>();
}
//create worker manager, create message listener and start listening to the queue
private async Task<int> RunAsync(RunnerSettings settings, bool runOnce = false)
{
try
{
Trace.Info(nameof(RunAsync));
_listener = GetMesageListener(settings);
_listener = HostContext.GetService<IMessageListener>();
if (!await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken))
{
return Constants.Runner.ReturnCode.TerminatedError;
@@ -457,22 +430,12 @@ namespace GitHub.Runner.Listener
message = await getNextMessage; //get next message
HostContext.WritePerfCounter($"MessageReceived_{message.MessageType}");
if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase) ||
string.Equals(message.MessageType, RunnerRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
{
if (autoUpdateInProgress == false)
{
autoUpdateInProgress = true;
AgentRefreshMessage runnerUpdateMessage = null;
if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
{
runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
}
else
{
var brokerRunnerUpdateMessage = JsonUtility.FromString<RunnerRefreshMessage>(message.Body);
runnerUpdateMessage = new AgentRefreshMessage(brokerRunnerUpdateMessage.RunnerId, brokerRunnerUpdateMessage.TargetVersion, TimeSpan.FromSeconds(brokerRunnerUpdateMessage.TimeoutInSeconds));
}
var runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
#if DEBUG
// Can mock the update for testing
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
@@ -523,7 +486,7 @@ namespace GitHub.Runner.Listener
}
}
// Broker flow
else if (MessageUtil.IsRunServiceJob(message.MessageType))
else if (string.Equals(message.MessageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase))
{
if (autoUpdateInProgress || runOnceJobReceived)
{
@@ -533,36 +496,16 @@ namespace GitHub.Runner.Listener
else
{
var messageRef = StringUtil.ConvertFromJson<RunnerJobRequestRef>(message.Body);
Pipelines.AgentJobRequestMessage jobRequestMessage = null;
// Create connection
var credMgr = HostContext.GetService<ICredentialManager>();
var creds = credMgr.LoadCredentials();
if (string.IsNullOrEmpty(messageRef.RunServiceUrl))
{
var actionsRunServer = HostContext.CreateService<IActionsRunServer>();
await actionsRunServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
jobRequestMessage = await actionsRunServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
}
else
{
var runServer = HostContext.CreateService<IRunServer>();
await runServer.ConnectAsync(new Uri(messageRef.RunServiceUrl), creds);
try
{
jobRequestMessage =
await runServer.GetJobMessageAsync(messageRef.RunnerRequestId,
messageQueueLoopTokenSource.Token);
}
catch (TaskOrchestrationJobAlreadyAcquiredException)
{
Trace.Info("Job is already acquired, skip this message.");
continue;
}
}
var runServer = HostContext.CreateService<IRunServer>();
await runServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
var jobMessage = await runServer.GetJobMessageAsync(messageRef.RunnerRequestId, messageQueueLoopTokenSource.Token);
jobDispatcher.Run(jobRequestMessage, runOnce);
jobDispatcher.Run(jobMessage, runOnce);
if (runOnce)
{
Trace.Info("One time used runner received job message.");
@@ -683,9 +626,7 @@ Config Options:
--token string Registration token. Required if unattended
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
--runnergroup string Name of the runner group to add this runner to (defaults to the default runner group)
--labels string Custom labels that will be added to the runner. This option is mandatory if --no-default-labels is used.
--no-default-labels Disables adding the default labels: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
--local Removes the runner config files from your local machine. Used as an option to the remove command
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
--replace Replace any existing runner with the same name (default false)
--pat GitHub personal access token with repo scope. Used for checking network connectivity when executing `.{separator}run.{ext} --check`

View File

@@ -9,7 +9,5 @@ namespace GitHub.Runner.Listener
public string Id { get; set; }
[DataMember(Name = "runner_request_id")]
public string RunnerRequestId { get; set; }
[DataMember(Name = "run_service_url")]
public string RunServiceUrl { get; set; }
}
}
}
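RunnerJobRequestRef above is a thin wire contract: the DataMember names are the snake_case JSON keys sent by the service. A stand-in sketch (using System.Text.Json rather than the runner's own JsonUtility, and a made-up payload) of how such a message body maps onto the type:

using System;
using System.Text.Json;
using System.Text.Json.Serialization;

public sealed class JobRequestRef
{
    [JsonPropertyName("id")]
    public string Id { get; set; }

    [JsonPropertyName("runner_request_id")]
    public string RunnerRequestId { get; set; }
}

public static class JobRequestRefDemo
{
    public static void Main()
    {
        const string body = "{\"id\":\"42\",\"runner_request_id\":\"abc123\"}"; // hypothetical message body
        JobRequestRef msg = JsonSerializer.Deserialize<JobRequestRef>(body);
        Console.WriteLine($"id={msg.Id}, runner_request_id={msg.RunnerRequestId}");
    }
}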

View File

@@ -32,14 +32,14 @@ namespace GitHub.Runner.Listener
private static string _platform = BuildConstants.RunnerPackage.PackageName;
private static string _dotnetRuntime = "dotnetRuntime";
private static string _externals = "externals";
private readonly Dictionary<string, string> _contentHashes = new();
private readonly Dictionary<string, string> _contentHashes = new Dictionary<string, string>();
private PackageMetadata _targetPackage;
private ITerminal _terminal;
private IRunnerServer _runnerServer;
private int _poolId;
private int _agentId;
private readonly ConcurrentQueue<string> _updateTrace = new();
private readonly ConcurrentQueue<string> _updateTrace = new ConcurrentQueue<string>();
private Task _cloneAndCalculateContentHashTask;
private string _dotnetRuntimeCloneDirectory;
private string _externalsCloneDirectory;
@@ -134,7 +134,7 @@ namespace GitHub.Runner.Listener
string flagFile = "update.finished";
IOUtil.DeleteFile(flagFile);
// kick off update script
Process invokeScript = new();
Process invokeScript = new Process();
#if OS_WINDOWS
invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace);
invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\"";
@@ -191,9 +191,9 @@ namespace GitHub.Runner.Listener
}
Trace.Info($"Version '{_targetPackage.Version}' of '{_targetPackage.Type}' package available in server.");
PackageVersion serverVersion = new(_targetPackage.Version);
PackageVersion serverVersion = new PackageVersion(_targetPackage.Version);
Trace.Info($"Current running runner version is {BuildConstants.RunnerPackage.Version}");
PackageVersion runnerVersion = new(BuildConstants.RunnerPackage.Version);
PackageVersion runnerVersion = new PackageVersion(BuildConstants.RunnerPackage.Version);
return serverVersion.CompareTo(runnerVersion) > 0;
}
@@ -476,7 +476,7 @@ namespace GitHub.Runner.Listener
long downloadSize = 0;
//open zip stream in async mode
using (HttpClient httpClient = new(HostContext.CreateHttpClientHandler()))
using (HttpClient httpClient = new HttpClient(HostContext.CreateHttpClientHandler()))
{
if (!string.IsNullOrEmpty(_targetPackage.Token))
{
@@ -486,7 +486,7 @@ namespace GitHub.Runner.Listener
Trace.Info($"Downloading {packageDownloadUrl}");
using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: 4096, useAsync: true))
using (FileStream fs = new FileStream(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: 4096, useAsync: true))
using (Stream result = await httpClient.GetStreamAsync(packageDownloadUrl))
{
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
@@ -596,7 +596,7 @@ namespace GitHub.Runner.Listener
int exitCode = await processInvoker.ExecuteAsync(extractDirectory, tar, $"-xzf \"{archiveFile}\"", null, token);
if (exitCode != 0)
{
throw new NotSupportedException($"Can't use 'tar -xzf' to extract archive file: {archiveFile}. return code: {exitCode}.");
throw new NotSupportedException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. return code: {exitCode}.");
}
}
}
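The SelfUpdater hunks above keep the long-standing download pattern: stream the package straight into an async FileStream and copy in 81920-byte chunks, which matches Stream.CopyTo's default and stays under the 85k large object heap threshold. A minimal sketch of that pattern with a placeholder URL and file name (both hypothetical):

using System;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;

public static class DownloadSketch
{
    public static async Task Main()
    {
        const string url = "https://example.com/runner-package.tar.gz"; // placeholder, substitute a real package URL
        const string archiveFile = "runner-package.tar.gz";

        using var httpClient = new HttpClient();
        using var response = await httpClient.GetAsync(url, HttpCompletionOption.ResponseHeadersRead);
        response.EnsureSuccessStatusCode();

        using var fs = new FileStream(archiveFile, FileMode.Create, FileAccess.Write,
                                      FileShare.None, bufferSize: 4096, useAsync: true);
        using Stream body = await response.Content.ReadAsStreamAsync();
        await body.CopyToAsync(fs, 81920); // same buffer size the updater uses

        Console.WriteLine($"Downloaded {fs.Length} bytes to '{archiveFile}'.");
    }
}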

View File

@@ -12,7 +12,7 @@ namespace GitHub.Runner.PluginHost
{
public static class Program
{
private static CancellationTokenSource tokenSource = new();
private static CancellationTokenSource tokenSource = new CancellationTokenSource();
private static string executingAssemblyLocation = string.Empty;
public static int Main(string[] args)

View File

@@ -63,7 +63,7 @@ namespace GitHub.Runner.Plugins.Artifact
string containerPath = actionsStorageArtifact.Name; // In actions storage artifacts, name equals the path
long containerId = actionsStorageArtifact.ContainerId;
FileContainerServer fileContainerServer = new(context.VssConnection, projectId: new Guid(), containerId, containerPath);
FileContainerServer fileContainerServer = new FileContainerServer(context.VssConnection, projectId: new Guid(), containerId, containerPath);
await fileContainerServer.DownloadFromContainerAsync(context, targetPath, token);
context.Output("Artifact download finished.");

View File

@@ -23,10 +23,10 @@ namespace GitHub.Runner.Plugins.Artifact
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
private const int _defaultCopyBufferSize = 81920;
private readonly ConcurrentQueue<string> _fileUploadQueue = new();
private readonly ConcurrentQueue<DownloadInfo> _fileDownloadQueue = new();
private readonly ConcurrentDictionary<string, ConcurrentQueue<string>> _fileUploadTraceLog = new();
private readonly ConcurrentDictionary<string, ConcurrentQueue<string>> _fileUploadProgressLog = new();
private readonly ConcurrentQueue<string> _fileUploadQueue = new ConcurrentQueue<string>();
private readonly ConcurrentQueue<DownloadInfo> _fileDownloadQueue = new ConcurrentQueue<DownloadInfo>();
private readonly ConcurrentDictionary<string, ConcurrentQueue<string>> _fileUploadTraceLog = new ConcurrentDictionary<string, ConcurrentQueue<string>>();
private readonly ConcurrentDictionary<string, ConcurrentQueue<string>> _fileUploadProgressLog = new ConcurrentDictionary<string, ConcurrentQueue<string>>();
private readonly FileContainerHttpClient _fileContainerHttpClient;
private CancellationTokenSource _uploadCancellationTokenSource;
@@ -67,7 +67,7 @@ namespace GitHub.Runner.Plugins.Artifact
CancellationToken cancellationToken)
{
// Find out all container items need to be processed
List<FileContainerItem> containerItems = new();
List<FileContainerItem> containerItems = new List<FileContainerItem>();
int retryCount = 0;
while (retryCount < 3)
{
@@ -106,7 +106,7 @@ namespace GitHub.Runner.Plugins.Artifact
// Create all required empty folders and emptry files, gather a list of files that we need to download from server.
int foldersCreated = 0;
int emptryFilesCreated = 0;
List<DownloadInfo> downloadFiles = new();
List<DownloadInfo> downloadFiles = new List<DownloadInfo>();
foreach (var item in containerItems.OrderBy(x => x.Path))
{
if (!item.Path.StartsWith(_containerPath, StringComparison.OrdinalIgnoreCase))
@@ -306,7 +306,7 @@ namespace GitHub.Runner.Plugins.Artifact
Task downloadMonitor = DownloadReportingAsync(context, files.Count(), token);
// Start parallel download tasks.
List<Task<DownloadResult>> parallelDownloadingTasks = new();
List<Task<DownloadResult>> parallelDownloadingTasks = new List<Task<DownloadResult>>();
for (int downloader = 0; downloader < concurrentDownloads; downloader++)
{
parallelDownloadingTasks.Add(DownloadAsync(context, downloader, token));
@@ -358,7 +358,7 @@ namespace GitHub.Runner.Plugins.Artifact
Task uploadMonitor = UploadReportingAsync(context, files.Count(), _uploadCancellationTokenSource.Token);
// Start parallel upload tasks.
List<Task<UploadResult>> parallelUploadingTasks = new();
List<Task<UploadResult>> parallelUploadingTasks = new List<Task<UploadResult>>();
for (int uploader = 0; uploader < concurrentUploads; uploader++)
{
parallelUploadingTasks.Add(UploadAsync(context, uploader, _uploadCancellationTokenSource.Token));
@@ -381,8 +381,8 @@ namespace GitHub.Runner.Plugins.Artifact
private async Task<DownloadResult> DownloadAsync(RunnerActionPluginExecutionContext context, int downloaderId, CancellationToken token)
{
List<DownloadInfo> failedFiles = new();
Stopwatch downloadTimer = new();
List<DownloadInfo> failedFiles = new List<DownloadInfo>();
Stopwatch downloadTimer = new Stopwatch();
while (_fileDownloadQueue.TryDequeue(out DownloadInfo fileToDownload))
{
token.ThrowIfCancellationRequested();
@@ -396,7 +396,7 @@ namespace GitHub.Runner.Plugins.Artifact
{
context.Debug($"Start downloading file: '{fileToDownload.ItemPath}' (Downloader {downloaderId})");
downloadTimer.Restart();
using (FileStream fs = new(fileToDownload.LocalPath, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
using (FileStream fs = new FileStream(fileToDownload.LocalPath, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
using (var downloadStream = await _fileContainerHttpClient.DownloadFileAsync(_containerId, fileToDownload.ItemPath, token, _projectId))
{
await downloadStream.CopyToAsync(fs, _defaultCopyBufferSize, token);
@@ -453,10 +453,10 @@ namespace GitHub.Runner.Plugins.Artifact
private async Task<UploadResult> UploadAsync(RunnerActionPluginExecutionContext context, int uploaderId, CancellationToken token)
{
List<string> failedFiles = new();
List<string> failedFiles = new List<string>();
long uploadedSize = 0;
string fileToUpload;
Stopwatch uploadTimer = new();
Stopwatch uploadTimer = new Stopwatch();
while (_fileUploadQueue.TryDequeue(out fileToUpload))
{
token.ThrowIfCancellationRequested();
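FileContainerServer fans uploads and downloads out over a fixed number of worker tasks that drain shared ConcurrentQueues. A simplified, self-contained sketch of that fan-out pattern (not the runner's class; the item names and delay stand in for real file transfers):

using System;
using System.Collections.Concurrent;
using System.Linq;
using System.Threading.Tasks;

public static class ParallelQueueSketch
{
    private static readonly ConcurrentQueue<string> _work = new ConcurrentQueue<string>();

    public static async Task Main()
    {
        // Queue every item up front, then let the workers race to drain the queue.
        foreach (var item in Enumerable.Range(1, 20).Select(i => $"file-{i}.txt"))
        {
            _work.Enqueue(item);
        }

        int concurrency = 4;
        var workers = Enumerable.Range(0, concurrency)
                                .Select(id => ProcessAsync(id))
                                .ToList();

        int[] results = await Task.WhenAll(workers);
        Console.WriteLine($"Processed {results.Sum()} items with {concurrency} workers.");
    }

    private static async Task<int> ProcessAsync(int workerId)
    {
        int processed = 0;
        while (_work.TryDequeue(out string item))
        {
            await Task.Delay(10); // simulated transfer; a real worker streams bytes here
            processed++;
            Console.WriteLine($"Worker {workerId} finished {item}");
        }
        return processed;
    }
}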

View File

@@ -68,7 +68,7 @@ namespace GitHub.Runner.Plugins.Artifact
context.Output($"Uploading artifact '{artifactName}' from '{fullPath}' for run #{buildId}");
FileContainerServer fileContainerHelper = new(context.VssConnection, projectId: Guid.Empty, containerId, artifactName);
FileContainerServer fileContainerHelper = new FileContainerServer(context.VssConnection, projectId: Guid.Empty, containerId, artifactName);
var propertiesDictionary = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
long size = 0;
@@ -87,7 +87,7 @@ namespace GitHub.Runner.Plugins.Artifact
// Definition ID is a dummy value only used by HTTP client routing purposes
int definitionId = 1;
PipelinesServer pipelinesHelper = new(context.VssConnection);
PipelinesServer pipelinesHelper = new PipelinesServer(context.VssConnection);
var artifact = await pipelinesHelper.AssociateActionsStorageArtifactAsync(
definitionId,

View File

@@ -18,7 +18,7 @@ namespace GitHub.Runner.Plugins.Repository
#else
private static readonly Encoding s_encoding = null;
#endif
private readonly Dictionary<string, string> gitEnv = new(StringComparer.OrdinalIgnoreCase)
private readonly Dictionary<string, string> gitEnv = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
{ "GIT_TERMINAL_PROMPT", "0" },
};
@@ -92,11 +92,11 @@ namespace GitHub.Runner.Plugins.Repository
}
// required 2.0, all git operation commandline args need min git version 2.0
Version minRequiredGitVersion = new(2, 0);
Version minRequiredGitVersion = new Version(2, 0);
EnsureGitVersion(minRequiredGitVersion, throwOnNotMatch: true);
// suggest user upgrade to 2.9 for better git experience
Version recommendGitVersion = new(2, 9);
Version recommendGitVersion = new Version(2, 9);
if (!EnsureGitVersion(recommendGitVersion, throwOnNotMatch: false))
{
context.Output($"To get a better Git experience, upgrade your Git to at least version '{recommendGitVersion}'. Your current Git version is '{gitVersion}'.");
@@ -430,7 +430,7 @@ namespace GitHub.Runner.Plugins.Repository
context.Debug($"Inspect remote.origin.url for repository under {repositoryPath}");
Uri fetchUrl = null;
List<string> outputStrings = new();
List<string> outputStrings = new List<string>();
int exitCode = await ExecuteGitCommandAsync(context, repositoryPath, "config", "--get remote.origin.url", outputStrings);
if (exitCode != 0)
@@ -477,7 +477,7 @@ namespace GitHub.Runner.Plugins.Repository
context.Debug($"Checking git config {configKey} exist or not");
// ignore any outputs by redirect them into a string list, since the output might contains secrets.
List<string> outputStrings = new();
List<string> outputStrings = new List<string>();
int exitcode = await ExecuteGitCommandAsync(context, repositoryPath, "config", StringUtil.Format($"--get-all {configKey}"), outputStrings);
return exitcode == 0;
@@ -539,7 +539,7 @@ namespace GitHub.Runner.Plugins.Repository
string runnerWorkspace = context.GetRunnerContext("workspace");
ArgUtil.Directory(runnerWorkspace, "runnerWorkspace");
Version version = null;
List<string> outputStrings = new();
List<string> outputStrings = new List<string>();
int exitCode = await ExecuteGitCommandAsync(context, runnerWorkspace, "version", null, outputStrings);
context.Output($"{string.Join(Environment.NewLine, outputStrings)}");
if (exitCode == 0)
@@ -550,7 +550,7 @@ namespace GitHub.Runner.Plugins.Repository
{
string verString = outputStrings.First();
// we interested about major.minor.patch version
Regex verRegex = new("\\d+\\.\\d+(\\.\\d+)?", RegexOptions.IgnoreCase);
Regex verRegex = new Regex("\\d+\\.\\d+(\\.\\d+)?", RegexOptions.IgnoreCase);
var matchResult = verRegex.Match(verString);
if (matchResult.Success && !string.IsNullOrEmpty(matchResult.Value))
{
@@ -572,7 +572,7 @@ namespace GitHub.Runner.Plugins.Repository
string runnerWorkspace = context.GetRunnerContext("workspace");
ArgUtil.Directory(runnerWorkspace, "runnerWorkspace");
Version version = null;
List<string> outputStrings = new();
List<string> outputStrings = new List<string>();
int exitCode = await ExecuteGitCommandAsync(context, runnerWorkspace, "lfs version", null, outputStrings);
context.Output($"{string.Join(Environment.NewLine, outputStrings)}");
if (exitCode == 0)
@@ -583,7 +583,7 @@ namespace GitHub.Runner.Plugins.Repository
{
string verString = outputStrings.First();
// we interested about major.minor.patch version
Regex verRegex = new("\\d+\\.\\d+(\\.\\d+)?", RegexOptions.IgnoreCase);
Regex verRegex = new Regex("\\d+\\.\\d+(\\.\\d+)?", RegexOptions.IgnoreCase);
var matchResult = verRegex.Match(verString);
if (matchResult.Success && !string.IsNullOrEmpty(matchResult.Value))
{
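GitCliManager probes git and git-lfs by running them once and pulling a major.minor[.patch] triple out of the output with the regex shown above. A standalone sketch of that parsing step, with the command output hard-coded instead of invoking git:

using System;
using System.Text.RegularExpressions;

public static class GitVersionSketch
{
    public static void Main()
    {
        const string verString = "git version 2.39.2 (Apple Git-143)"; // hypothetical captured output
        var verRegex = new Regex("\\d+\\.\\d+(\\.\\d+)?", RegexOptions.IgnoreCase);

        Match match = verRegex.Match(verString);
        if (match.Success && Version.TryParse(match.Value, out Version version))
        {
            Console.WriteLine($"Parsed git version: {version}");
        }
        else
        {
            Console.WriteLine("Unable to determine git version.");
        }
    }
}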

View File

@@ -21,7 +21,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
private const string _remotePullRefsPrefix = "refs/remotes/pull/";
// min git version that support add extra auth header.
private Version _minGitVersionSupportAuthHeader = new(2, 9);
private Version _minGitVersionSupportAuthHeader = new Version(2, 9);
#if OS_WINDOWS
// min git version that support override sslBackend setting.
@@ -29,7 +29,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
#endif
// min git-lfs version that support add extra auth header.
private Version _minGitLfsVersionSupportAuthHeader = new(2, 1);
private Version _minGitLfsVersionSupportAuthHeader = new Version(2, 1);
private void RequirementCheck(RunnerActionPluginExecutionContext executionContext, GitCliManager gitCommandManager, bool checkGitLfs)
{
@@ -83,7 +83,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
var githubUrl = executionContext.GetGitHubContext("server_url");
var githubUri = new Uri(!string.IsNullOrEmpty(githubUrl) ? githubUrl : "https://github.com");
var portInfo = githubUri.IsDefaultPort ? string.Empty : $":{githubUri.Port}";
Uri repositoryUrl = new($"{githubUri.Scheme}://{githubUri.Host}{portInfo}/{repoFullName}");
Uri repositoryUrl = new Uri($"{githubUri.Scheme}://{githubUri.Host}{portInfo}/{repoFullName}");
if (!repositoryUrl.IsAbsoluteUri)
{
throw new InvalidOperationException("Repository url need to be an absolute uri.");
@@ -121,7 +121,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
executionContext.Debug($"gitLfsSupport={gitLfsSupport}");
// Initialize git command manager with additional environment variables.
Dictionary<string, string> gitEnv = new(StringComparer.OrdinalIgnoreCase);
Dictionary<string, string> gitEnv = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
// Disable prompting for git credential manager
gitEnv["GCM_INTERACTIVE"] = "Never";
@@ -141,7 +141,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
gitEnv[formattedKey] = variable.Value?.Value ?? string.Empty;
}
GitCliManager gitCommandManager = new(gitEnv);
GitCliManager gitCommandManager = new GitCliManager(gitEnv);
await gitCommandManager.LoadGitExecutionInfo(executionContext);
// Make sure the build machine met all requirements for the git repository
@@ -293,8 +293,8 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
await RemoveGitConfig(executionContext, gitCommandManager, targetPath, $"http.{repositoryUrl.AbsoluteUri}.extraheader", string.Empty);
}
List<string> additionalFetchArgs = new();
List<string> additionalLfsFetchArgs = new();
List<string> additionalFetchArgs = new List<string>();
List<string> additionalLfsFetchArgs = new List<string>();
// add accessToken as basic auth header to handle auth challenge.
if (!string.IsNullOrEmpty(accessToken))
@@ -320,7 +320,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
}
}
List<string> additionalFetchSpecs = new();
List<string> additionalFetchSpecs = new List<string>();
additionalFetchSpecs.Add("+refs/heads/*:refs/remotes/origin/*");
if (IsPullRequest(sourceBranch))
@@ -395,7 +395,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
throw new InvalidOperationException($"Git submodule sync failed with exit code: {exitCode_submoduleSync}");
}
List<string> additionalSubmoduleUpdateArgs = new();
List<string> additionalSubmoduleUpdateArgs = new List<string>();
if (!string.IsNullOrEmpty(accessToken))
{

View File

@@ -12,7 +12,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
{
public class CheckoutTask : IRunnerActionPlugin
{
private readonly Regex _validSha1 = new(@"\b[0-9a-f]{40}\b", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled, TimeSpan.FromSeconds(2));
private readonly Regex _validSha1 = new Regex(@"\b[0-9a-f]{40}\b", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled, TimeSpan.FromSeconds(2));
public async Task RunAsync(RunnerActionPluginExecutionContext executionContext, CancellationToken token)
{

View File

@@ -22,7 +22,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
private const string _tagRefsPrefix = "refs/tags/";
// min git version that support add extra auth header.
private Version _minGitVersionSupportAuthHeader = new(2, 9);
private Version _minGitVersionSupportAuthHeader = new Version(2, 9);
#if OS_WINDOWS
// min git version that support override sslBackend setting.
@@ -30,7 +30,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
#endif
// min git-lfs version that support add extra auth header.
private Version _minGitLfsVersionSupportAuthHeader = new(2, 1);
private Version _minGitLfsVersionSupportAuthHeader = new Version(2, 1);
public static string ProblemMatcher => @"
{
@@ -62,9 +62,9 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
{
// Validate args.
ArgUtil.NotNull(executionContext, nameof(executionContext));
Dictionary<string, string> configModifications = new();
Dictionary<string, string> configModifications = new Dictionary<string, string>();
executionContext.Output($"Syncing repository: {repoFullName}");
Uri repositoryUrl = new($"https://github.com/{repoFullName}");
Uri repositoryUrl = new Uri($"https://github.com/{repoFullName}");
if (!repositoryUrl.IsAbsoluteUri)
{
throw new InvalidOperationException("Repository url need to be an absolute uri.");
@@ -102,7 +102,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
executionContext.Debug($"gitLfsSupport={gitLfsSupport}");
// Initialize git command manager with additional environment variables.
Dictionary<string, string> gitEnv = new(StringComparer.OrdinalIgnoreCase);
Dictionary<string, string> gitEnv = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
// Disable git prompt
gitEnv["GIT_TERMINAL_PROMPT"] = "0";
@@ -125,7 +125,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
gitEnv[formattedKey] = variable.Value?.Value ?? string.Empty;
}
GitCliManager gitCommandManager = new(gitEnv);
GitCliManager gitCommandManager = new GitCliManager(gitEnv);
await gitCommandManager.LoadGitExecutionInfo(executionContext);
// Make sure the build machine met all requirements for the git repository
@@ -277,8 +277,8 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
await RemoveGitConfig(executionContext, gitCommandManager, targetPath, $"http.{repositoryUrl.AbsoluteUri}.extraheader", string.Empty);
}
List<string> additionalFetchArgs = new();
List<string> additionalLfsFetchArgs = new();
List<string> additionalFetchArgs = new List<string>();
List<string> additionalLfsFetchArgs = new List<string>();
// Add http.https://github.com.extraheader=... to gitconfig
// accessToken as basic auth header to handle any auth challenge from github.com
@@ -303,7 +303,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
}
}
List<string> additionalFetchSpecs = new();
List<string> additionalFetchSpecs = new List<string>();
additionalFetchSpecs.Add("+refs/heads/*:refs/remotes/origin/*");
if (IsPullRequest(sourceBranch))
@@ -378,7 +378,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
throw new InvalidOperationException($"Git submodule sync failed with exit code: {exitCode_submoduleSync}");
}
List<string> additionalSubmoduleUpdateArgs = new();
List<string> additionalSubmoduleUpdateArgs = new List<string>();
int exitCode_submoduleUpdate = await gitCommandManager.GitSubmoduleUpdate(executionContext, targetPath, fetchDepth, string.Join(" ", additionalSubmoduleUpdateArgs), checkoutNestedSubmodules, cancellationToken);
if (exitCode_submoduleUpdate != 0)
@@ -404,7 +404,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
executionContext.Output($"Cleanup cached git credential from {repositoryPath}.");
// Initialize git command manager
GitCliManager gitCommandManager = new();
GitCliManager gitCommandManager = new GitCliManager();
await gitCommandManager.LoadGitExecutionInfo(executionContext);
executionContext.Debug("Remove any extraheader setting from git config.");
@@ -499,7 +499,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
string gitConfig = Path.Combine(targetPath, ".git/config");
if (File.Exists(gitConfig))
{
List<string> safeGitConfig = new();
List<string> safeGitConfig = new List<string>();
var gitConfigContents = File.ReadAllLines(gitConfig);
foreach (var line in gitConfigContents)
{

View File

@@ -23,7 +23,7 @@ namespace GitHub.Runner.Sdk
private readonly string DebugEnvironmentalVariable = "ACTIONS_STEP_DEBUG";
private VssConnection _connection;
private RunnerWebProxy _webProxy;
private readonly object _stdoutLock = new();
private readonly object _stdoutLock = new object();
private readonly ITraceWriter _trace; // for unit tests
public RunnerActionPluginExecutionContext()
@@ -73,7 +73,7 @@ namespace GitHub.Runner.Sdk
{
var headerValues = new List<ProductInfoHeaderValue>();
headerValues.Add(new ProductInfoHeaderValue($"GitHubActionsRunner-Plugin", BuildConstants.RunnerPackage.Version));
headerValues.Add(new ProductInfoHeaderValue($"({StringUtil.SanitizeUserAgentHeader(RuntimeInformation.OSDescription)})"));
headerValues.Add(new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})"));
if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)
{
@@ -220,7 +220,7 @@ namespace GitHub.Runner.Sdk
return input;
}
private Dictionary<string, string> _commandEscapeMappings = new(StringComparer.OrdinalIgnoreCase)
private Dictionary<string, string> _commandEscapeMappings = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
{
";", "%3B"

View File

@@ -24,18 +24,18 @@ namespace GitHub.Runner.Sdk
private Stopwatch _stopWatch;
private int _asyncStreamReaderCount = 0;
private bool _waitingOnStreams = false;
private readonly AsyncManualResetEvent _outputProcessEvent = new();
private readonly TaskCompletionSource<bool> _processExitedCompletionSource = new();
private readonly CancellationTokenSource _processStandardInWriteCancellationTokenSource = new();
private readonly ConcurrentQueue<string> _errorData = new();
private readonly ConcurrentQueue<string> _outputData = new();
private readonly AsyncManualResetEvent _outputProcessEvent = new AsyncManualResetEvent();
private readonly TaskCompletionSource<bool> _processExitedCompletionSource = new TaskCompletionSource<bool>();
private readonly CancellationTokenSource _processStandardInWriteCancellationTokenSource = new CancellationTokenSource();
private readonly ConcurrentQueue<string> _errorData = new ConcurrentQueue<string>();
private readonly ConcurrentQueue<string> _outputData = new ConcurrentQueue<string>();
private readonly TimeSpan _sigintTimeout = TimeSpan.FromMilliseconds(7500);
private readonly TimeSpan _sigtermTimeout = TimeSpan.FromMilliseconds(2500);
private ITraceWriter Trace { get; set; }
private class AsyncManualResetEvent
{
private volatile TaskCompletionSource<bool> m_tcs = new();
private volatile TaskCompletionSource<bool> m_tcs = new TaskCompletionSource<bool>();
public Task WaitAsync() { return m_tcs.Task; }
@@ -264,17 +264,7 @@ namespace GitHub.Runner.Sdk
{
foreach (KeyValuePair<string, string> kvp in environment)
{
#if OS_WINDOWS
string tempKey = String.IsNullOrWhiteSpace(kvp.Key) ? kvp.Key : kvp.Key.Split('\0')[0];
string tempValue = String.IsNullOrWhiteSpace(kvp.Value) ? kvp.Value : kvp.Value.Split('\0')[0];
if(!String.IsNullOrWhiteSpace(tempKey))
{
_proc.StartInfo.Environment[tempKey] = tempValue;
}
#else
_proc.StartInfo.Environment[kvp.Key] = kvp.Value;
#endif
}
}
@@ -397,8 +387,8 @@ namespace GitHub.Runner.Sdk
private void ProcessOutput()
{
List<string> errorData = new();
List<string> outputData = new();
List<string> errorData = new List<string>();
List<string> outputData = new List<string>();
string errorLine;
while (_errorData.TryDequeue(out errorLine))
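One of the hunks above concerns a Windows-only guard in ProcessInvoker: the Win32 environment block is NUL-delimited, so a key or value containing '\0' is truncated at the first NUL before it reaches the child process. A small sketch of that guard in isolation (hypothetical variable name and value, and the process is never actually started):

using System;
using System.Diagnostics;

public static class EnvSanitizeSketch
{
    public static void Main()
    {
        var psi = new ProcessStartInfo("cmd.exe", "/c set MY_VAR");

        string key = "MY_VAR";
        string value = "visible\0hidden-part"; // an embedded NUL would corrupt the environment block

        string safeKey = string.IsNullOrWhiteSpace(key) ? key : key.Split('\0')[0];
        string safeValue = string.IsNullOrWhiteSpace(value) ? value : value.Split('\0')[0];

        if (!string.IsNullOrWhiteSpace(safeKey))
        {
            psi.Environment[safeKey] = safeValue;
        }

        Console.WriteLine($"Set {safeKey}={psi.Environment[safeKey]}"); // prints "Set MY_VAR=visible"
    }
}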

View File

@@ -23,9 +23,9 @@ namespace GitHub.Runner.Sdk
private string _httpsProxyPassword;
private string _noProxyString;
private readonly List<ByPassInfo> _noProxyList = new();
private readonly HashSet<string> _noProxyUnique = new(StringComparer.OrdinalIgnoreCase);
private readonly Regex _validIpRegex = new("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$", RegexOptions.Compiled);
private readonly List<ByPassInfo> _noProxyList = new List<ByPassInfo>();
private readonly HashSet<string> _noProxyUnique = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
private readonly Regex _validIpRegex = new Regex("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$", RegexOptions.Compiled);
public string HttpProxyAddress => _httpProxyAddress;
public string HttpProxyUsername => _httpProxyUsername;
@@ -164,6 +164,7 @@ namespace GitHub.Runner.Sdk
{
continue;
}
_noProxyList.Add(noProxyInfo);
}
}
@@ -206,11 +207,6 @@ namespace GitHub.Runner.Sdk
{
foreach (var noProxy in _noProxyList)
{
// bypass on wildcard no_proxy
if (string.Equals(noProxy.Host, "*", StringComparison.OrdinalIgnoreCase))
{
return true;
}
var matchHost = false;
var matchPort = false;
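RunnerWebProxy's bypass check walks the parsed no_proxy entries; the hunk above shows the wildcard case, where a bare "*" bypasses the proxy for every host. A simplified sketch of that matching logic (not the runner's class; the entry list is made up, and port matching is omitted):

using System;
using System.Linq;

public static class NoProxySketch
{
    private static readonly string[] _noProxy = { "localhost", "*.internal.example", "ghe.example.com" };

    public static bool IsBypassed(Uri uri)
    {
        foreach (string entry in _noProxy.Select(e => e.TrimStart('*', '.')))
        {
            if (entry.Length == 0)
            {
                return true; // a bare "*" entry bypasses the proxy entirely
            }

            if (string.Equals(uri.Host, entry, StringComparison.OrdinalIgnoreCase) ||
                uri.Host.EndsWith("." + entry, StringComparison.OrdinalIgnoreCase))
            {
                return true;
            }
        }
        return false;
    }

    public static void Main()
    {
        Console.WriteLine(IsBypassed(new Uri("https://build01.internal.example"))); // True
        Console.WriteLine(IsBypassed(new Uri("https://api.github.com")));           // False
    }
}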

View File

@@ -40,19 +40,10 @@ namespace GitHub.Runner.Sdk
File.WriteAllText(path, StringUtil.ConvertToJson(obj), Encoding.UTF8);
}
public static T LoadObject<T>(string path, bool required = false)
public static T LoadObject<T>(string path)
{
string json = File.ReadAllText(path, Encoding.UTF8);
if (required && string.IsNullOrEmpty(json))
{
throw new ArgumentNullException($"File {path} is empty");
}
T result = StringUtil.ConvertFromJson<T>(json);
if (required && result == null)
{
throw new ArgumentException("Converting json to object resulted in a null value");
}
return result;
return StringUtil.ConvertFromJson<T>(json);
}
public static string GetSha256Hash(string path)
@@ -61,7 +52,7 @@ namespace GitHub.Runner.Sdk
using (SHA256 sha256hash = SHA256.Create())
{
byte[] data = sha256hash.ComputeHash(Encoding.UTF8.GetBytes(hashString));
StringBuilder sBuilder = new();
StringBuilder sBuilder = new StringBuilder();
for (int i = 0; i < data.Length; i++)
{
sBuilder.Append(data[i].ToString("x2"));
@@ -86,7 +77,7 @@ namespace GitHub.Runner.Sdk
public static void DeleteDirectory(string path, bool contentsOnly, bool continueOnContentDeleteError, CancellationToken cancellationToken)
{
ArgUtil.NotNullOrEmpty(path, nameof(path));
DirectoryInfo directory = new(path);
DirectoryInfo directory = new DirectoryInfo(path);
if (!directory.Exists)
{
return;
@@ -372,12 +363,12 @@ namespace GitHub.Runner.Sdk
Directory.CreateDirectory(target);
// Get the file contents of the directory to copy.
DirectoryInfo sourceDir = new(source);
DirectoryInfo sourceDir = new DirectoryInfo(source);
foreach (FileInfo sourceFile in sourceDir.GetFiles() ?? new FileInfo[0])
{
// Check if the file already exists.
cancellationToken.ThrowIfCancellationRequested();
FileInfo targetFile = new(Path.Combine(target, sourceFile.Name));
FileInfo targetFile = new FileInfo(Path.Combine(target, sourceFile.Name));
if (!targetFile.Exists ||
sourceFile.Length != targetFile.Length ||
sourceFile.LastWriteTime != targetFile.LastWriteTime)

View File

@@ -9,7 +9,7 @@ namespace GitHub.Runner.Sdk
public static class StringUtil
{
private static readonly object[] s_defaultFormatArgs = new object[] { null };
private static Lazy<JsonSerializerSettings> s_serializerSettings = new(() =>
private static Lazy<JsonSerializerSettings> s_serializerSettings = new Lazy<JsonSerializerSettings>(() =>
{
var settings = new VssJsonMediaTypeFormatter().SerializerSettings;
settings.DateParseHandling = DateParseHandling.None;
@@ -123,12 +123,5 @@ namespace GitHub.Runner.Sdk
{
return value?.Substring(0, Math.Min(value.Length, count));
}
// Fixes format violations e.g. https://github.com/actions/runner/issues/2165
public static string SanitizeUserAgentHeader(string header)
{
return header.Replace("(", "[").Replace(")", "]").Trim();
}
}
}
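SanitizeUserAgentHeader above exists because RuntimeInformation.OSDescription is dropped into an HTTP User-Agent comment, and unbalanced parentheses or stray whitespace in that text can make the comment malformed (the format violations referenced in the code comment). A small sketch of the idea, with a hypothetical OS description string:

using System;
using System.Net.Http.Headers;

public static class UserAgentSketch
{
    public static string Sanitize(string header)
    {
        return header.Replace("(", "[").Replace(")", "]").Trim();
    }

    public static void Main()
    {
        string os = "Ubuntu 22.04.1 LTS (Jammy Jellyfish) "; // hypothetical OSDescription value
        var comment = new ProductInfoHeaderValue($"({Sanitize(os)})");
        Console.WriteLine(comment); // (Ubuntu 22.04.1 LTS [Jammy Jellyfish])
    }
}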

View File

@@ -1,4 +1,4 @@
using System;
using System;
namespace GitHub.Runner.Sdk
{
@@ -6,17 +6,9 @@ namespace GitHub.Runner.Sdk
{
public static bool IsHostedServer(UriBuilder gitHubUrl)
{
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_FORCE_GHES")))
{
return false;
}
return
string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase) ||
gitHubUrl.Host.EndsWith(".ghe.localhost", StringComparison.OrdinalIgnoreCase) ||
gitHubUrl.Host.EndsWith(".ghe.com", StringComparison.OrdinalIgnoreCase);
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
}
public static Uri GetCredentialEmbeddedUrl(Uri baseUrl, string username, string password)
@@ -29,7 +21,7 @@ namespace GitHub.Runner.Sdk
return baseUrl;
}
UriBuilder credUri = new(baseUrl);
UriBuilder credUri = new UriBuilder(baseUrl);
// ensure we have a username, uribuild will throw if username is empty but password is not.
if (string.IsNullOrEmpty(username))
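GetCredentialEmbeddedUrl builds a clone URL with the token baked into the userinfo part; as the comment above notes, UriBuilder rejects a password without a username. A sketch in the same spirit (the credentials and the empty-username placeholder are made up for illustration):

using System;

public static class CredUrlSketch
{
    public static Uri Embed(Uri baseUrl, string username, string password)
    {
        if (string.IsNullOrEmpty(username) && string.IsNullOrEmpty(password))
        {
            return baseUrl;
        }

        var credUri = new UriBuilder(baseUrl);
        // UriBuilder needs a username whenever a password is present.
        credUri.UserName = string.IsNullOrEmpty(username) ? "token" : Uri.EscapeDataString(username);
        credUri.Password = string.IsNullOrEmpty(password) ? string.Empty : Uri.EscapeDataString(password);
        return credUri.Uri;
    }

    public static void Main()
    {
        Uri url = Embed(new Uri("https://github.com/owner/repo.git"), "x-access-token", "ghs_example");
        Console.WriteLine(url); // https://x-access-token:ghs_example@github.com/owner/repo.git
    }
}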

View File

@@ -9,7 +9,6 @@ using GitHub.Services.OAuth;
using System.Net.Http.Headers;
using System.Runtime.InteropServices;
using System.Net;
using Sdk.WebApi.WebApi.RawClient;
namespace GitHub.Runner.Sdk
{
@@ -19,7 +18,7 @@ namespace GitHub.Runner.Sdk
{
var headerValues = new List<ProductInfoHeaderValue>();
headerValues.AddRange(additionalUserAgents);
headerValues.Add(new ProductInfoHeaderValue($"({StringUtil.SanitizeUserAgentHeader(RuntimeInformation.OSDescription)})"));
headerValues.Add(new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})"));
if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)
{
@@ -35,11 +34,7 @@ namespace GitHub.Runner.Sdk
}
}
public static VssConnection CreateConnection(
Uri serverUri,
VssCredentials credentials,
IEnumerable<DelegatingHandler> additionalDelegatingHandler = null,
TimeSpan? timeout = null)
public static VssConnection CreateConnection(Uri serverUri, VssCredentials credentials, IEnumerable<DelegatingHandler> additionalDelegatingHandler = null, TimeSpan? timeout = null)
{
VssClientHttpRequestSettings settings = VssClientHttpRequestSettings.Default.Clone();
@@ -76,47 +71,7 @@ namespace GitHub.Runner.Sdk
// settings are applied to an HttpRequestMessage.
settings.AcceptLanguages.Remove(CultureInfo.InvariantCulture);
VssConnection connection = new(serverUri, new VssHttpMessageHandler(credentials, settings), additionalDelegatingHandler);
return connection;
}
public static RawConnection CreateRawConnection(
Uri serverUri,
VssCredentials credentials,
IEnumerable<DelegatingHandler> additionalDelegatingHandler = null,
TimeSpan? timeout = null)
{
RawClientHttpRequestSettings settings = RawClientHttpRequestSettings.Default.Clone();
int maxRetryRequest;
if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTP_RETRY") ?? string.Empty, out maxRetryRequest))
{
maxRetryRequest = 3;
}
// make sure MaxRetryRequest in range [3, 10]
settings.MaxRetryRequest = Math.Min(Math.Max(maxRetryRequest, 3), 10);
if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTP_TIMEOUT") ?? string.Empty, out int httpRequestTimeoutSeconds))
{
settings.SendTimeout = timeout ?? TimeSpan.FromSeconds(100);
}
else
{
// prefer environment variable
settings.SendTimeout = TimeSpan.FromSeconds(Math.Min(Math.Max(httpRequestTimeoutSeconds, 100), 1200));
}
// Remove Invariant from the list of accepted languages.
//
// The constructor of VssHttpRequestSettings (base class of VssClientHttpRequestSettings) adds the current
// UI culture to the list of accepted languages. The UI culture will be Invariant on OSX/Linux when the
// LANG environment variable is not set when the program starts. If Invariant is in the list of accepted
// languages, then "System.ArgumentException: The value cannot be null or empty." will be thrown when the
// settings are applied to an HttpRequestMessage.
settings.AcceptLanguages.Remove(CultureInfo.InvariantCulture);
RawConnection connection = new(serverUri, new RawHttpMessageHandler(credentials.Federated, settings), additionalDelegatingHandler);
VssConnection connection = new VssConnection(serverUri, new VssHttpMessageHandler(credentials, settings), additionalDelegatingHandler);
return connection;
}
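The connection helpers above read two environment variables and clamp them into supported ranges before applying them to the HTTP settings. A minimal sketch of that fallback-and-clamp pattern using the same variable names:

using System;

public static class HttpSettingsSketch
{
    public static void Main()
    {
        if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTP_RETRY") ?? string.Empty, out int maxRetryRequest))
        {
            maxRetryRequest = 3;
        }
        maxRetryRequest = Math.Min(Math.Max(maxRetryRequest, 3), 10); // keep within [3, 10]

        if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTP_TIMEOUT") ?? string.Empty, out int timeoutSeconds))
        {
            timeoutSeconds = 100;
        }
        var sendTimeout = TimeSpan.FromSeconds(Math.Min(Math.Max(timeoutSeconds, 100), 1200)); // keep within [100, 1200] seconds

        Console.WriteLine($"MaxRetryRequest={maxRetryRequest}, SendTimeout={sendTimeout}");
    }
}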

View File

@@ -60,7 +60,7 @@ namespace GitHub.Runner.Sdk
trace?.Verbose(ex.ToString());
}
if (matches != null && matches.Length > 0 && IsPathValid(matches.First(), trace))
if (matches != null && matches.Length > 0)
{
trace?.Info($"Location: '{matches.First()}'");
return matches.First();
@@ -86,7 +86,7 @@ namespace GitHub.Runner.Sdk
for (int i = 0; i < pathExtSegments.Length; i++)
{
string fullPath = Path.Combine(pathSegment, $"{command}{pathExtSegments[i]}");
if (matches.Any(p => p.Equals(fullPath, StringComparison.OrdinalIgnoreCase)) && IsPathValid(fullPath, trace))
if (matches.Any(p => p.Equals(fullPath, StringComparison.OrdinalIgnoreCase)))
{
trace?.Info($"Location: '{fullPath}'");
return fullPath;
@@ -105,7 +105,7 @@ namespace GitHub.Runner.Sdk
trace?.Verbose(ex.ToString());
}
if (matches != null && matches.Length > 0 && IsPathValid(matches.First(), trace))
if (matches != null && matches.Length > 0)
{
trace?.Info($"Location: '{matches.First()}'");
return matches.First();
@@ -128,15 +128,5 @@ namespace GitHub.Runner.Sdk
return null;
}
// checks if the file is a symlink and if the symlink`s target exists.
private static bool IsPathValid(string path, ITraceWriter trace = null)
{
var fileInfo = new FileInfo(path);
var linkTargetFullPath = fileInfo.Directory?.FullName + Path.DirectorySeparatorChar + fileInfo.LinkTarget;
if(fileInfo.LinkTarget == null || File.Exists(linkTargetFullPath) || File.Exists(fileInfo.LinkTarget)) return true;
trace?.Info($"the target '{fileInfo.LinkTarget}' of the symbolic link '{path}', does not exist");
return false;
}
}
}
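The IsPathValid helper in the hunk above filters dangling symlinks out of the candidates WhichUtil finds on PATH. A standalone sketch of the same check, assuming .NET 6+ where FileInfo.LinkTarget is available (the probed path is just an example argument):

using System;
using System.IO;

public static class SymlinkProbeSketch
{
    public static bool IsPathValid(string path)
    {
        var fileInfo = new FileInfo(path);
        if (fileInfo.LinkTarget == null)
        {
            return true; // not a symlink (or not a link the runtime can resolve)
        }

        // Resolve the target relative to the link's directory, then as-is.
        string relativeTarget = Path.Combine(fileInfo.Directory?.FullName ?? string.Empty, fileInfo.LinkTarget);
        return File.Exists(relativeTarget) || File.Exists(fileInfo.LinkTarget);
    }

    public static void Main(string[] args)
    {
        string path = args.Length > 0 ? args[0] : "/usr/local/bin/node"; // hypothetical path to probe
        Console.WriteLine($"{path} valid: {IsPathValid(path)}");
    }
}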

View File

@@ -1,4 +1,4 @@
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker.Container;
using System;
@@ -21,9 +21,9 @@ namespace GitHub.Runner.Worker
public sealed class ActionCommandManager : RunnerService, IActionCommandManager
{
private const string _stopCommand = "stop-commands";
private readonly Dictionary<string, IActionCommandExtension> _commandExtensions = new(StringComparer.OrdinalIgnoreCase);
private readonly HashSet<string> _registeredCommands = new(StringComparer.OrdinalIgnoreCase);
private readonly object _commandSerializeLock = new();
private readonly Dictionary<string, IActionCommandExtension> _commandExtensions = new Dictionary<string, IActionCommandExtension>(StringComparer.OrdinalIgnoreCase);
private readonly HashSet<string> _registeredCommands = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
private readonly object _commandSerializeLock = new object();
private bool _stopProcessCommand = false;
private string _stopToken = null;
@@ -276,7 +276,7 @@ namespace GitHub.Runner.Worker
Message = $"Can't update {blocked} environment variable using ::set-env:: command."
};
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = $"{Constants.Runner.UnsupportedCommand}_{envName}";
context.AddIssue(issue, ExecutionContextLogOptions.Default);
context.AddIssue(issue);
return;
}
@@ -315,7 +315,7 @@ namespace GitHub.Runner.Worker
Message = String.Format(Constants.Runner.UnsupportedCommandMessage, this.Command)
};
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.UnsupportedCommand;
context.AddIssue(issue, ExecutionContextLogOptions.Default);
context.AddIssue(issue);
}
if (!command.Properties.TryGetValue(SetOutputCommandProperties.Name, out string outputName) || string.IsNullOrEmpty(outputName))
@@ -350,7 +350,7 @@ namespace GitHub.Runner.Worker
Message = String.Format(Constants.Runner.UnsupportedCommandMessage, this.Command)
};
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.UnsupportedCommand;
context.AddIssue(issue, ExecutionContextLogOptions.Default);
context.AddIssue(issue);
}
if (!command.Properties.TryGetValue(SaveStateCommandProperties.Name, out string stateName) || string.IsNullOrEmpty(stateName))
@@ -618,7 +618,7 @@ namespace GitHub.Runner.Worker
context.Debug("Enhanced Annotations not enabled on the server. The 'title', 'end_line', and 'end_column' fields are unsupported.");
}
Issue issue = new()
Issue issue = new Issue()
{
Category = "General",
Type = this.Type,
@@ -666,7 +666,7 @@ namespace GitHub.Runner.Worker
}
}
context.AddIssue(issue, ExecutionContextLogOptions.Default);
context.AddIssue(issue);
}
public static void ValidateLinesAndColumns(ActionCommand command, IExecutionContext context)

View File

@@ -11,14 +11,12 @@ using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container;
using GitHub.Services.Common;
using WebApi = GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Worker
{
@@ -54,16 +52,16 @@ namespace GitHub.Runner.Worker
private const int _defaultCopyBufferSize = 81920;
private const string _dotcomApiUrl = "https://api.github.com";
private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new();
private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new Dictionary<Guid, ContainerInfo>();
public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers;
private readonly Dictionary<Guid, List<Pipelines.ActionStep>> _cachedEmbeddedPreSteps = new();
private readonly Dictionary<Guid, List<Pipelines.ActionStep>> _cachedEmbeddedPreSteps = new Dictionary<Guid, List<Pipelines.ActionStep>>();
public Dictionary<Guid, List<Pipelines.ActionStep>> CachedEmbeddedPreSteps => _cachedEmbeddedPreSteps;
private readonly Dictionary<Guid, List<Guid>> _cachedEmbeddedStepIds = new();
private readonly Dictionary<Guid, List<Guid>> _cachedEmbeddedStepIds = new Dictionary<Guid, List<Guid>>();
public Dictionary<Guid, List<Guid>> CachedEmbeddedStepIds => _cachedEmbeddedStepIds;
private readonly Dictionary<Guid, Stack<Pipelines.ActionStep>> _cachedEmbeddedPostSteps = new();
private readonly Dictionary<Guid, Stack<Pipelines.ActionStep>> _cachedEmbeddedPostSteps = new Dictionary<Guid, Stack<Pipelines.ActionStep>>();
public Dictionary<Guid, Stack<Pipelines.ActionStep>> CachedEmbeddedPostSteps => _cachedEmbeddedPostSteps;
public async Task<PrepareResult> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps, Guid rootStepId = default(Guid))
@@ -102,19 +100,7 @@ namespace GitHub.Runner.Worker
}
IEnumerable<Pipelines.ActionStep> actions = steps.OfType<Pipelines.ActionStep>();
executionContext.Output("Prepare all required actions");
PrepareActionsState result = new PrepareActionsState();
try
{
result = await PrepareActionsRecursiveAsync(executionContext, state, actions, depth, rootStepId);
}
catch (FailedToResolveActionDownloadInfoException ex)
{
// Log the error and fail the PrepareActionsAsync Initialization.
Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
executionContext.InfrastructureError(ex.Message);
executionContext.Result = TaskResult.Failed;
throw;
}
var result = await PrepareActionsRecursiveAsync(executionContext, state, actions, depth, rootStepId);
if (!FeatureManager.IsContainerHooksEnabled(executionContext.Global.Variables))
{
if (state.ImagesToPull.Count > 0)
@@ -662,21 +648,13 @@ namespace GitHub.Runner.Worker
}
// Resolve download info
var launchServer = HostContext.GetService<ILaunchServer>();
var jobServer = HostContext.GetService<IJobServer>();
var actionDownloadInfos = default(WebApi.ActionDownloadInfoCollection);
for (var attempt = 1; attempt <= 3; attempt++)
{
try
{
if (MessageUtil.IsRunServiceJob(executionContext.Global.Variables.Get(Constants.Variables.System.JobRequestType)))
{
actionDownloadInfos = await launchServer.ResolveActionsDownloadInfoAsync(executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
}
else
{
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
}
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
break;
}
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is cancelled.
@@ -813,7 +791,7 @@ namespace GitHub.Runner.Worker
try
{
//open zip stream in async mode
using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
using (FileStream fs = new FileStream(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{

View File

@@ -53,7 +53,7 @@ namespace GitHub.Runner.Worker
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
var templateContext = CreateTemplateContext(executionContext);
ActionDefinitionData actionDefinition = new();
ActionDefinitionData actionDefinition = new ActionDefinitionData();
// Clean up file name real quick
// Instead of using Regex which can be computationally expensive,

View File

@@ -25,6 +25,7 @@ namespace GitHub.Runner.Worker
public interface IActionRunner : IStep, IRunnerService
{
ActionRunStage Stage { get; set; }
bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context);
Pipelines.ActionStep Action { get; set; }
}
@@ -284,67 +285,25 @@ namespace GitHub.Runner.Worker
}
/// <summary>
/// Attempts to update the DisplayName.
/// As the "Try..." name implies, this method should never throw an exception.
/// Returns true if the DisplayName is already present or it was successfully updated.
/// </summary>
public bool TryUpdateDisplayName(out bool updated)
{
updated = false;
// REVIEW: This try/catch can be removed if some future implementation of EvaluateDisplayName and UpdateTimelineRecordDisplayName
// can make reasonable guarantees that they won't throw an exception.
try
{
// This attempt is only worthwhile at the "Main" stage.
// When the job starts, there's an initial attempt to evaluate the DisplayName. (see JobExtension::InitializeJob)
// During the "Pre" stage, we expect that no contexts will have changed since the initial evaluation.
// "Main" stage is handled here.
// During the "Post" stage, it no longer matters.
if (this.Stage == ActionRunStage.Main && EvaluateDisplayName(this.ExecutionContext.ExpressionValues, this.ExecutionContext, out updated))
{
if (updated)
{
this.ExecutionContext.UpdateTimelineRecordDisplayName(this.DisplayName);
}
}
}
catch (Exception ex)
{
Trace.Warning("Caught exception while attempting to evaulate/update the step's DisplayName. Exception Details: {0}", ex);
}
// For consistency with other implementations of TryUpdateDisplayName we use !string.IsNullOrEmpty below,
// but note that (at the time of this writing) ActionRunner::DisplayName::get always returns a non-empty string due to its fallback logic.
// In other words, the net effect is that this particular implementation of TryUpdateDisplayName will always return true.
return !string.IsNullOrEmpty(this.DisplayName);
}
/// <summary>
/// Attempts to evaluate the DisplayName of this IActionRunner.
/// Returns true if the DisplayName is already present or it was successfully evaluated.
/// </summary>
public bool EvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context, out bool updated)
public bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context)
{
ArgUtil.NotNull(context, nameof(context));
ArgUtil.NotNull(Action, nameof(Action));
updated = false;
// If we have already expanded the display name, don't bother attempting [re-]expansion.
// If we have already expanded the display name, there is no need to expand it again
// TODO: Remove the ShouldEvaluateDisplayName check and field post m158 deploy, we should do it by default once the server is updated
if (_didFullyEvaluateDisplayName || !string.IsNullOrEmpty(Action.DisplayName))
{
return true;
return false;
}
_displayName = GenerateDisplayName(Action, contextData, context, out bool didFullyEvaluate);
bool didFullyEvaluate;
_displayName = GenerateDisplayName(Action, contextData, context, out didFullyEvaluate);
// If we evaluated, fully mask any secrets
// If we evaluated fully mask any secrets
if (didFullyEvaluate)
{
_displayName = HostContext.SecretMasker.MaskSecrets(_displayName);
updated = true;
}
context.Debug($"Set step '{Action.Name}' display name to: '{_displayName}'");
_didFullyEvaluateDisplayName = didFullyEvaluate;

Some files were not shown because too many files have changed in this diff.