Compare commits


62 Commits

Author SHA1 Message Date
Ferenc Hammerl
6cff7cd927 Assert in hashfiles that executionContext is available 2023-03-06 23:23:59 +01:00
Ferenc Hammerl
60b29546fc Pass on executioncontext from actionRunner 2023-03-06 23:23:21 +01:00
Ferenc Hammerl
d91c79f677 Add expressionState to EvaluateStepInputs and CreateContext 2023-03-06 23:23:01 +01:00
Ferenc Hammerl
8ef48200b4 Bypass all proxies for all hosts if no_proxy='*' is set (#2395)
* Bypass top level domain even if no_proxy specified it with leading '.'

E.g. no_proxy='.github.com' will now bypass github.com.

* Bypass proxy on all hosts when no_proxy is * (wildcard)

* Undo '.' stripping

* Simplify unit tests

* Respect wildcard even if it's one of many no_proxy items
2023-03-06 11:01:45 +01:00
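
A quick illustration of the new wildcard handling when starting a self-hosted runner (the proxy address is a placeholder, not something from this change):

```bash
# Proxy settings the runner would normally honor.
export https_proxy=http://proxy.example.com:8080

# With this change a '*' entry bypasses the proxy for every host,
# on its own or as one item in a longer no_proxy list.
export no_proxy='*'                  # bypass everything
# export no_proxy='localhost,*'      # wildcard still applies among other entries

./run.sh                             # outbound runner traffic now skips the proxy
```
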
Tingluo Huang
d61b27b839 Change runner image to make user/folder align with ubuntu-latest hosted runner. (#2469) 2023-03-02 13:42:23 -05:00
Nikola Jokic
542e8a3c98 Runner service exit after consecutive re-try exits (#2426)
* Runner service exit after consecutive re-try exits

* Rename failure counts and include reset count on runner listening for jobs

* Changed from graceful shutdown to stopping=true
2023-02-28 16:00:36 +01:00
Yashwanth Anantharaju
e8975514fd call run service renewjob (#2461)
* call run service renewjob

* format

* formatting

* make it private and expose internals

* lint

* fix exception class

* lint

* fix test as well
2023-02-27 16:50:28 +00:00
Yang Cao
0befa62f64 Add job log upload support (#2447)
* Refactor and add job log upload support

* Rename method to be consistent
2023-02-21 09:55:47 -05:00
Yang Cao
aaf02ab34c Properly guard upload (#2439)
* Revert "Revert "Uploading step logs to Results as well  (#2422)" (#2437)"

This reverts commit 8c096baf49.

* Properly guard the upload to results feature

* Delete skipped file if deletesource is true
2023-02-17 10:31:41 -05:00
JoannaaKL
02c9d1c704 Don't disable lint errors (#2436)
* Update lint.yml

Don't ignore the formatting errors

* Add formatting made by @cory-miller

* Use dotnet format

* Format only modified files

* Add instruction to contribute.md

* Use git status instead of git diff
2023-02-16 14:33:03 +01:00
Tingluo Huang
982784d704 Wait for docker base on env RUNNER_WAIT_FOR_DOCKER_IN_SECONDS. (#2440) 2023-02-15 16:05:36 -05:00
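
The wait is driven entirely by an environment variable checked in the run script (see the change further below); a minimal sketch, with an arbitrary 60-second budget:

```bash
# Give the entrypoint up to 60 seconds to see a responsive Docker daemon
# before Runner.Listener starts; leaving the variable unset (or <= 0) skips the wait.
export RUNNER_WAIT_FOR_DOCKER_IN_SECONDS=60
./run.sh
```
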
Yang Cao
8c096baf49 Revert "Uploading step logs to Results as well (#2422)" (#2437)
This reverts commit e979331be4.
2023-02-15 11:21:32 -05:00
JoannaaKL
8d6972e38b Don't add Needs constant twice 2023-02-15 10:50:22 +00:00
Tingluo Huang
1ab35b0938 Use v2 version based on https://github.blog/changelog/2023-01-18-code-scanning-codeql-action-v1-is-now-deprecated/ (#2434) 2023-02-14 16:01:52 +00:00
Tingluo Huang
f86e968d38 Prepare runner release 2.302.0 (#2433) 2023-02-14 09:50:09 -05:00
Yang Cao
e979331be4 Uploading step logs to Results as well (#2422)
* Rename queue to results queue

* Add results contracts

* Add Results logging handling

* Adding calls to create and finalize append blob

* Modifications for azurite upload

* Only call upload complete on final section and remove size

* Make method specific to step log so we can support job log later

* Change contract for results

* Add totalline count to the result log upload file

* Actually pass lineCount to Results Service

* Fix typos

* Code cleanup

* Fixing typos

* Apply suggestions from code review

Co-authored-by: Konrad Pabjan <konradpabjan@github.com>

---------

Co-authored-by: Brittany Ellich <brittanyellich@github.com>
Co-authored-by: Konrad Pabjan <konradpabjan@github.com>
2023-02-13 13:18:56 -05:00
Ferenc Hammerl
97195bad58 Replace '(' and ')' with '[' and '] from OS.Description so it doesn't fail User-Agent header validation (#2288)
* Sanitize OS Desc for UserAgents

* Only drop brackets if needed, refactoring

* Add missing ')'

* Readd missing brackets around '(header)'

* Add comments

* Use bracket solution from SDK

* Rename tests
2023-02-08 17:42:27 +01:00
Tingluo Huang
6d1d2460ac Add docker cli to the runner image. (#2425) 2023-02-08 09:21:02 -05:00
Yashwanth Anantharaju
67356a3305 Run service: send more stuff as part of job completed (#2423)
* send more stuff as part of job completed

* feedback

* set only once

* feedback

* feedback

* fix test

* feedback

* nit: spacing

* nit: line

Co-authored-by: Tingluo Huang <tingluohuang@github.com>

---------

Co-authored-by: Tingluo Huang <tingluohuang@github.com>
2023-02-07 20:10:53 +00:00
John Wesley Walker III
9a228e52e9 Defer evaluation of a step's DisplayName until its condition is evaluated. (#2313)
* Defer evaluation of a step's DisplayName until its condition is evaluated.
* Formalize TryUpdateDisplayName and EvaluateDisplayName as members of interface `IStep` (#2374)
2023-02-07 11:42:30 +01:00
Erez Testiler
3cd76671dd Add support for ghe.com domain (#2420) 2023-02-06 17:16:38 -05:00
Yashwanth Anantharaju
e6e5f36dd0 start calling run service for job completion (#2412)
* start calling run service for job completion

* cleanup

* nit: lines

Co-authored-by: Tingluo Huang <tingluohuang@github.com>

* clean up

* give sanity back to thboop

Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>

* add clean up back

* clean up

* clean up more

* oops

* copied from existing, but :thumb:

Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>

---------

Co-authored-by: Tingluo Huang <tingluohuang@github.com>
Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>
2023-02-01 21:18:31 +00:00
Yashwanth Anantharaju
24a27efd4f fix small bug (#2396) 2023-01-30 10:00:31 -05:00
Tingluo Huang
ca7be16dd3 Bump dotnet sdk to latest version. (#2392)
* Bump dotnet sdk to latest version.

* .

* .

* .

* .

* .

* .

* .

* .
2023-01-23 13:07:49 -05:00
Tingluo Huang
f1c57ac0ef Bump runner version to match the released runner. (#2385)
* Bump runner version to match the released runner.

* .
2023-01-19 00:40:33 +00:00
Tingluo Huang
8581a041a5 Revert "split by regex (#2333)" (#2383)
This reverts commit 72830cfc12.
2023-01-19 00:32:24 +00:00
Tingluo Huang
6412390a22 Prepare 2.301.0 runner release. (#2382)
* Prepare 2.301.0 runner release.

* Update releaseNote.md

Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>

* Update releaseNote.md

Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>

* Update releaseNote.md

Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>

* Update releaseNote.md

Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>

* Update releaseNote.md

Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>

* Update releaseNote.md

Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>

* Update releaseNote.md

Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>
2023-01-18 15:03:24 -05:00
John Sudol
7306014861 Update Node dependencies (#2381)
Co-authored-by: Tingluo Huang <tingluohuang@github.com>
2023-01-18 14:19:28 -05:00
John Sudol
d6f8633efc new option to remove local config files (#2367) 2023-01-18 11:28:43 -05:00
Cory Miller
130f6788d5 Add a disclaimer for which runner version is available to a given tenant (#2362)
* Add a disclaimer for which runner version is available to a given tenant

* Update releaseNote.md
2023-01-18 10:41:53 -05:00
John Sudol
9b390e0531 update node to 16.16.0 (#2371) 2023-01-18 10:41:15 -05:00
yujincat
a7101008a2 Show more information in the runner log (#2377)
* fix typo

* add workflow ref in the log

* show job name for all jobs

* update ref

* reflect the feedback

* fix a small bug
2023-01-18 10:40:35 -05:00
Tingluo Huang
4a6630531b Allow provide extra User-Agent for better correlation. (#2370) 2023-01-16 10:18:55 -05:00
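
Based on the HostContext change later in this diff, the extra product token is read from an environment variable and must be in 'name/version' form; the token below is made up for illustration:

```bash
# Appended to the runner's User-Agent header on all HTTP requests for easier correlation.
export GITHUB_ACTIONS_RUNNER_EXTRA_USER_AGENT='my-orchestrator/1.2.3'
./run.sh
```
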
Yang Cao
caec043085 Always upload to avoid issues (#2334)
* Remove unnecessary timelineId and timelineRecordId and use Guid stepId

* Log upload error to kusto

* Remove try-catch

* Using a well known telemetry record to avoid replacing issues

* fix Guid format
2022-12-28 11:56:53 -05:00
Tingluo Huang
a1244d2269 Add Header/Footer to multi-line message in StdoutTraceListener. (#2336) 2022-12-22 10:38:29 -05:00
Nikola Jokic
332b97f838 Treat jitconfig as secret. (#2335)
Co-authored-by: TingluoHuang <TingluoHuang@github.com>
2022-12-21 13:30:22 -05:00
Stefan Ruvceski
72830cfc12 split by regex (#2333)
* split by regex

* pr fix

* adding tests

* test fix
2022-12-20 14:28:29 +01:00
Tingluo Huang
29a28a870f Make runner image print diag log to STDOUT. (#2331) 2022-12-19 16:57:16 -05:00
Tingluo Huang
0dd7a113f1 Log GitHub RequestId for better traceability. (#2332) 2022-12-19 19:46:29 +00:00
TingluoHuang
83b8baa45e Bump runner version to 2.300.2 to match released version. 2022-12-19 14:22:51 -05:00
Ferenc Hammerl
d5e566ad17 Release notes for 2.300.1 (#2326)
* Update runnerversion

* Update releaseNote.md
2022-12-19 13:08:36 -05:00
Bethany
64381cca6a Re-add file size check prior to reading file (#2321) (#2330)
* Re-add file size check prior to reading file

* Remove redundant file size check
2022-12-19 12:56:28 -05:00
Stefan Ruvceski
f1b1532f32 set env in ProcessInvoker sanitized (#2280)
* set env in ProcessInvoker sanitized
2022-12-19 15:01:53 +01:00
Nikola Jokic
04761e5353 Initialize container manager based on whether the ContainerHooksPath is set (#2317)
* Added tests around checking if correct manager's Initialize method has been called

* repaired missing initialization on container action handler
2022-12-16 15:40:49 +01:00
Ava Stancu
f9e2fa939c Updated contact links for feature requests (#2314)
Users need to use the Github Community feedback page for all feature/enhancement requests.
2022-12-15 17:17:41 +02:00
Ferenc Hammerl
92acb625fb Update Dockerfile (#2315) 2022-12-15 15:44:07 +01:00
Ava Stancu
6b9e8a6be4 prepare release notes for 2.300.0 (#2312)
* Update runner version

* Update releaseNote.md

* Update releaseNote.md

* Update releaseNote.md

Co-authored-by: JoannaaKL <joannaakl@github.com>

* Update releaseNote.md

* Update releaseNote.md

Co-authored-by: JoannaaKL <joannaakl@github.com>
Co-authored-by: Ferenc Hammerl <31069338+fhammerl@users.noreply.github.com>
2022-12-14 10:33:02 +02:00
Brittany Ellich
f41f5d259d Use results for uploading step summaries (#2301)
* Use results service for uploading step summaries

* Use results summary over generic results naming convention

* Apply suggestions from code review

Co-authored-by: Tingluo Huang <tingluohuang@github.com>

* Addressing feedback

* Fix merge issue

* Remove empty line

* Update Results json objects to use snake case

* Adding the reference

Co-authored-by: Yang Cao <yacaovsnc@github.com>
Co-authored-by: Tingluo Huang <tingluohuang@github.com>
2022-12-14 09:28:33 +01:00
Ava Stancu
369a4eccad Made worker logs available to stdout (#2307)
* Made worker logs available to stdout

* Log Worker Standard out line by line

Co-authored-by: Ferenc Hammerl <31069338+fhammerl@users.noreply.github.com>
2022-12-08 16:23:52 -05:00
Ava Stancu
088981a372 Listener stdout logging (#2291)
* Added env variable to control whether the terminal is silent

* Log to stdout if PrintLogToStdout is enabled

* Extracted console logging to stdouttracelistener

* Remove useless usings

* Rewrite TraceListener as superclass

* Only print to stdout if env is set

* Add comment for Console.Out

* Format Listener

* Revert var name in terminal

* Check env in hostcontext instead of Tracing constructor

* Remove superclass & dupe logging code

* Log hostType

* Readonly '_' prefix 'hostType'

* Fix test

* Revert Terminal change

Co-authored-by: Ferenc Hammerl <31069338+fhammerl@users.noreply.github.com>
2022-12-06 16:16:00 +01:00
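
Per the Constants and HostContext changes below, stdout logging is gated on a single environment variable; a minimal way to try it on a self-hosted runner:

```bash
# Mirror the _diag trace output to stdout (the new runner image sets this by default).
export ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
./run.sh
```
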
Nikola Jokic
852a80fcbd Return exit code when MANUALLY_TRAP_SIG is exported (#2285) 2022-11-28 11:33:01 -05:00
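
A small sketch of what the fix means in practice, assuming the entrypoint with manual signal trapping enabled:

```bash
# With manual signal trapping on, run.sh now propagates the listener's
# return code instead of always exiting 0.
export RUNNER_MANUALLY_TRAP_SIG=1
./run.sh
echo "runner exited with status $?"
```
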
Ferenc Hammerl
63640e91fa Backfill notes and version from 'Release 2.299.1 runner.' (#2277)
* Release 2.299.1 runner.

* Fix typo in releaseNote

Co-authored-by: TingluoHuang <TingluoHuang@github.com>
2022-11-22 15:05:47 +01:00
Nikola Jokic
9122fe7e10 added to lowercase on setting github.action_status (#1944) 2022-11-22 15:05:32 +01:00
Nikola Jokic
6b8452170a (delete.sh) Logging repaired and made runner_name optional defaulting to hostname (#1871)
* Logging repaired and made runner_name optional defaulting to hostname

* Update scripts/delete.sh

Co-authored-by: Josh Soref <2119212+jsoref@users.noreply.github.com>

Co-authored-by: Josh Soref <2119212+jsoref@users.noreply.github.com>
2022-11-22 14:47:22 +01:00
Nikola Jokic
cc49e65356 added replace to allow create latest svc to apply --replace flag (#2273) 2022-11-22 14:46:11 +01:00
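
Going by the create-latest-svc.sh diff further down, the new behavior is a -f switch that forwards --replace to config.sh; an illustrative call (the scope, runner name, and the other flag meanings are assumed from the script's usual usage, not from this diff):

```bash
export RUNNER_CFG_PAT="<your PAT>"
# -s scope (owner/repo or org), -n runner name, -f replace an existing runner with the same name
./create-latest-svc.sh -s myorg/myrepo -n build-runner-01 -f
```
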
Tatyana Kostromskaya
1632e4a343 Support runner upgrade messages (#2231)
Co-authored-by: Thomas Boop <52323235+thboop@users.noreply.github.com>
2022-11-21 16:17:47 +01:00
Ava Stancu
b465102e7f Updated info on process for requesting features and enhancements (#2259) 2022-11-18 16:07:59 +01:00
Amit Rathi
98c857b927 expose github.actor_id, github.workflow_ref & github.workflow_sha as environment variable (#2249)
* expose workflow refs/sha  as environment variables

* fixes environment variable ordering

* job_workflow_ref/sha aren't available in gh ctx
2022-11-17 11:11:52 -05:00
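
Inside a job step these new context values should also surface as default environment variables; the names below assume the usual github.* to GITHUB_* mapping and are not taken from this diff:

```bash
# Run from a workflow step executing on a runner with this change.
echo "actor id:     ${GITHUB_ACTOR_ID}"
echo "workflow ref: ${GITHUB_WORKFLOW_REF}"
echo "workflow sha: ${GITHUB_WORKFLOW_SHA}"
```
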
Chris Patterson
dda53af485 Small change to Node.js 12 deprecation message (#2262)
* Small change to Node.js 12 deprecation message

* Update src/Runner.Common/Constants.cs
2022-11-16 16:25:59 +01:00
Tingluo Huang
c0bc4c02f8 Add RUNNER_ALLOW_RUNASROOT=1 to dockerfile (#2254) 2022-11-15 12:38:11 -05:00
Nikola Jokic
c6630ce285 Dockerfile and workflow change for runner image (#2250)
* Workflow

* add back github-token to id:image

* added handle to image name

* removed core require

* multi line added :latest

* added label

* added repository_owner in IMAGE_NAME

* with the release

* release

* markdown label description

* Remove markdown description

* Remove double quotes in labels

* Reverted back releaseVersion
2022-11-10 12:08:13 -05:00
Tingluo Huang
40ed7f8a40 Forward parameters into run() func in run.sh. (#2240) 2022-11-02 18:13:40 -04:00
110 changed files with 6624 additions and 4248 deletions


@@ -5,7 +5,10 @@
"features": {
"ghcr.io/devcontainers/features/docker-in-docker:1": {},
"ghcr.io/devcontainers/features/dotnet": {
"version": "6.0.300"
"version": "6.0.405"
},
"ghcr.io/devcontainers/features/node:1": {
"version": "16"
}
},
"customizations": {


@@ -1,5 +1,8 @@
blank_issues_enabled: false
contact_links:
- name: 🛑 Request a feature in the runner application
url: https://github.com/orgs/community/discussions/categories/actions-and-packages
about: If you have feature requests for GitHub Actions, please use the Actions and Packages section on the Github Product Feedback page.
- name: ✅ Support for GitHub Actions
url: https://github.community/c/code-to-cloud/52
about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum.


@@ -1,32 +0,0 @@
---
name: 🛑 Request a feature in the runner application
about: If you have feature requests for GitHub Actions, please use the "feedback and suggestions for GitHub Actions" link below.
title: ''
labels: enhancement
assignees: ''
---
<!--
👋 You're opening a request for an enhancement in the GitHub Actions **runner application**.
🛑 Please stop if you're not certain that the feature you want is in the runner application - if you have a suggestion for improving GitHub Actions, please see the [GitHub Actions Feedback](https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback) discussion forum which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃
Some additional useful links:
* If you have found a security issue [please submit it here](https://hackerone.com/github)
* If you have questions or issues with the service, writing workflows or actions, then please [visit the GitHub Community Forum's Actions Board](https://github.community/t5/GitHub-Actions/bd-p/actions)
* If you are having an issue or have a question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)
If you have a feature request that is relevant to this repository, the runner, then please include the information below:
-->
**Describe the enhancement**
A clear and concise description of what the features or enhancement you need.
**Code Snippet**
If applicable, add a code snippet.
**Additional information**
Add any other context about the feature here.
NOTE: if the feature request has been agreed upon then the assignee will create an ADR. See docs/adrs/README.md


@@ -27,7 +27,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
uses: github/codeql-action/init@v2
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
@@ -38,4 +38,4 @@ jobs:
working-directory: src
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1
uses: github/codeql-action/analyze@v2


@@ -18,7 +18,6 @@ jobs:
uses: github/super-linter@v4
env:
DEFAULT_BRANCH: ${{ github.base_ref }}
DISABLE_ERRORS: true
EDITORCONFIG_FILE_NAME: .editorconfig
LINTER_RULES_PATH: /src/
VALIDATE_ALL_CODEBASE: false

.github/workflows/publish-image.yml (new file, vendored, 65 lines added)

@@ -0,0 +1,65 @@
name: Publish Runner Image
on:
workflow_dispatch:
inputs:
runnerVersion:
type: string
description: Version of the runner being installed
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Compute image version
id: image
uses: actions/github-script@v6
with:
script: |
const fs = require('fs');
const inputRunnerVersion = "${{ github.event.inputs.runnerVersion }}"
if (inputRunnerVersion) {
console.log(`Using input runner version ${inputRunnerVersion}`)
core.setOutput('version', inputRunnerVersion);
return
}
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
console.log(`Using runner version ${runnerVersion}`)
core.setOutput('version', runnerVersion);
- name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
- name: Log into registry ${{ env.REGISTRY }}
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v3
with:
context: ./images
tags: |
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
build-args: |
RUNNER_VERSION=${{ steps.image.outputs.version }}
push: true
labels: |
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
org.opencontainers.image.licenses=MIT


@@ -660,3 +660,52 @@ jobs:
asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
publish-image:
needs: release
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository_owner }}/actions-runner
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Compute image version
id: image
uses: actions/github-script@v6
with:
script: |
const fs = require('fs');
const runnerVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '')
console.log(`Using runner version ${runnerVersion}`)
core.setOutput('version', runnerVersion);
- name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
- name: Log into registry ${{ env.REGISTRY }}
uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v3
with:
context: ./images
tags: |
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.image.outputs.version }}
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
build-args: |
RUNNER_VERSION=${{ steps.image.outputs.version }}
push: true
labels: |
org.opencontainers.image.source=${{github.server_url}}/${{github.repository}}
org.opencontainers.image.description=https://github.com/actions/runner/releases/tag/v${{ steps.image.outputs.version }}
org.opencontainers.image.licenses=MIT


@@ -22,4 +22,4 @@ Runner releases:
## Contribute
We accept contributions in the form of issues and pull requests. [Read more here](docs/contribute.md) before contributing.
We accept contributions in the form of issues and pull requests. The runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page. [Read more about our guidelines here](docs/contribute.md) before contributing.


@@ -1,6 +1,6 @@
# Contributions
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors.
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors.Please note the runner typically requires changes across the entire system and we aim for issues in the runner to be entirely self contained and fixable here. Therefore, we will primarily handle bug issues opened in this repo and we kindly request you to create all feature and enhancement requests on the [GitHub Feedback](https://github.com/community/community/discussions/categories/actions-and-packages) page.
> IMPORTANT: Building your own runner is critical for the dev inner loop process when contributing changes. However, only runners built and distributed by GitHub (releases) are supported in production. Be aware that workflows and orchestrations run service side with the runner being a remote process to run steps. For that reason, the service can pull the runner forward so customizations can be lost.
@@ -158,3 +158,11 @@ cat (Runner/Worker)_TIMESTAMP.log # view your log file
We use the .NET Foundation and CoreCLR style guidelines [located here](
https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md)
### Format C# Code
To format both staged and unstaged .cs files
```
cd ./src
./dev.(cmd|sh) format
```


@@ -35,7 +35,7 @@ All the configs below can be found in `.vscode/launch.json`.
If you launch `Run` or `Run [build]`, it starts a process called `Runner.Listener`.
This process will receive any job queued on this repository if the job runs on matching labels (e.g `runs-on: self-hosted`).
Once a job is received, a `Runner.Listener` starts a new process of `Runner.Worker`.
Since this is a diferent process, you can't use the same debugger session debug it.
Since this is a different process, you can't use the same debugger session debug it.
Instead, a parallel debugging session has to be started, using a different launch config.
Luckily, VS Code supports multiple parallel debugging sessions.

images/Dockerfile (new file, 49 lines added)

@@ -0,0 +1,49 @@
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0 as build
ARG RUNNER_VERSION
ARG RUNNER_ARCH="x64"
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.2.0
ARG DOCKER_VERSION=20.10.23
RUN apt update -y && apt install curl unzip -y
WORKDIR /actions-runner
RUN curl -f -L -o runner.tar.gz https://github.com/actions/runner/releases/download/v${RUNNER_VERSION}/actions-runner-linux-${RUNNER_ARCH}-${RUNNER_VERSION}.tar.gz \
&& tar xzf ./runner.tar.gz \
&& rm runner.tar.gz
RUN curl -f -L -o runner-container-hooks.zip https://github.com/actions/runner-container-hooks/releases/download/v${RUNNER_CONTAINER_HOOKS_VERSION}/actions-runner-hooks-k8s-${RUNNER_CONTAINER_HOOKS_VERSION}.zip \
&& unzip ./runner-container-hooks.zip -d ./k8s \
&& rm runner-container-hooks.zip
RUN export DOCKER_ARCH=x86_64 \
&& if [ "$RUNNER_ARCH" = "arm64" ]; then export DOCKER_ARCH=aarch64 ; fi \
&& curl -fLo docker.tgz https://download.docker.com/linux/static/stable/${DOCKER_ARCH}/docker-${DOCKER_VERSION}.tgz \
&& tar zxvf docker.tgz \
&& rm -rf docker.tgz
FROM mcr.microsoft.com/dotnet/runtime-deps:6.0
ENV DEBIAN_FRONTEND=noninteractive
ENV RUNNER_MANUALLY_TRAP_SIG=1
ENV ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT=1
RUN apt-get update -y \
&& apt-get install -y --no-install-recommends \
sudo \
&& rm -rf /var/lib/apt/lists/*
RUN adduser --disabled-password --gecos "" --uid 1001 runner \
&& groupadd docker --gid 123 \
&& usermod -aG sudo runner \
&& usermod -aG docker runner \
&& echo "%sudo ALL=(ALL:ALL) NOPASSWD:ALL" > /etc/sudoers \
&& echo "Defaults env_keep += \"DEBIAN_FRONTEND\"" >> /etc/sudoers
WORKDIR /home/runner
COPY --chown=runner:docker --from=build /actions-runner .
RUN install -o root -g root -m 755 docker/* /usr/bin/ && rm -rf docker
USER runner
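
For reference, a local build of this image would look roughly like the publish-image workflow above, with the release selected via a build argument (the tag is arbitrary):

```bash
# Build the runner image from the new images/ context; RUNNER_VERSION picks
# which release tarball the first stage downloads.
docker build ./images \
  --build-arg RUNNER_VERSION=2.302.0 \
  -t actions-runner:2.302.0
```
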


@@ -1,18 +1,19 @@
## Features
- Displays the error logs in dedicated sub-sections of the Initialize containers section (#2182)
- Add generateServiceConfig option for configure command (#2226)
- Setting debug using GitHub Action variables (#2234)
- run.sh installs SIGINT and SIGTERM traps to gracefully stop runner (#2233, 2240)
- Add support for ghe.com domain (#2420)
- Add docker cli to the runner image. (#2425)
## Bugs
- Use Global.Variables instead of JobContext and include action path/ref in the message. (#2214)
- Fix URL construction bug for RunService (#2396)
- Defer evaluation of a step's DisplayName until its condition is evaluated. (#2313)
- Replace '(' and ')' with '[' and '] from OS.Description for fixing User-Agent header validation (#2288)
## Misc
- Allow '--disableupdate' in create-latest-svc.sh (#2201)
- Fix markup for support link (#2114)
- Add runner devcontainer (#2187)
- Setup linter for Runner (#2211, #2213, #2216)
- Bump dotnet sdk to latest version. (#2392)
- Start calling run service for job completion (#2412, #2423)
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
See https://docs.github.com/en/enterprise-cloud@latest/actions/hosting-your-own-runners/adding-self-hosted-runners_
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.


@@ -1 +1 @@
2.299.1
<Update to ./src/runnerversion when creating release>


@@ -13,7 +13,7 @@ set -e
flags_found=false
while getopts 's:g:n:r:u:l:d' opt; do
while getopts 's:g:n:r:u:l:df' opt; do
flags_found=true
case $opt in
@@ -35,6 +35,9 @@ while getopts 's:g:n:r:u:l:d' opt; do
l)
labels=$OPTARG
;;
f)
replace='true'
;;
d)
disableupdate='true'
;;
@@ -53,7 +56,8 @@ Usage:
-r optional name of the runner group to add the runner to, defaults to the Default group
-u optional user svc will run as, defaults to current
-l optional list of labels (split by comma) applied on the runner
-d optional allow runner to remain on the current version for one month after the release of a newer version"
-d optional allow runner to remain on the current version for one month after the release of a newer version
-f optional replace any existing runner with the same name"
exit 0
;;
esac
@@ -174,7 +178,7 @@ fi
echo
echo "Configuring ${runner_name} @ $runner_url"
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup \"$runner_group\"} ${disableupdate:+--disableupdate}"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup "$runner_group"} ${disableupdate:+--disableupdate}
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN ${replace:+--replace} --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup "$runner_group"} ${disableupdate:+--disableupdate}
#---------------------------------------
# Configuring as a service


@@ -1,4 +1,4 @@
#/bin/bash
#!/bin/bash
set -e
@@ -12,7 +12,7 @@ set -e
#
# Usage:
# export RUNNER_CFG_PAT=<yourPAT>
# ./delete.sh scope name
# ./delete.sh <scope> [<name>]
#
# scope required repo (:owner/:repo) or org (:organization)
# name optional defaults to hostname. name to delete
@@ -26,17 +26,17 @@ set -e
runner_scope=${1}
runner_name=${2}
echo "Deleting runner ${runner_name} @ ${runner_scope}"
function fatal()
function fatal()
{
echo "error: $1" >&2
exit 1
}
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
if [ -z "${runner_name}" ]; then fatal "supply name as argument 2"; fi
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
if [ -z "${runner_name}" ]; then runner_name=`hostname`; fi
echo "Deleting runner ${runner_name} @ ${runner_scope}"
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"


@@ -1 +1 @@
1d709d93e5d3c6c6c656a61aa6c1781050224788a05b0e6ecc4c3c0408bdf89c
39f2a931565d6a10e695ac8ed14bb9dcbb568151410349b32dbf9c27bae29602


@@ -1 +1 @@
b92a47cfeaad02255b1f7a377060651b73ae5e5db22a188dbbcb4183ab03a03d
29ffb303537d8ba674fbebc7729292c21c4ebd17b3198f91ed593ef4cbbb67b5


@@ -1 +1 @@
68a9a8ef0843a8bb74241894f6f63fd76241a82295c5337d3cc7a940a314c78e
de6868a836fa3cb9e5ddddbc079da1c25e819aa2d2fc193cc9931c353687c57c


@@ -1 +1 @@
02c7126ff4d63ee2a0ae390c81434c125630522aadf35903bbeebb1a99d8af99
339d3e1a5fd28450c0fe6cb820cc7aae291f0f9e2d153ac34e1f7b080e35d30e


@@ -1 +1 @@
c9d5a542f8d765168855a89e83ae0a8970d00869041c4f9a766651c04c72b212
dcb7f606c1d7d290381e5020ee73e7f16dcbd2f20ac9b431362ccbb5120d449c


@@ -1 +1 @@
39d0683f0f115a211cb10c473e9574c16549a19d4e9a6c637ded3d7022bf809f
1bbcb0e9a2cf4be4b1fce77458de139b70ac58efcbb415a6db028b9373ae1673


@@ -1 +1 @@
d94f2fbaf210297162bc9f3add819d73682c3aa6899e321c3872412b924d5504
44cd25f3c104d0abb44d262397a80e0b2c4f206465c5d899a22eec043dac0fb3


@@ -1 +1 @@
6ed30a2c1ee403a610d63e82bb230b9ba846a9c25cec9e4ea8672fb6ed4e1a51
3807dcbf947e840c33535fb466b096d76bf09e5c0254af8fc8cbbb24c6388222


@@ -1 +1 @@
711c30c51ec52c9b7a9a2eb399d6ab2ab5ee1dc72de11879f2f36f919f163d78
ee01eee80cd8a460a4b9780ee13fdd20f25c59e754b4ccd99df55fbba2a85634


@@ -1 +1 @@
a49479ca4b4988a06c097e8d22c51fd08a11c13f40807366236213d0e008cf6a
a9fb9c14e24e79aec97d4da197dd7bfc6364297d6fce573afb2df48cc9a931f8


@@ -1 +1 @@
cc4708962a80325de0baa5ae8484e0cb9ae976ac6a4178c1c0d448b8c52bd7f7
a4e0e8fc62eba0967a39c7d693dcd0aeb8b2bed0765f9c38df80d42884f65341


@@ -1 +1 @@
8e97df75230b843462a9b4c578ccec604ee4b4a1066120c85b04374317fa372b
17ac17fbe785b3d6fa2868d8d17185ebfe0c90b4b0ddf6b67eac70e42bcd989b


@@ -1 +1 @@
e5dace2d41cc0682d096dcce4970079ad48ec7107e46195970eecfdb3df2acef
89f24657a550f1e818b0e9975e5b80edcf4dd22b7d4bccbb9e48e37f45d30fb1


@@ -1 +1 @@
f75a671e5a188c76680739689aa75331a2c09d483dce9c80023518c48fd67a18
24fd131b5dce33ef16038b771407bc0507da8682a72fb3b7780607235f76db0b


@@ -14,7 +14,7 @@
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^5.15.0",
"@zeit/ncc": "^0.20.5",
"@vercel/ncc": "^0.36.0",
"eslint": "^8.11.0",
"eslint-plugin-github": "^4.3.5",
"prettier": "^1.19.1",
@@ -346,11 +346,10 @@
"url": "https://opencollective.com/typescript-eslint"
}
},
"node_modules/@zeit/ncc": {
"version": "0.20.5",
"resolved": "https://registry.npmjs.org/@zeit/ncc/-/ncc-0.20.5.tgz",
"integrity": "sha512-XU6uzwvv95DqxciQx+aOLhbyBx/13ky+RK1y88Age9Du3BlA4mMPCy13BGjayOrrumOzlq1XV3SD/BWiZENXlw==",
"deprecated": "@zeit/ncc is no longer maintained. Please use @vercel/ncc instead.",
"node_modules/@vercel/ncc": {
"version": "0.36.0",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.36.0.tgz",
"integrity": "sha512-/ZTUJ/ZkRt694k7KJNimgmHjtQcRuVwsST2Z6XfYveQIuBbHR+EqkTc1jfgPkQmMyk/vtpxo3nVxe8CNuau86A==",
"dev": true,
"bin": {
"ncc": "dist/ncc/cli.js"
@@ -1722,9 +1721,9 @@
"dev": true
},
"node_modules/json5": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz",
"integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==",
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz",
"integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==",
"dev": true,
"dependencies": {
"minimist": "^1.2.0"
@@ -1824,9 +1823,9 @@
}
},
"node_modules/minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
@@ -2747,10 +2746,10 @@
"eslint-visitor-keys": "^3.0.0"
}
},
"@zeit/ncc": {
"version": "0.20.5",
"resolved": "https://registry.npmjs.org/@zeit/ncc/-/ncc-0.20.5.tgz",
"integrity": "sha512-XU6uzwvv95DqxciQx+aOLhbyBx/13ky+RK1y88Age9Du3BlA4mMPCy13BGjayOrrumOzlq1XV3SD/BWiZENXlw==",
"@vercel/ncc": {
"version": "0.36.0",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.36.0.tgz",
"integrity": "sha512-/ZTUJ/ZkRt694k7KJNimgmHjtQcRuVwsST2Z6XfYveQIuBbHR+EqkTc1jfgPkQmMyk/vtpxo3nVxe8CNuau86A==",
"dev": true
},
"acorn": {
@@ -3756,9 +3755,9 @@
"dev": true
},
"json5": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz",
"integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==",
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz",
"integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==",
"dev": true,
"requires": {
"minimist": "^1.2.0"
@@ -3840,9 +3839,9 @@
}
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"requires": {
"brace-expansion": "^1.1.7"
}


@@ -26,7 +26,7 @@
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^5.15.0",
"@zeit/ncc": "^0.20.5",
"@vercel/ncc": "^0.36.0",
"eslint": "^8.11.0",
"eslint-plugin-github": "^4.3.5",
"prettier": "^1.19.1",


@@ -5,7 +5,7 @@ PRECACHE=$2
NODE_URL=https://nodejs.org/dist
UNOFFICIAL_NODE_URL=https://unofficial-builds.nodejs.org/download/release
NODE12_VERSION="12.22.7"
NODE16_VERSION="16.13.0"
NODE16_VERSION="16.16.0"
get_abs_path() {
# exploits the fact that pwd will print abs path when no args


@@ -24,7 +24,8 @@ if (exitServiceAfterNFailures <= 0) {
exitServiceAfterNFailures = NaN;
}
var consecutiveFailureCount = 0;
var unknownFailureRetryCount = 0;
var retriableFailureRetryCount = 0;
var gracefulShutdown = function () {
console.log("Shutting down runner listener");
@@ -62,7 +63,8 @@ var runService = function () {
listener.stdout.on("data", (data) => {
if (data.toString("utf8").includes("Listening for Jobs")) {
consecutiveFailureCount = 0;
unknownFailureRetryCount = 0;
retriableFailureRetryCount = 0;
}
process.stdout.write(data.toString("utf8"));
});
@@ -92,24 +94,38 @@ var runService = function () {
console.log(
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
unknownFailureRetryCount = 0;
retriableFailureRetryCount++;
if (retriableFailureRetryCount >= 10) {
console.error(
"Stopping the runner after 10 consecutive re-tryable failures"
);
stopping = true;
}
} else if (code === 3 || code === 4) {
console.log(
"Runner listener exit because of updating, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
unknownFailureRetryCount = 0;
retriableFailureRetryCount++;
if (retriableFailureRetryCount >= 10) {
console.error(
"Stopping the runner after 10 consecutive re-tryable failures"
);
stopping = true;
}
} else {
var messagePrefix = "Runner listener exit with undefined return code";
consecutiveFailureCount++;
unknownFailureRetryCount++;
retriableFailureRetryCount = 0;
if (
!isNaN(exitServiceAfterNFailures) &&
consecutiveFailureCount >= exitServiceAfterNFailures
unknownFailureRetryCount >= exitServiceAfterNFailures
) {
console.error(
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
`${messagePrefix}, exiting service after ${unknownFailureRetryCount} consecutive failures`
);
gracefulShutdown();
return;
stopping = true
} else {
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
}

(File diff suppressed because it is too large.)


@@ -18,6 +18,20 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
# Wait for docker to start
if [ ! -z "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS" ]; then
if [ "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS" -gt 0 ]; then
echo "Waiting for docker to be ready."
for i in $(seq "$RUNNER_WAIT_FOR_DOCKER_IN_SECONDS"); do
if docker ps > /dev/null 2>&1; then
echo "Docker is ready."
break
fi
"$DIR"/safe_sleep.sh 1
done
fi
fi
updateFile="update.finished"
"$DIR"/bin/Runner.Listener run $*


@@ -48,7 +48,7 @@ runWithManualTrap() {
trap - INT TERM
# wait for last parts to be logged
wait $PID
exit 0
exit $returnCode
fi
done
}


@@ -74,6 +74,7 @@ Microsoft.Win32.Registry.dll
mscordaccore.dll
mscordaccore_amd64_amd64_6.0.522.21309.dll
mscordaccore_arm64_arm64_6.0.522.21309.dll
mscordaccore_amd64_amd64_6.0.1322.58009.dll
mscordbi.dll
mscorlib.dll
mscorrc.debug.dll


@@ -90,7 +90,6 @@ namespace GitHub.Runner.Common
public static class Args
{
public static readonly string Auth = "auth";
public static readonly string JitConfig = "jitconfig";
public static readonly string Labels = "labels";
public static readonly string MonitorSocketAddress = "monitorsocketaddress";
public static readonly string Name = "name";
@@ -105,11 +104,13 @@ namespace GitHub.Runner.Common
public static readonly string Token = "token";
public static readonly string PAT = "pat";
public static readonly string WindowsLogonPassword = "windowslogonpassword";
public static readonly string JitConfig = "jitconfig";
public static string[] Secrets => new[]
{
PAT,
Token,
WindowsLogonPassword,
JitConfig,
};
}
@@ -130,6 +131,7 @@ namespace GitHub.Runner.Common
public static readonly string Ephemeral = "ephemeral";
public static readonly string GenerateServiceConfig = "generateServiceConfig";
public static readonly string Help = "help";
public static readonly string Local = "local";
public static readonly string Replace = "replace";
public static readonly string DisableUpdate = "disableupdate";
public static readonly string Once = "once"; // Keep this around since customers still relies on it
@@ -157,14 +159,17 @@ namespace GitHub.Runner.Common
}
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
public static readonly Guid TelemetryRecordId = new Guid("11111111-1111-1111-1111-111111111111");
public static readonly string WorkerCrash = "WORKER_CRASH";
public static readonly string LowDiskSpace = "LOW_DISK_SPACE";
public static readonly string UnsupportedCommand = "UNSUPPORTED_COMMAND";
public static readonly string ResultsUploadFailure = "RESULTS_UPLOAD_FAILURE";
public static readonly string UnsupportedCommandMessage = "The `{0}` command is deprecated and will be disabled soon. Please upgrade to using Environment Files. For more information see: https://github.blog/changelog/2022-10-11-github-actions-deprecating-save-state-and-set-output-commands/";
public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/";
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
public static readonly string Node12DetectedAfterEndOfLife = "Node.js 12 actions are deprecated. For more information see: https://github.blog/changelog/2022-09-22-github-actions-all-actions-will-begin-running-on-node16-instead-of-node12/. Please update the following actions to use Node.js 16: {0}";
public static readonly string SummaryUploadError = "$GITHUB_STEP_SUMMARY upload aborted, an error occurred when uploading the summary. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
public static readonly string Node12DetectedAfterEndOfLife = "Node.js 12 actions are deprecated. Please update the following actions to use Node.js 16: {0}. For more information see: https://github.blog/changelog/2022-09-22-github-actions-all-actions-will-begin-running-on-node16-instead-of-node12/.";
}
public static class RunnerEvent
@@ -245,6 +250,7 @@ namespace GitHub.Runner.Common
// Set this env var to "node12" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
public static readonly string ForcedActionsNodeVersion = "ACTIONS_RUNNER_FORCE_ACTIONS_NODE_VERSION";
public static readonly string PrintLogToStdout = "ACTIONS_RUNNER_PRINT_LOG_TO_STDOUT";
}
public static class System


@@ -94,6 +94,13 @@ namespace GitHub.Runner.Common
this.SecretMasker.AddValueEncoder(ValueEncoders.PowerShellPreAmpersandEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.PowerShellPostAmpersandEscape);
// Create StdoutTraceListener if ENV is set
StdoutTraceListener stdoutTraceListener = null;
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Agent.PrintLogToStdout)))
{
stdoutTraceListener = new StdoutTraceListener(hostType);
}
// Create the trace manager.
if (string.IsNullOrEmpty(logFile))
{
@@ -113,11 +120,11 @@ namespace GitHub.Runner.Common
// this should give us _diag folder under runner root directory
string diagLogDirectory = Path.Combine(new DirectoryInfo(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location)).Parent.FullName, Constants.Path.DiagDirectory);
_traceManager = new TraceManager(new HostTraceListener(diagLogDirectory, hostType, logPageSize, logRetentionDays), this.SecretMasker);
_traceManager = new TraceManager(new HostTraceListener(diagLogDirectory, hostType, logPageSize, logRetentionDays), stdoutTraceListener, this.SecretMasker);
}
else
{
_traceManager = new TraceManager(new HostTraceListener(logFile), this.SecretMasker);
_traceManager = new TraceManager(new HostTraceListener(logFile), stdoutTraceListener, this.SecretMasker);
}
_trace = GetTrace(nameof(HostContext));
@@ -219,6 +226,20 @@ namespace GitHub.Runner.Common
}
_userAgents.Add(new ProductInfoHeaderValue("CommitSHA", BuildConstants.Source.CommitHash));
var extraUserAgent = Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_EXTRA_USER_AGENT");
if (!string.IsNullOrEmpty(extraUserAgent))
{
var extraUserAgentSplit = extraUserAgent.Split('/', StringSplitOptions.RemoveEmptyEntries);
if (extraUserAgentSplit.Length != 2)
{
_trace.Error($"GITHUB_ACTIONS_RUNNER_EXTRA_USER_AGENT is not in the format of 'name/version'.");
}
var extraUserAgentHeader = new ProductInfoHeaderValue(extraUserAgentSplit[0], extraUserAgentSplit[1]);
_trace.Info($"Adding extra user agent '{extraUserAgentHeader}' to all HTTP requests.");
_userAgents.Add(extraUserAgentHeader);
}
}
public string GetDirectory(WellKnownDirectory directory)


@@ -13,6 +13,8 @@ using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using GitHub.Services.WebApi.Utilities.Internal;
using GitHub.Services.Results.Client;
using GitHub.Services.OAuth;
namespace GitHub.Runner.Common
{
@@ -22,11 +24,15 @@ namespace GitHub.Runner.Common
Task ConnectAsync(VssConnection jobConnection);
void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
void InitializeResultsClient(Uri uri, string token);
// logging and console
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
Task CreateStepSummaryAsync(string planId, string jobId, Guid stepId, string file, CancellationToken cancellationToken);
Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken);
Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken);
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
@@ -40,6 +46,7 @@ namespace GitHub.Runner.Common
private bool _hasConnection;
private VssConnection _connection;
private TaskHttpClient _taskClient;
private ResultsHttpClient _resultsClient;
private ClientWebSocket _websocketClient;
private ServiceEndpoint _serviceEndpoint;
@@ -143,6 +150,12 @@ namespace GitHub.Runner.Common
InitializeWebsocketClient(TimeSpan.Zero);
}
public void InitializeResultsClient(Uri uri, string token)
{
var httpMessageHandler = HostContext.CreateHttpClientHandler();
this._resultsClient = new ResultsHttpClient(uri, httpMessageHandler, token, disposeHandler: true);
}
public ValueTask DisposeAsync()
{
CloseWebSocket(WebSocketCloseStatus.NormalClosure, CancellationToken.None);
@@ -305,6 +318,33 @@ namespace GitHub.Runner.Common
return _taskClient.CreateAttachmentAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, type, name, uploadStream, cancellationToken: cancellationToken);
}
public Task CreateStepSummaryAsync(string planId, string jobId, Guid stepId, string file, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadStepSummaryAsync(planId, jobId, stepId, file, cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsStepLogAsync(planId, jobId, stepId, file, finalize, firstBlock, lineCount, cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task CreateResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
if (_resultsClient != null)
{
return _resultsClient.UploadResultsJobLogAsync(planId, jobId, file, finalize, firstBlock, lineCount, cancellationToken: cancellationToken);
}
throw new InvalidOperationException("Results client is not initialized.");
}
public Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken)
{
CheckConnection();


@@ -20,6 +20,7 @@ namespace GitHub.Runner.Common
void Start(Pipelines.AgentJobRequestMessage jobRequest);
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines);
void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord);
}
@@ -30,6 +31,7 @@ namespace GitHub.Runner.Common
private static readonly TimeSpan _delayForWebConsoleLineDequeue = TimeSpan.FromMilliseconds(500);
private static readonly TimeSpan _delayForTimelineUpdateDequeue = TimeSpan.FromMilliseconds(500);
private static readonly TimeSpan _delayForFileUploadDequeue = TimeSpan.FromMilliseconds(1000);
private static readonly TimeSpan _delayForResultsUploadDequeue = TimeSpan.FromMilliseconds(1000);
// Job message information
private Guid _scopeIdentifier;
@@ -44,6 +46,8 @@ namespace GitHub.Runner.Common
// queue for file upload (log file or attachment)
private readonly ConcurrentQueue<UploadFileInfo> _fileUploadQueue = new();
private readonly ConcurrentQueue<ResultsUploadFileInfo> _resultsFileUploadQueue = new();
// queue for timeline or timeline record update (one queue per timeline)
private readonly ConcurrentDictionary<Guid, ConcurrentQueue<TimelineRecord>> _timelineUpdateQueue = new();
@@ -56,6 +60,7 @@ namespace GitHub.Runner.Common
// Task for each queue's dequeue process
private Task _webConsoleLineDequeueTask;
private Task _fileUploadDequeueTask;
private Task _resultsUploadDequeueTask;
private Task _timelineUpdateDequeueTask;
// common
@@ -79,6 +84,9 @@ namespace GitHub.Runner.Common
private bool _webConsoleLineAggressiveDequeue = true;
private bool _firstConsoleOutputs = true;
private bool _resultsClientInitiated = false;
private delegate Task ResultsFileUploadHandler(ResultsUploadFileInfo file);
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -93,6 +101,20 @@ namespace GitHub.Runner.Common
_jobServer.InitializeWebsocketClient(serviceEndPoint);
// This code is usually wrapped by an instance of IExecutionContext which isn't available here.
jobRequest.Variables.TryGetValue("system.github.results_endpoint", out VariableValue resultsEndpointVariable);
var resultsReceiverEndpoint = resultsEndpointVariable?.Value;
if (serviceEndPoint?.Authorization != null &&
serviceEndPoint.Authorization.Parameters.TryGetValue("AccessToken", out var accessToken) &&
!string.IsNullOrEmpty(accessToken) &&
!string.IsNullOrEmpty(resultsReceiverEndpoint))
{
Trace.Info("Initializing results client");
_jobServer.InitializeResultsClient(new Uri(resultsReceiverEndpoint), accessToken);
_resultsClientInitiated = true;
}
if (_queueInProcess)
{
Trace.Info("No-opt, all queue process tasks are running.");
@@ -120,10 +142,13 @@ namespace GitHub.Runner.Common
Trace.Info("Start process file upload queue.");
_fileUploadDequeueTask = ProcessFilesUploadQueueAsync();
Trace.Info("Start results file upload queue.");
_resultsUploadDequeueTask = ProcessResultsUploadQueueAsync();
Trace.Info("Start process timeline update queue.");
_timelineUpdateDequeueTask = ProcessTimelinesUpdateQueueAsync();
_allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask };
_allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask, _resultsUploadDequeueTask };
_queueInProcess = true;
}
@@ -154,6 +179,10 @@ namespace GitHub.Runner.Common
await ProcessFilesUploadQueueAsync(runOnce: true);
Trace.Info("File upload queue drained.");
Trace.Verbose("Draining results upload queue.");
await ProcessResultsUploadQueueAsync(runOnce: true);
Trace.Info("Results upload queue drained.");
// ProcessTimelinesUpdateQueueAsync() will throw exception during shutdown
// if there is any timeline records that failed to update contains output variabls.
Trace.Verbose("Draining timeline update queue.");
@@ -204,6 +233,45 @@ namespace GitHub.Runner.Common
_fileUploadQueue.Enqueue(newFile);
}
public void QueueResultsUpload(Guid timelineRecordId, string name, string path, string type, bool deleteSource, bool finalize, bool firstBlock, long totalLines)
{
if (!_resultsClientInitiated)
{
Trace.Verbose("Skipping results upload");
try
{
if (deleteSource)
{
File.Delete(path);
}
}
catch (Exception ex)
{
Trace.Info("Catch exception during delete skipped results upload file.");
Trace.Error(ex);
}
return;
}
// all parameter not null, file path exist.
var newFile = new ResultsUploadFileInfo()
{
Name = name,
Path = path,
Type = type,
PlanId = _planId.ToString(),
JobId = _jobTimelineRecordId.ToString(),
RecordId = timelineRecordId,
DeleteSource = deleteSource,
Finalize = finalize,
FirstBlock = firstBlock,
TotalLines = totalLines,
};
Trace.Verbose("Enqueue results file upload queue: file '{0}' attach to job {1} step {2}", newFile.Path, _jobTimelineRecordId, timelineRecordId);
_resultsFileUploadQueue.Enqueue(newFile);
}
public void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord)
{
ArgUtil.NotEmpty(timelineId, nameof(timelineId));
@@ -299,7 +367,7 @@ namespace GitHub.Runner.Common
{
try
{
// Give at most 60s for each request.
// Give at most 60s for each request.
using (var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(60)))
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, timeoutTokenSource.Token);
@@ -394,6 +462,86 @@ namespace GitHub.Runner.Common
}
}
private async Task ProcessResultsUploadQueueAsync(bool runOnce = false)
{
Trace.Info("Starting results-based upload queue...");
while (!_jobCompletionSource.Task.IsCompleted || runOnce)
{
List<ResultsUploadFileInfo> filesToUpload = new();
ResultsUploadFileInfo dequeueFile;
while (_resultsFileUploadQueue.TryDequeue(out dequeueFile))
{
filesToUpload.Add(dequeueFile);
// process at most 10 file uploads.
if (!runOnce && filesToUpload.Count > 10)
{
break;
}
}
if (filesToUpload.Count > 0)
{
if (runOnce)
{
Trace.Info($"Uploading {filesToUpload.Count} file(s) in one shot through results service.");
}
int errorCount = 0;
foreach (var file in filesToUpload)
{
try
{
if (String.Equals(file.Type, ChecksAttachmentType.StepSummary, StringComparison.OrdinalIgnoreCase))
{
await UploadSummaryFile(file);
}
else if (String.Equals(file.Type, CoreAttachmentType.ResultsLog, StringComparison.OrdinalIgnoreCase))
{
if (file.RecordId != _jobTimelineRecordId)
{
Trace.Info($"Got a step log file to send to results service.");
await UploadResultsStepLogFile(file);
}
else if (file.RecordId == _jobTimelineRecordId)
{
Trace.Info($"Got a job log file to send to results service.");
await UploadResultsJobLogFile(file);
}
}
}
catch (Exception ex)
{
var issue = new Issue() { Type = IssueType.Warning, Message = $"Caught exception during file upload to results. {ex.Message}" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.ResultsUploadFailure;
var telemetryRecord = new TimelineRecord()
{
Id = Constants.Runner.TelemetryRecordId,
};
telemetryRecord.Issues.Add(issue);
QueueTimelineRecordUpdate(_jobTimelineId, telemetryRecord);
Trace.Info("Caught exception during file upload to results; keep going since the process is best effort.");
Trace.Error(ex);
errorCount++;
}
}
Trace.Info("Tried to upload {0} file(s) to results, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount);
}
if (runOnce)
{
break;
}
else
{
await Task.Delay(_delayForResultsUploadDequeue);
}
}
}
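The new results upload path mirrors the existing file upload queue: producers call QueueResultsUpload, a background dequeue task drains the queue in small batches, and a final runOnce pass empties it during shutdown. Below is a minimal sketch of that enqueue/drain pattern using only standard .NET types; the names are illustrative, not the runner's.

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading.Tasks;

class ResultsQueueSketch
{
    private readonly ConcurrentQueue<string> _queue = new();

    // Producer side: steps enqueue files to upload.
    public void Enqueue(string path) => _queue.Enqueue(path);

    // Consumer side: drain in small batches; pass runOnce = true for the shutdown drain.
    public async Task DrainAsync(Func<string, Task> upload, bool runOnce, Func<bool> jobCompleted)
    {
        while (!jobCompleted() || runOnce)
        {
            var batch = new List<string>();
            while (_queue.TryDequeue(out var path))
            {
                batch.Add(path);
                if (!runOnce && batch.Count >= 10) { break; } // cap the batch size per pass
            }

            foreach (var path in batch)
            {
                try { await upload(path); }
                catch { /* best effort: record the failure and keep going */ }
            }

            if (runOnce) { break; }
            await Task.Delay(TimeSpan.FromSeconds(1)); // wait before the next pass
        }
    }
}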
private async Task ProcessTimelinesUpdateQueueAsync(bool runOnce = false)
{
while (!_jobCompletionSource.Task.IsCompleted || runOnce)
@@ -665,6 +813,64 @@ namespace GitHub.Runner.Common
}
}
}
private async Task UploadSummaryFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting to upload summary file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler summaryHandler = async (file) =>
{
await _jobServer.CreateStepSummaryAsync(file.PlanId, file.JobId, file.RecordId, file.Path, CancellationToken.None);
};
await UploadResultsFile(file, summaryHandler);
}
private async Task UploadResultsStepLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of step log file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler stepLogHandler = async (file) =>
{
await _jobServer.CreateResultsStepLogAsync(file.PlanId, file.JobId, file.RecordId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
};
await UploadResultsFile(file, stepLogHandler);
}
private async Task UploadResultsJobLogFile(ResultsUploadFileInfo file)
{
Trace.Info($"Starting upload of job log file to results service {file.Name}, {file.Path}");
ResultsFileUploadHandler jobLogHandler = async (file) =>
{
await _jobServer.CreateResultsJobLogAsync(file.PlanId, file.JobId, file.Path, file.Finalize, file.FirstBlock, file.TotalLines, CancellationToken.None);
};
await UploadResultsFile(file, jobLogHandler);
}
private async Task UploadResultsFile(ResultsUploadFileInfo file, ResultsFileUploadHandler uploadHandler)
{
bool uploadSucceed = false;
try
{
await uploadHandler(file);
uploadSucceed = true;
}
finally
{
if (uploadSucceed && file.DeleteSource)
{
try
{
File.Delete(file.Path);
}
catch (Exception ex)
{
Trace.Info("Exception encountered during deletion of a temporary file that was already successfully uploaded to results.");
Trace.Error(ex);
}
}
}
}
}
internal class PendingTimelineRecord
@@ -683,6 +889,21 @@ namespace GitHub.Runner.Common
public bool DeleteSource { get; set; }
}
internal class ResultsUploadFileInfo
{
public string Name { get; set; }
public string Type { get; set; }
public string Path { get; set; }
public string PlanId { get; set; }
public string JobId { get; set; }
public Guid RecordId { get; set; }
public bool DeleteSource { get; set; }
public bool Finalize { get; set; }
public bool FirstBlock { get; set; }
public long TotalLines { get; set; }
}
internal class ConsoleLineInfo
{

View File

@@ -21,6 +21,12 @@ namespace GitHub.Runner.Common
// 8 MB
public const int PageSize = 8 * 1024 * 1024;
// For Results
public static string BlocksFolder = "blocks";
// 2 MB
public const int BlockSize = 2 * 1024 * 1024;
private Guid _timelineId;
private Guid _timelineRecordId;
private FileStream _pageData;
@@ -32,6 +38,13 @@ namespace GitHub.Runner.Common
private string _pagesFolder;
private IJobServerQueue _jobServerQueue;
private string _resultsDataFileName;
private FileStream _resultsBlockData;
private StreamWriter _resultsBlockWriter;
private string _resultsBlockFolder;
private int _blockByteCount;
private int _blockCount;
public long TotalLines => _totalLines;
public override void Initialize(IHostContext hostContext)
@@ -39,8 +52,10 @@ namespace GitHub.Runner.Common
base.Initialize(hostContext);
_totalLines = 0;
_pagesFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), PagingFolder);
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
Directory.CreateDirectory(_pagesFolder);
_resultsBlockFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), BlocksFolder);
Directory.CreateDirectory(_resultsBlockFolder);
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
}
public void Setup(Guid timelineId, Guid timelineRecordId)
@@ -60,11 +75,17 @@ namespace GitHub.Runner.Common
// lazy creation on write
if (_pageWriter == null)
{
Create();
NewPage();
}
if (_resultsBlockWriter == null)
{
NewBlock();
}
string line = $"{DateTime.UtcNow.ToString("O")} {message}";
_pageWriter.WriteLine(line);
_resultsBlockWriter.WriteLine(line);
_totalLines++;
if (line.IndexOf('\n') != -1)
@@ -78,21 +99,25 @@ namespace GitHub.Runner.Common
}
}
_byteCount += System.Text.Encoding.UTF8.GetByteCount(line);
var bytes = System.Text.Encoding.UTF8.GetByteCount(line);
_byteCount += bytes;
_blockByteCount += bytes;
if (_byteCount >= PageSize)
{
NewPage();
}
if (_blockByteCount >= BlockSize)
{
NewBlock();
}
}
public void End()
{
EndPage();
}
private void Create()
{
NewPage();
EndBlock(true);
}
private void NewPage()
@@ -117,5 +142,27 @@ namespace GitHub.Runner.Common
_jobServerQueue.QueueFileUpload(_timelineId, _timelineRecordId, "DistributedTask.Core.Log", "CustomToolLog", _dataFileName, true);
}
}
private void NewBlock()
{
EndBlock(false);
_blockByteCount = 0;
_resultsDataFileName = Path.Combine(_resultsBlockFolder, $"{_timelineId}_{_timelineRecordId}.{++_blockCount}");
_resultsBlockData = new FileStream(_resultsDataFileName, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite);
_resultsBlockWriter = new StreamWriter(_resultsBlockData, System.Text.Encoding.UTF8);
}
private void EndBlock(bool finalize)
{
if (_resultsBlockWriter != null)
{
_resultsBlockWriter.Flush();
_resultsBlockData.Flush();
_resultsBlockWriter.Dispose();
_resultsBlockWriter = null;
_resultsBlockData = null;
_jobServerQueue.QueueResultsUpload(_timelineRecordId, "ResultsLog", _resultsDataFileName, "Results.Core.Log", deleteSource: true, finalize, firstBlock: _resultsDataFileName.EndsWith(".1"), totalLines: _totalLines);
}
}
}
}
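In short, every line is now written to both the existing page file and a results block file; once a block reaches 2 MB it is closed and queued for upload, and End() queues the final block with finalize set. A compact sketch of that rotation rule follows; the class and member names are illustrative, not the runner's.

using System;

class BlockRotationSketch
{
    public const int BlockSize = 2 * 1024 * 1024; // 2 MB, matching the constant above
    private int _blockBytes;
    private int _blockCount;

    // Block files are named "<timelineId>_<recordId>.<n>"; a ".1" suffix marks the first block.
    public string NextBlockName(Guid timelineId, Guid recordId) =>
        $"{timelineId}_{recordId}.{++_blockCount}";

    // Called after each appended line; rotate() stands in for EndBlock(false) followed by NewBlock().
    public void OnBytesWritten(int byteCount, Action rotate)
    {
        _blockBytes += byteCount;
        if (_blockBytes >= BlockSize)
        {
            rotate();
            _blockBytes = 0;
        }
    }
}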

View File

@@ -1,11 +1,13 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi.RawClient;
namespace GitHub.Runner.Common
@@ -16,6 +18,10 @@ namespace GitHub.Runner.Common
Task ConnectAsync(Uri serverUrl, VssCredentials credentials);
Task<AgentJobRequestMessage> GetJobMessageAsync(string id, CancellationToken token);
Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken token);
Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
}
public sealed class RunServer : RunnerService, IRunServer
@@ -29,7 +35,7 @@ namespace GitHub.Runner.Common
{
requestUri = serverUri;
_connection = VssUtil.CreateRawConnection(new Uri(serverUri.Authority), credentials);
_connection = VssUtil.CreateRawConnection(serverUri, credentials);
_runServiceHttpClient = await _connection.GetClientAsync<RunServiceHttpClient>();
_hasConnection = true;
}
@@ -55,5 +61,24 @@ namespace GitHub.Runner.Common
return jobMessage;
}
public Task CompleteJobAsync(Guid planId, Guid jobId, TaskResult result, Dictionary<String, VariableValue> outputs, IList<StepResult> stepResults, CancellationToken cancellationToken)
{
CheckConnection();
return RetryRequest(
async () => await _runServiceHttpClient.CompleteJobAsync(requestUri, planId, jobId, result, outputs, stepResults, cancellationToken), cancellationToken);
}
public Task<RenewJobResponse> RenewJobAsync(Guid planId, Guid jobId, CancellationToken cancellationToken)
{
CheckConnection();
var renewJobResponse = RetryRequest<RenewJobResponse>(
async () => await _runServiceHttpClient.RenewJobAsync(requestUri, planId, jobId, cancellationToken), cancellationToken);
if (renewJobResponse == null)
{
throw new TaskOrchestrationJobNotFoundException(jobId.ToString());
}
return renewJobResponse;
}
}
}

View File

@@ -38,7 +38,7 @@ namespace GitHub.Runner.Common
Task<TaskAgentSession> CreateAgentSessionAsync(Int32 poolId, TaskAgentSession session, CancellationToken cancellationToken);
Task DeleteAgentMessageAsync(Int32 poolId, Int64 messageId, Guid sessionId, CancellationToken cancellationToken);
Task DeleteAgentSessionAsync(Int32 poolId, Guid sessionId, CancellationToken cancellationToken);
Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, CancellationToken cancellationToken);
Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken);
// job request
Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
@@ -272,10 +272,10 @@ namespace GitHub.Runner.Common
return _messageTaskAgentClient.DeleteAgentSessionAsync(poolId, sessionId, cancellationToken: cancellationToken);
}
public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, CancellationToken cancellationToken)
public Task<TaskAgentMessage> GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken)
{
CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, cancellationToken: cancellationToken);
return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, status, runnerVersion, cancellationToken: cancellationToken);
}
//-----------------------------------------------------------------

View File

@@ -68,6 +68,19 @@ namespace GitHub.Runner.Common
throw new InvalidOperationException(nameof(EstablishVssConnection));
}
protected async Task RetryRequest(Func<Task> func,
CancellationToken cancellationToken,
int maxRetryAttemptsCount = 5
)
{
async Task<Unit> wrappedFunc()
{
await func();
return Unit.Value;
}
await RetryRequest<Unit>(wrappedFunc, cancellationToken, maxRetryAttemptsCount);
}
protected async Task<T> RetryRequest<T>(Func<Task<T>> func,
CancellationToken cancellationToken,
int maxRetryAttemptsCount = 5
@@ -85,7 +98,7 @@ namespace GitHub.Runner.Common
// TODO: Add handling of non-retriable exceptions: https://github.com/github/actions-broker/issues/122
catch (Exception ex) when (retryCount < maxRetryAttemptsCount)
{
Trace.Error("Catch exception during get full job message");
Trace.Error("Catch exception during request");
Trace.Error(ex);
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
Trace.Warning($"Back off {backOff.TotalSeconds} seconds before next retry. {maxRetryAttemptsCount - retryCount} attempt left.");

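The non-generic RetryRequest overload above simply wraps a Task-returning call in a Unit-returning delegate so it can reuse the generic retry loop. Here is a hedged sketch of that bridge plus a basic backoff loop; it is illustrative and does not reproduce the runner's exact retry or backoff policy (Random.Shared assumes .NET 6+).

using System;
using System.Threading;
using System.Threading.Tasks;

readonly struct UnitSketch { public static readonly UnitSketch Value = default; }

static class RetrySketch
{
    // Void-returning calls are bridged into the generic retry loop via a placeholder value.
    public static async Task RetryAsync(Func<Task> func, CancellationToken token, int maxAttempts = 5)
    {
        async Task<UnitSketch> Wrapped() { await func(); return UnitSketch.Value; }
        await RetryAsync<UnitSketch>(Wrapped, token, maxAttempts);
    }

    public static async Task<T> RetryAsync<T>(Func<Task<T>> func, CancellationToken token, int maxAttempts = 5)
    {
        for (var attempt = 0; ; attempt++)
        {
            try { return await func(); }
            catch (Exception) when (attempt < maxAttempts)
            {
                // back off a few seconds before the next attempt, roughly like the code above
                await Task.Delay(TimeSpan.FromSeconds(Random.Shared.Next(5, 16)), token);
            }
        }
    }
}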
View File

@@ -0,0 +1,96 @@
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
{
public sealed class StdoutTraceListener : ConsoleTraceListener
{
private readonly string _hostType;
public StdoutTraceListener(string hostType)
{
this._hostType = hostType;
}
// Copied and modified slightly from .Net Core source code. Modification was required to make it compile.
// There must be some TraceFilter extension class that is missing in this source code.
public override void TraceEvent(TraceEventCache eventCache, string source, TraceEventType eventType, int id, string message)
{
if (Filter != null && !Filter.ShouldTrace(eventCache, source, eventType, id, message, null, null, null))
{
return;
}
if (!string.IsNullOrEmpty(message))
{
var messageLines = message.Split(Environment.NewLine);
foreach (var messageLine in messageLines)
{
WriteHeader(source, eventType, id);
WriteLine(messageLine);
WriteFooter(eventCache);
}
}
}
internal bool IsEnabled(TraceOptions opts)
{
return (opts & TraceOutputOptions) != 0;
}
// Altered from the original .Net Core implementation.
private void WriteHeader(string source, TraceEventType eventType, int id)
{
string type = null;
switch (eventType)
{
case TraceEventType.Critical:
type = "CRIT";
break;
case TraceEventType.Error:
type = "ERR ";
break;
case TraceEventType.Warning:
type = "WARN";
break;
case TraceEventType.Information:
type = "INFO";
break;
case TraceEventType.Verbose:
type = "VERB";
break;
default:
type = eventType.ToString();
break;
}
Write(StringUtil.Format("[{0} {1:u} {2} {3}] ", _hostType.ToUpperInvariant(), DateTime.UtcNow, type, source));
}
// Copied and modified slightly from .Net Core source code to make it compile. The original code
// accesses a private indentLevel field. In this code it has been modified to use the getter/setter.
private void WriteFooter(TraceEventCache eventCache)
{
if (eventCache == null)
return;
IndentLevel++;
if (IsEnabled(TraceOptions.ProcessId))
WriteLine("ProcessId=" + eventCache.ProcessId);
if (IsEnabled(TraceOptions.ThreadId))
WriteLine("ThreadId=" + eventCache.ThreadId);
if (IsEnabled(TraceOptions.DateTime))
WriteLine("DateTime=" + eventCache.DateTime.ToString("o", CultureInfo.InvariantCulture));
if (IsEnabled(TraceOptions.Timestamp))
WriteLine("Timestamp=" + eventCache.Timestamp);
IndentLevel--;
}
}
}
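Each traced line is prefixed with the host type, a UTC timestamp, a four-character severity, and the trace source. An illustrative example of the resulting output; the host type and source name here are hypothetical.

using System;

string header = string.Format("[{0} {1:u} {2} {3}] ", "WORKER", DateTime.UtcNow, "INFO", "JobDispatcher");
Console.WriteLine(header + "Starting the job");
// prints something like: [WORKER 2023-03-06 22:15:04Z INFO JobDispatcher] Starting the job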

View File

@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common
string ReadSecret();
void Write(string message, ConsoleColor? colorCode = null);
void WriteLine();
void WriteLine(string line, ConsoleColor? colorCode = null);
void WriteLine(string line, ConsoleColor? colorCode = null, bool skipTracing = false);
void WriteError(Exception ex);
void WriteError(string line);
void WriteSection(string message);
@@ -116,9 +116,12 @@ namespace GitHub.Runner.Common
// Do not add a format string overload. Terminal messages are user facing and therefore
// should be localized. Use the Loc method in the StringUtil class.
public void WriteLine(string line, ConsoleColor? colorCode = null)
public void WriteLine(string line, ConsoleColor? colorCode = null, bool skipTracing = false)
{
Trace.Info($"WRITE LINE: {line}");
if (!skipTracing)
{
Trace.Info($"WRITE LINE: {line}");
}
if (!Silent)
{
if (colorCode != null)

View File

@@ -16,21 +16,23 @@ namespace GitHub.Runner.Common
{
private readonly ConcurrentDictionary<string, Tracing> _sources = new(StringComparer.OrdinalIgnoreCase);
private readonly HostTraceListener _hostTraceListener;
private readonly StdoutTraceListener _stdoutTraceListener;
private TraceSetting _traceSetting;
private ISecretMasker _secretMasker;
public TraceManager(HostTraceListener traceListener, ISecretMasker secretMasker)
: this(traceListener, new TraceSetting(), secretMasker)
public TraceManager(HostTraceListener traceListener, StdoutTraceListener stdoutTraceListener, ISecretMasker secretMasker)
: this(traceListener, stdoutTraceListener, new TraceSetting(), secretMasker)
{
}
public TraceManager(HostTraceListener traceListener, TraceSetting traceSetting, ISecretMasker secretMasker)
public TraceManager(HostTraceListener traceListener, StdoutTraceListener stdoutTraceListener, TraceSetting traceSetting, ISecretMasker secretMasker)
{
// Validate and store params.
ArgUtil.NotNull(traceListener, nameof(traceListener));
ArgUtil.NotNull(traceSetting, nameof(traceSetting));
ArgUtil.NotNull(secretMasker, nameof(secretMasker));
_hostTraceListener = traceListener;
_stdoutTraceListener = stdoutTraceListener;
_traceSetting = traceSetting;
_secretMasker = secretMasker;
@@ -81,7 +83,7 @@ namespace GitHub.Runner.Common
Level = sourceTraceLevel.ToSourceLevels()
};
}
return new Tracing(name, _secretMasker, sourceSwitch, _hostTraceListener);
return new Tracing(name, _secretMasker, sourceSwitch, _hostTraceListener, _stdoutTraceListener);
}
}
}

View File

@@ -12,7 +12,7 @@ namespace GitHub.Runner.Common
private ISecretMasker _secretMasker;
private TraceSource _traceSource;
public Tracing(string name, ISecretMasker secretMasker, SourceSwitch sourceSwitch, HostTraceListener traceListener)
public Tracing(string name, ISecretMasker secretMasker, SourceSwitch sourceSwitch, HostTraceListener traceListener, StdoutTraceListener stdoutTraceListener = null)
{
ArgUtil.NotNull(secretMasker, nameof(secretMasker));
_secretMasker = secretMasker;
@@ -27,6 +27,10 @@ namespace GitHub.Runner.Common
}
_traceSource.Listeners.Add(traceListener);
if (stdoutTraceListener != null)
{
_traceSource.Listeners.Add(stdoutTraceListener);
}
}
public void Info(string message)

View File

@@ -0,0 +1,8 @@
// Represents absence of value.
namespace GitHub.Runner.Common
{
public readonly struct Unit
{
public static readonly Unit Value = default;
}
}

View File

@@ -0,0 +1,14 @@
namespace GitHub.Runner.Common.Util
{
using System;
using GitHub.DistributedTask.WebApi;
public static class MessageUtil
{
public static bool IsRunServiceJob(string messageType)
{
return string.Equals(messageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase);
}
}
}

View File

@@ -56,7 +56,8 @@ namespace GitHub.Runner.Listener
new string[]
{
Constants.Runner.CommandLine.Args.Token,
Constants.Runner.CommandLine.Args.PAT
Constants.Runner.CommandLine.Args.PAT,
Constants.Runner.CommandLine.Flags.Local
},
// Valid run flags and args
[Constants.Runner.CommandLine.Commands.Run] =
@@ -86,6 +87,7 @@ namespace GitHub.Runner.Listener
public bool Help => TestFlag(Constants.Runner.CommandLine.Flags.Help);
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version);
public bool RemoveLocalConfig => TestFlag(Constants.Runner.CommandLine.Flags.Local);
// Keep this around since customers still relies on it
public bool RunOnce => TestFlag(Constants.Runner.CommandLine.Flags.Once);

View File

@@ -1,10 +1,3 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.Common.Internal;
using GitHub.Services.OAuth;
using System;
using System.Collections.Generic;
using System.Linq;
@@ -14,6 +7,13 @@ using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.Common.Internal;
using GitHub.Services.OAuth;
namespace GitHub.Runner.Listener.Configuration
{
@@ -636,7 +636,7 @@ namespace GitHub.Runner.Listener.Configuration
}
int retryCount = 0;
while(retryCount < 3)
while (retryCount < 3)
{
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
@@ -646,28 +646,29 @@ namespace GitHub.Runner.Listener.Configuration
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
var responseStatus = System.Net.HttpStatusCode.OK;
try
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
responseStatus = response.StatusCode;
var githubRequestId = GetGitHubRequestId(response.Headers);
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}' ({githubRequestId})");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}' (Request Id: {githubRequestId})");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
catch(Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
{
retryCount++;
Trace.Error($"Failed to get JIT runner token -- Attempt: {retryCount}");
@@ -714,22 +715,23 @@ namespace GitHub.Runner.Listener.Configuration
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
responseStatus = response.StatusCode;
var githubRequestId = GetGitHubRequestId(response.Headers);
if(response.IsSuccessStatusCode)
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}' ({githubRequestId})");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<GitHubAuthResult>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}' (Request Id: {githubRequestId})");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
catch(Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
catch (Exception ex) when (retryCount < 2 && responseStatus != System.Net.HttpStatusCode.NotFound)
{
retryCount++;
Trace.Error($"Failed to get tenant credentials -- Attempt: {retryCount}");
@@ -742,5 +744,14 @@ namespace GitHub.Runner.Listener.Configuration
}
return null;
}
private string GetGitHubRequestId(HttpResponseHeaders headers)
{
if (headers.TryGetValues("x-github-request-id", out var headerValues))
{
return headerValues.FirstOrDefault();
}
return string.Empty;
}
}
}

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]

View File

@@ -7,6 +7,7 @@ using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
@@ -58,6 +59,8 @@ namespace GitHub.Runner.Listener
public event EventHandler<JobStatusEventArgs> JobStatus;
private bool _isRunServiceJob;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -86,6 +89,8 @@ namespace GitHub.Runner.Listener
{
Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
_isRunServiceJob = MessageUtil.IsRunServiceJob(jobRequestMessage.MessageType);
WorkerDispatcher currentDispatch = null;
if (_jobDispatchedQueue.Count > 0)
{
@@ -239,6 +244,13 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
Trace.Error($"We are not yet checking the state of job request {jobDispatch.JobId}. Cancel the running worker right away.");
jobDispatch.WorkerCancellationTokenSource.Cancel();
return;
}
// based on the current design, server will only send one job for a given runner at a time.
// if the runner received a new job request while a previous job request is still running, this typically indicates two situations
// 1. a runner bug caused a server and runner mismatch on the state of the job request, e.g. the runner didn't renew the jobrequest
@@ -367,9 +379,11 @@ namespace GitHub.Runner.Listener
long requestId = message.RequestId;
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here for compat
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
Task renewJobRequest = RenewJobRequestAsync(message, systemConnection, _poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// wait till first renew succeed or job request is cancelled
// not even start worker if the first renew fail
@@ -400,6 +414,7 @@ namespace GitHub.Runner.Listener
Task<int> workerProcessTask = null;
object _outputLock = new();
List<string> workerOutput = new();
bool printToStdout = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Agent.PrintLogToStdout));
using (var processChannel = HostContext.CreateService<IProcessChannel>())
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
@@ -421,7 +436,15 @@ namespace GitHub.Runner.Listener
{
lock (_outputLock)
{
workerOutput.Add(stdout.Data);
if (!stdout.Data.StartsWith("[WORKER"))
{
workerOutput.Add(stdout.Data);
}
if (printToStdout)
{
term.WriteLine(stdout.Data, skipTracing: true);
}
}
}
};
@@ -499,7 +522,6 @@ namespace GitHub.Runner.Listener
// we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
@@ -522,11 +544,8 @@ namespace GitHub.Runner.Listener
detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicates the worker encountered an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
var jobServer = await InitializeJobServerAsync(systemConnection);
await LogWorkerProcessUnhandledException(jobServer, message, detailInfo);
// Go ahead to finish the job with result 'Failed' if the STDERR from worker is System.IO.IOException, since it typically means we are running out of disk space.
@@ -658,7 +677,7 @@ namespace GitHub.Runner.Listener
finally
{
Busy = false;
if (JobStatus != null)
{
JobStatus(this, new JobStatusEventArgs(TaskAgentStatus.Online));
@@ -666,9 +685,128 @@ namespace GitHub.Runner.Listener
}
}
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
internal async Task RenewJobRequestAsync(Pipelines.AgentJobRequestMessage message, ServiceEndpoint systemConnection, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
if (this._isRunServiceJob)
{
var runServer = await GetRunServerAsync(systemConnection);
await RenewJobRequestAsync(runServer, message.Plan.PlanId, message.JobId, firstJobRequestRenewed, token);
}
else
{
var runnerServer = HostContext.GetService<IRunnerServer>();
await RenewJobRequestAsync(runnerServer, poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, token);
}
}
private async Task RenewJobRequestAsync(IRunServer runServer, Guid planId, Guid jobId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
TaskAgentJobRequest request = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
// renew lock during job running.
// stop renewing only if the cancellation token for the lock renew task has been signaled or exceptions persist after retries.
while (!token.IsCancellationRequested)
{
try
{
var renewResponse = await runServer.RenewJobAsync(planId, jobId, token);
Trace.Info($"Successfully renewed job {jobId}, job is valid until {renewResponse.LockedUntil}");
if (!firstJobRequestRenewed.Task.IsCompleted)
{
// fire first renew succeed event.
firstJobRequestRenewed.TrySetResult(0);
}
if (encounteringError > 0)
{
encounteringError = 0;
HostContext.WritePerfCounter("JobRenewRecovered");
}
// renew again after 60 sec delay
await HostContext.Delay(TimeSpan.FromSeconds(60), token);
}
catch (TaskOrchestrationJobNotFoundException)
{
// no need for retry. the job is not valid anymore.
Trace.Info($"TaskOrchestrationJobNotFoundException received when renewing job {jobId}; the job is no longer valid, stop renewing the job request.");
return;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
// OperationCanceledException may be caused by an http timeout or _lockRenewalTokenSource.Cancel();
// Stop renewing only when the cancellation token has fired.
Trace.Info($"Job renewal has been cancelled, stop renewing job {jobId}.");
return;
}
catch (Exception ex)
{
Trace.Error($"Caught exception while renewing job {jobId}.");
Trace.Error(ex);
encounteringError++;
// retry
TimeSpan remainingTime = TimeSpan.Zero;
if (!firstJobRequestRenewed.Task.IsCompleted)
{
// retry 5 times every 10 sec for the first renew
if (firstRenewRetryLimit-- > 0)
{
remainingTime = TimeSpan.FromSeconds(10);
}
}
else
{
// retry until we reach lockedUntil + 5 mins extra buffer.
remainingTime = request.LockedUntil.Value + TimeSpan.FromMinutes(5) - DateTime.UtcNow;
}
if (remainingTime > TimeSpan.Zero)
{
TimeSpan delayTime;
if (!firstJobRequestRenewed.Task.IsCompleted)
{
Trace.Info($"Retrying lock renewal for job {jobId}. The first job renew request has failed.");
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10));
}
else
{
Trace.Info($"Retrying lock renewal for job {jobId}. Job is valid until {request.LockedUntil.Value}.");
if (encounteringError > 5)
{
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30));
}
else
{
delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));
}
}
try
{
// back-off before next retry.
await HostContext.Delay(delayTime, token);
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info($"Job renewal has been cancelled, stop renewing job {jobId}.");
}
}
else
{
Trace.Info($"Lock renewal has run out of retries, stop renewing the lock for job {jobId}.");
HostContext.WritePerfCounter("JobRenewReachLimit");
return;
}
}
}
}
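The run-service renew path follows the same cadence as the runner-server path: renew roughly every 60 seconds, stop when cancelled or when the service reports the job is gone, and back off between retries after transient failures. A simplified sketch of that loop is shown below; it is illustrative and omits the first-renew and retry-budget bookkeeping above.

using System;
using System.Threading;
using System.Threading.Tasks;

static class RenewLoopSketch
{
    public static async Task RunAsync(Func<CancellationToken, Task> renew, CancellationToken token)
    {
        while (!token.IsCancellationRequested)
        {
            try
            {
                await renew(token);                                // e.g. a renew-job call against the service
                await Task.Delay(TimeSpan.FromSeconds(60), token); // renew again after ~60 s
            }
            catch (OperationCanceledException) when (token.IsCancellationRequested)
            {
                return; // renewal cancelled
            }
            catch (Exception)
            {
                // transient failure: wait a bit and retry (the real code also gives up
                // once its retry budget is exhausted)
                await Task.Delay(TimeSpan.FromSeconds(10), token);
            }
        }
    }
}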
private async Task RenewJobRequestAsync(IRunnerServer runnerServer, int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
var runnerServer = HostContext.GetService<IRunnerServer>();
TaskAgentJobRequest request = null;
int firstRenewRetryLimit = 5;
int encounteringError = 0;
@@ -831,90 +969,93 @@ namespace GitHub.Runner.Listener
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection));
ArgUtil.NotNull(systemConnection, nameof(systemConnection));
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
var server = await InitializeJobServerAsync(systemConnection);
await jobServer.ConnectAsync(jobConnection);
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
var updatedRecords = new List<TimelineRecord>();
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
var logRecords = new Dictionary<Guid, TimelineRecord>();
foreach (var log in logs)
if (server is IJobServer jobServer)
{
var logName = Path.GetFileNameWithoutExtension(log);
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
if (logNameParts.Length != 3)
{
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
var logPageSeperator = logName.IndexOf('_');
var logRecordId = Guid.Empty;
var pageNumber = 0;
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
var updatedRecords = new List<TimelineRecord>();
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
var logRecords = new Dictionary<Guid, TimelineRecord>();
foreach (var log in logs)
{
Trace.Warning($"log file '{log}' does not belong to the current job");
continue;
}
if (!Guid.TryParse(logNameParts[1], out logRecordId))
{
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
if (!int.TryParse(logNameParts[2], out pageNumber))
{
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
if (record != null)
{
if (!logPages.ContainsKey(record.Id))
var logName = Path.GetFileNameWithoutExtension(log);
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
if (logNameParts.Length != 3)
{
logPages[record.Id] = new Dictionary<int, string>();
logRecords[record.Id] = record;
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
var logPageSeperator = logName.IndexOf('_');
var logRecordId = Guid.Empty;
var pageNumber = 0;
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
{
Trace.Warning($"log file '{log}' does not belong to the current job");
continue;
}
logPages[record.Id][pageNumber] = log;
}
}
foreach (var pages in logPages)
{
var record = logRecords[pages.Key];
if (record.Log == null)
{
// Create the log
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
// Need to post timeline record updates to reflect the log creation
updatedRecords.Add(record.Clone());
}
for (var i = 1; i <= pages.Value.Count; i++)
{
var logFile = pages.Value[i];
// Upload the contents
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
if (!Guid.TryParse(logNameParts[1], out logRecordId))
{
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
IOUtil.DeleteFile(logFile);
if (!int.TryParse(logNameParts[2], out pageNumber))
{
Trace.Warning($"log file '{log}' doesn't follow naming convention 'GUID_GUID_INT'.");
continue;
}
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
if (record != null)
{
if (!logPages.ContainsKey(record.Id))
{
logPages[record.Id] = new Dictionary<int, string>();
logRecords[record.Id] = record;
}
logPages[record.Id][pageNumber] = log;
}
}
foreach (var pages in logPages)
{
var record = logRecords[pages.Key];
if (record.Log == null)
{
// Create the log
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
// Need to post timeline record updates to reflect the log creation
updatedRecords.Add(record.Clone());
}
for (var i = 1; i <= pages.Value.Count; i++)
{
var logFile = pages.Value[i];
// Upload the contents
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
}
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
IOUtil.DeleteFile(logFile);
}
}
if (updatedRecords.Count > 0)
{
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
}
}
if (updatedRecords.Count > 0)
else
{
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
Trace.Info("Job server does not support log upload yet.");
}
}
catch (Exception ex)
@@ -934,6 +1075,12 @@ namespace GitHub.Runner.Listener
return;
}
if (this._isRunServiceJob)
{
Trace.Verbose($"Skip FinishAgentRequest call from Listener because MessageType is {message.MessageType}");
return;
}
var runnerServer = HostContext.GetService<IRunnerServer>();
int completeJobRequestRetryLimit = 5;
List<Exception> exceptions = new();
@@ -970,66 +1117,117 @@ namespace GitHub.Runner.Listener
}
// log an error issue to job level timeline record
private async Task LogWorkerProcessUnhandledException(IJobServer jobServer, Pipelines.AgentJobRequestMessage message, string errorMessage)
private async Task LogWorkerProcessUnhandledException(IRunnerService server, Pipelines.AgentJobRequestMessage message, string errorMessage)
{
try
if (server is IJobServer jobServer)
{
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
try
{
if (!string.IsNullOrEmpty(errorMessage) &&
message.Variables.TryGetValue("DistributedTask.EnableRunnerIPCDebug", out var enableRunnerIPCDebug) &&
StringUtil.ConvertToBoolean(enableRunnerIPCDebug.Value))
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
try
{
// the trace should be best effort and not affect any job result
var match = _invalidJsonRegex.Match(errorMessage);
if (match.Success &&
match.Groups.Count == 2)
if (!string.IsNullOrEmpty(errorMessage) &&
message.Variables.TryGetValue("DistributedTask.EnableRunnerIPCDebug", out var enableRunnerIPCDebug) &&
StringUtil.ConvertToBoolean(enableRunnerIPCDebug.Value))
{
var jsonPosition = int.Parse(match.Groups[1].Value);
var serializedJobMessage = JsonUtility.ToString(message);
var originalJson = serializedJobMessage.Substring(jsonPosition - 10, 20);
errorMessage = $"Runner sent Json at position '{jsonPosition}': {originalJson} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(originalJson))})\n{errorMessage}";
// the trace should be best effort and not affect any job result
var match = _invalidJsonRegex.Match(errorMessage);
if (match.Success &&
match.Groups.Count == 2)
{
var jsonPosition = int.Parse(match.Groups[1].Value);
var serializedJobMessage = JsonUtility.ToString(message);
var originalJson = serializedJobMessage.Substring(jsonPosition - 10, 20);
errorMessage = $"Runner sent Json at position '{jsonPosition}': {originalJson} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(originalJson))})\n{errorMessage}";
}
}
}
catch (Exception ex)
{
Trace.Error(ex);
errorMessage = $"Fail to check json IPC error: {ex.Message}\n{errorMessage}";
}
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
errorMessage = $"Fail to check json IPC error: {ex.Message}\n{errorMessage}";
}
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
catch (Exception ex)
else
{
Trace.Error("Fail to report unhandled exception from Runner.Worker process");
Trace.Error(ex);
Trace.Info("Job server does not support reporting unhandled exceptions yet, error message: {0}", errorMessage);
return;
}
}
// raise job completed event to fail the job.
private async Task ForceFailJob(IJobServer jobServer, Pipelines.AgentJobRequestMessage message)
private async Task ForceFailJob(IRunnerService server, Pipelines.AgentJobRequestMessage message)
{
try
if (server is IJobServer jobServer)
{
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
try
{
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Failed);
await jobServer.RaisePlanEventAsync<JobCompletedEvent>(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
}
}
catch (Exception ex)
else if (server is IRunServer runServer)
{
Trace.Error("Fail to raise JobCompletedEvent back to service.");
Trace.Error(ex);
try
{
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, TaskResult.Failed, outputs: null, stepResults: null, CancellationToken.None);
}
catch (Exception ex)
{
Trace.Error("Fail to raise job completion back to service.");
Trace.Error(ex);
}
}
else
{
throw new NotSupportedException($"Server type {server.GetType().FullName} is not supported.");
}
}
private async Task<IRunnerService> InitializeJobServerAsync(ServiceEndpoint systemConnection)
{
if (this._isRunServiceJob)
{
return await GetRunServerAsync(systemConnection);
}
else
{
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
return jobServer;
}
}
private async Task<IRunServer> GetRunServerAsync(ServiceEndpoint systemConnection)
{
var runServer = HostContext.GetService<IRunServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
await runServer.ConnectAsync(systemConnection.Url, jobServerCredential);
return runServer;
}
private class WorkerDispatcher : IDisposable

View File

@@ -211,6 +211,7 @@ namespace GitHub.Runner.Listener
_session.SessionId,
_lastMessageId,
runnerStatus,
BuildConstants.RunnerPackage.Version,
_getMessagesTokenSource.Token);
// Decrypt the message body if the session is using encryption

View File

@@ -9,6 +9,7 @@ using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Listener.Check;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Sdk;
@@ -135,6 +136,12 @@ namespace GitHub.Runner.Listener
// remove config files, remove service, and exit
if (command.Remove)
{
// only remove local config files and exit
if (command.RemoveLocalConfig)
{
configManager.DeleteLocalRunnerConfig();
return Constants.Runner.ReturnCode.Success;
}
try
{
await configManager.UnconfigureAsync(command);
@@ -430,12 +437,22 @@ namespace GitHub.Runner.Listener
message = await getNextMessage; //get next message
HostContext.WritePerfCounter($"MessageReceived_{message.MessageType}");
if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase) ||
string.Equals(message.MessageType, RunnerRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
{
if (autoUpdateInProgress == false)
{
autoUpdateInProgress = true;
var runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
AgentRefreshMessage runnerUpdateMessage = null;
if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase))
{
runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
}
else
{
var brokerRunnerUpdateMessage = JsonUtility.FromString<RunnerRefreshMessage>(message.Body);
runnerUpdateMessage = new AgentRefreshMessage(brokerRunnerUpdateMessage.RunnerId, brokerRunnerUpdateMessage.TargetVersion, TimeSpan.FromSeconds(brokerRunnerUpdateMessage.TimeoutInSeconds));
}
#if DEBUG
// Can mock the update for testing
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
@@ -486,7 +503,7 @@ namespace GitHub.Runner.Listener
}
}
// Broker flow
else if (string.Equals(message.MessageType, JobRequestMessageTypes.RunnerJobRequest, StringComparison.OrdinalIgnoreCase))
else if (MessageUtil.IsRunServiceJob(message.MessageType))
{
if (autoUpdateInProgress || runOnceJobReceived)
{
@@ -637,6 +654,7 @@ Config Options:
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
--runnergroup string Name of the runner group to add this runner to (defaults to the default runner group)
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
--local Removes the runner config files from your local machine. Used as an option to the remove command
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
--replace Replace any existing runner with the same name (default false)
--pat GitHub personal access token with repo scope. Used for checking network connectivity when executing `.{separator}run.{ext} --check`

View File

@@ -73,7 +73,7 @@ namespace GitHub.Runner.Sdk
{
var headerValues = new List<ProductInfoHeaderValue>();
headerValues.Add(new ProductInfoHeaderValue($"GitHubActionsRunner-Plugin", BuildConstants.RunnerPackage.Version));
headerValues.Add(new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})"));
headerValues.Add(new ProductInfoHeaderValue($"({StringUtil.SanitizeUserAgentHeader(RuntimeInformation.OSDescription)})"));
if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)
{

View File

@@ -264,7 +264,17 @@ namespace GitHub.Runner.Sdk
{
foreach (KeyValuePair<string, string> kvp in environment)
{
#if OS_WINDOWS
string tempKey = String.IsNullOrWhiteSpace(kvp.Key) ? kvp.Key : kvp.Key.Split('\0')[0];
string tempValue = String.IsNullOrWhiteSpace(kvp.Value) ? kvp.Value : kvp.Value.Split('\0')[0];
if (!String.IsNullOrWhiteSpace(tempKey))
{
_proc.StartInfo.Environment[tempKey] = tempValue;
}
#else
_proc.StartInfo.Environment[kvp.Key] = kvp.Value;
#endif
}
}
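On Windows the child-process environment block is NUL-delimited, so the change above truncates keys and values at the first '\0' before applying them. A tiny illustration of that truncation, outside the runner's code:

using System;

string value = "first\0second";
string sanitized = string.IsNullOrWhiteSpace(value) ? value : value.Split('\0')[0];
Console.WriteLine(sanitized); // prints "first"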

View File

@@ -164,7 +164,6 @@ namespace GitHub.Runner.Sdk
{
continue;
}
_noProxyList.Add(noProxyInfo);
}
}
@@ -207,6 +206,11 @@ namespace GitHub.Runner.Sdk
{
foreach (var noProxy in _noProxyList)
{
// bypass on wildcard no_proxy
if (string.Equals(noProxy.Host, "*", StringComparison.OrdinalIgnoreCase))
{
return true;
}
var matchHost = false;
var matchPort = false;

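With the wildcard check above, a single '*' entry in no_proxy (for example no_proxy='*') bypasses the proxy for every host before any host or port matching runs. A simplified sketch of the decision; the real matcher also handles host suffixes and ports.

using System;
using System.Linq;

static bool BypassProxy(string host, string[] noProxyList) =>
    noProxyList.Any(entry =>
        string.Equals(entry, "*", StringComparison.OrdinalIgnoreCase) ||
        string.Equals(entry, host, StringComparison.OrdinalIgnoreCase));

Console.WriteLine(BypassProxy("github.com", new[] { "*" }));            // True
Console.WriteLine(BypassProxy("github.com", new[] { "example.org" }));  // False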
View File

@@ -123,5 +123,12 @@ namespace GitHub.Runner.Sdk
{
return value?.Substring(0, Math.Min(value.Length, count));
}
// Fixes format violations e.g. https://github.com/actions/runner/issues/2165
public static string SanitizeUserAgentHeader(string header)
{
return header.Replace("(", "[").Replace(")", "]").Trim();
}
}
}
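Parentheses are significant in the User-Agent comment syntax, so the helper above rewrites them to brackets and trims the value. An illustrative usage with a made-up OS description:

using System;

static string Sanitize(string header) => header.Replace("(", "[").Replace(")", "]").Trim();

Console.WriteLine(Sanitize("Ubuntu 22.04.2 LTS (Jammy Jellyfish) "));
// prints: Ubuntu 22.04.2 LTS [Jammy Jellyfish]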

View File

@@ -6,9 +6,16 @@ namespace GitHub.Runner.Sdk
{
public static bool IsHostedServer(UriBuilder gitHubUrl)
{
return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_FORCE_GHES")))
{
return false;
}
return
string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase) ||
gitHubUrl.Host.EndsWith(".ghe.com", StringComparison.OrdinalIgnoreCase);
}
public static Uri GetCredentialEmbeddedUrl(Uri baseUrl, string username, string password)

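The hosted-server check above now also treats *.ghe.com hosts as hosted and can be forced into the GHES path with the GITHUB_ACTIONS_RUNNER_FORCE_GHES environment variable. A simplified sketch of the decision; the plain "true" comparison here stands in for StringUtil.ConvertToBoolean, and the hosts are hypothetical.

using System;

static bool IsHosted(UriBuilder url) =>
    !string.Equals(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_FORCE_GHES"), "true", StringComparison.OrdinalIgnoreCase) &&
    (string.Equals(url.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
     url.Host.EndsWith(".ghe.com", StringComparison.OrdinalIgnoreCase));

Console.WriteLine(IsHosted(new UriBuilder("https://octocat.ghe.com")));   // True
Console.WriteLine(IsHosted(new UriBuilder("https://ghes.example.com")));  // False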
View File

@@ -19,7 +19,7 @@ namespace GitHub.Runner.Sdk
{
var headerValues = new List<ProductInfoHeaderValue>();
headerValues.AddRange(additionalUserAgents);
headerValues.Add(new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})"));
headerValues.Add(new ProductInfoHeaderValue($"({StringUtil.SanitizeUserAgentHeader(RuntimeInformation.OSDescription)})"));
if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)
{
@@ -116,7 +116,7 @@ namespace GitHub.Runner.Sdk
// settings are applied to an HttpRequestMessage.
settings.AcceptLanguages.Remove(CultureInfo.InvariantCulture);
RawConnection connection = new(serverUri, new RawHttpMessageHandler(credentials.ToOAuthCredentials(), settings), additionalDelegatingHandler);
RawConnection connection = new(serverUri, new RawHttpMessageHandler(credentials.Federated, settings), additionalDelegatingHandler);
return connection;
}

View File

@@ -25,7 +25,6 @@ namespace GitHub.Runner.Worker
public interface IActionRunner : IStep, IRunnerService
{
ActionRunStage Stage { get; set; }
bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context);
Pipelines.ActionStep Action { get; set; }
}
@@ -182,7 +181,7 @@ namespace GitHub.Runner.Worker
else
{
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues, ExecutionContext.ExpressionFunctions);
inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues, ExecutionContext.ExpressionFunctions, ExecutionContext.ToExpressionState());
}
var userInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
@@ -285,25 +284,67 @@ namespace GitHub.Runner.Worker
}
public bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context)
/// <summary>
/// Attempts to update the DisplayName.
/// As the "Try..." name implies, this method should never throw an exception.
/// Returns true if the DisplayName is already present or it was successfully updated.
/// </summary>
public bool TryUpdateDisplayName(out bool updated)
{
updated = false;
// REVIEW: This try/catch can be removed if some future implementation of EvaluateDisplayName and UpdateTimelineRecordDisplayName
// can make reasonable guarantees that they won't throw an exception.
try
{
// This attempt is only worthwhile at the "Main" stage.
// When the job starts, there's an initial attempt to evaluate the DisplayName. (see JobExtension::InitializeJob)
// During the "Pre" stage, we expect that no contexts will have changed since the initial evaluation.
// "Main" stage is handled here.
// During the "Post" stage, it no longer matters.
if (this.Stage == ActionRunStage.Main && EvaluateDisplayName(this.ExecutionContext.ExpressionValues, this.ExecutionContext, out updated))
{
if (updated)
{
this.ExecutionContext.UpdateTimelineRecordDisplayName(this.DisplayName);
}
}
}
catch (Exception ex)
{
Trace.Warning("Caught exception while attempting to evaluate/update the step's DisplayName. Exception Details: {0}", ex);
}
// For consistency with other implementations of TryUpdateDisplayName we use !string.IsNullOrEmpty below,
// but note that (at the time of this writing) ActionRunner::DisplayName::get always returns a non-empty string due to its fallback logic.
// In other words, the net effect is that this particular implementation of TryUpdateDisplayName will always return true.
return !string.IsNullOrEmpty(this.DisplayName);
}
/// <summary>
/// Attempts to evaluate the DisplayName of this IActionRunner.
/// Returns true if the DisplayName is already present or it was successfully evaluated.
/// </summary>
public bool EvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context, out bool updated)
{
ArgUtil.NotNull(context, nameof(context));
ArgUtil.NotNull(Action, nameof(Action));
// If we have already expanded the display name, there is no need to expand it again
// TODO: Remove the ShouldEvaluateDisplayName check and field post m158 deploy, we should do it by default once the server is updated
updated = false;
// If we have already expanded the display name, don't bother attempting [re-]expansion.
if (_didFullyEvaluateDisplayName || !string.IsNullOrEmpty(Action.DisplayName))
{
return false;
return true;
}
bool didFullyEvaluate;
_displayName = GenerateDisplayName(Action, contextData, context, out didFullyEvaluate);
_displayName = GenerateDisplayName(Action, contextData, context, out bool didFullyEvaluate);
// If we evaluated fully mask any secrets
// If we evaluated, fully mask any secrets
if (didFullyEvaluate)
{
_displayName = HostContext.SecretMasker.MaskSecrets(_displayName);
updated = true;
}
context.Debug($"Set step '{Action.Name}' display name to: '{_displayName}'");
_didFullyEvaluateDisplayName = didFullyEvaluate;
@@ -314,7 +355,7 @@ namespace GitHub.Runner.Worker
{
DictionaryContextData expressionValues = ExecutionContext.GetExpressionValues(stepHost);
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, expressionValues, ExecutionContext.ExpressionFunctions);
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, expressionValues, ExecutionContext.ExpressionFunctions, ExecutionContext.ToExpressionState());
return inputs;
}

View File

@@ -33,8 +33,14 @@ namespace GitHub.Runner.Worker
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_dockerManager = HostContext.GetService<IDockerCommandManager>();
_containerHookManager = HostContext.GetService<IContainerHookManager>();
if (string.IsNullOrEmpty(Environment.GetEnvironmentVariable(Constants.Hooks.ContainerHooksPath)))
{
_dockerManager = HostContext.GetService<IDockerCommandManager>();
}
else
{
_containerHookManager = HostContext.GetService<IContainerHookManager>();
}
}
public async Task StartContainersAsync(IExecutionContext executionContext, object data)

View File

@@ -1,4 +1,4 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
@@ -6,6 +6,7 @@ using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
@@ -80,6 +81,7 @@ namespace GitHub.Runner.Worker
// logging
long Write(string tag, string message);
void QueueAttachFile(string type, string name, string filePath);
void QueueSummaryFile(string name, string filePath, Guid stepRecordId);
// timeline record update methods
void Start(string currentOperation = null);
@@ -436,6 +438,17 @@ namespace GitHub.Runner.Worker
PublishStepTelemetry();
var stepResult = new StepResult();
stepResult.ExternalID = _record.Id;
stepResult.Conclusion = _record.Result ?? TaskResult.Succeeded;
stepResult.Status = _record.State;
stepResult.Number = _record.Order;
stepResult.Name = _record.Name;
stepResult.StartedAt = _record.StartTime;
stepResult.CompletedAt = _record.FinishTime;
Global.StepsResult.Add(stepResult);
if (Root != this)
{
// only dispose TokenSource for step level ExecutionContext
@@ -709,6 +722,9 @@ namespace GitHub.Runner.Worker
// ActionsStepTelemetry for entire job
Global.StepsTelemetry = new List<ActionsStepTelemetry>();
// Steps results for entire job
Global.StepsResult = new List<StepResult>();
// Job Outputs
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
@@ -846,6 +862,18 @@ namespace GitHub.Runner.Worker
_jobServerQueue.QueueFileUpload(_mainTimelineId, _record.Id, type, name, filePath, deleteSource: false);
}
public void QueueSummaryFile(string name, string filePath, Guid stepRecordId)
{
ArgUtil.NotNullOrEmpty(name, nameof(name));
ArgUtil.NotNullOrEmpty(filePath, nameof(filePath));
if (!File.Exists(filePath))
{
throw new FileNotFoundException($"Can't upload (name:{name}) file: {filePath}. File does not exist.");
}
_jobServerQueue.QueueResultsUpload(stepRecordId, name, filePath, ChecksAttachmentType.StepSummary, deleteSource: false, finalize: true, firstBlock: true, totalLines: 0);
}
// Add OnMatcherChanged
public void Add(OnMatcherChanged handler)
{
@@ -1085,7 +1113,7 @@ namespace GitHub.Runner.Worker
{
if (contextData != null &&
contextData.TryGetValue(PipelineTemplateConstants.Vars, out var varsPipelineContextData) &&
varsPipelineContextData != null &&
varsPipelineContextData != null &&
varsPipelineContextData is DictionaryContextData varsContextData)
{
// Set debug variables only when StepDebug/RunnerDebug variables are not present.

View File

@@ -29,6 +29,9 @@ namespace GitHub.Runner.Worker.Expressions
githubContext.TryGetValue(PipelineTemplateConstants.Workspace, out var workspace);
var workspaceData = workspace as StringContextData;
ArgUtil.NotNull(workspaceData, nameof(workspaceData));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
string githubWorkspace = workspaceData.Value;
bool followSymlink = false;

View File

@@ -204,13 +204,23 @@ namespace GitHub.Runner.Worker
}
}
var attachmentName = !context.IsEmbedded
? context.Id.ToString()
var attachmentName = !context.IsEmbedded
? context.Id.ToString()
: context.EmbeddedId.ToString();
Trace.Info($"Queueing file ({filePath}) for attachment upload ({attachmentName})");
// Attachments must be added to the parent context (job), not the current context (step)
context.Root.QueueAttachFile(ChecksAttachmentType.StepSummary, attachmentName, scrubbedFilePath);
// Dual-upload the same file to the Results service
context.Global.Variables.TryGetValue("system.github.results_endpoint", out string resultsReceiverEndpoint);
if (resultsReceiverEndpoint != null)
{
Trace.Info($"Queueing results file ({filePath}) for attachment upload ({attachmentName})");
var stepId = context.Id;
// Attachments must be added to the parent context (job), not the current context (step)
context.Root.QueueSummaryFile(attachmentName, scrubbedFilePath, stepId);
}
}
catch (Exception e)
{

View File

@@ -13,6 +13,7 @@ namespace GitHub.Runner.Worker
"action_repository",
"action",
"actor",
"actor_id",
"api_url",
"base_ref",
"env",
@@ -27,8 +28,10 @@ namespace GitHub.Runner.Worker
"ref_protected",
"ref_type",
"ref",
"repository_owner",
"repository",
"repository_id",
"repository_owner",
"repository_owner_id",
"retention_days",
"run_attempt",
"run_id",
@@ -39,7 +42,9 @@ namespace GitHub.Runner.Worker
"step_summary",
"triggering_actor",
"workflow",
"workspace",
"workflow_ref",
"workflow_sha",
"workspace"
};
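// For orientation: these keys are the allow-list consumed by GetRuntimeEnvironmentVariables()
// below. The prefixing/upper-casing is not shown in this hunk, so the mapping sketched here is
// an assumption, e.g.:
//   github.actor_id             -> GITHUB_ACTOR_ID
//   github.repository_id        -> GITHUB_REPOSITORY_ID
//   github.repository_owner_id  -> GITHUB_REPOSITORY_OWNER_ID
//   github.workflow_ref         -> GITHUB_WORKFLOW_REF
//   github.workflow_sha         -> GITHUB_WORKFLOW_SHA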
public IEnumerable<KeyValuePair<string, string>> GetRuntimeEnvironmentVariables()

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using GitHub.Actions.RunService.WebApi;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Worker.Container;
@@ -16,6 +17,7 @@ namespace GitHub.Runner.Worker
public IList<String> FileTable { get; set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; set; }
public List<ActionsStepTelemetry> StepsTelemetry { get; set; }
public List<StepResult> StepsResult { get; set; }
public List<JobTelemetry> JobTelemetry { get; set; }
public TaskOrchestrationPlanReference Plan { get; set; }
public List<string> PrependPath { get; set; }

View File

@@ -239,7 +239,7 @@ namespace GitHub.Runner.Worker.Handlers
// Set action_status to the success of the current composite action
var actionResult = ExecutionContext.Result?.ToActionResult() ?? ActionResult.Success;
step.ExecutionContext.SetGitHubContext("action_status", actionResult.ToString());
step.ExecutionContext.SetGitHubContext("action_status", actionResult.ToString().ToLowerInvariant());
// Initialize env context
Trace.Info("Initialize Env context for embedded step");

View File

@@ -38,8 +38,17 @@ namespace GitHub.Runner.Worker.Handlers
// Update the env dictionary.
AddInputsToEnvironment();
var dockerManager = HostContext.GetService<IDockerCommandManager>();
var containerHookManager = HostContext.GetService<IContainerHookManager>();
IDockerCommandManager dockerManager = null;
IContainerHookManager containerHookManager = null;
if (FeatureManager.IsContainerHooksEnabled(ExecutionContext.Global.Variables))
{
containerHookManager = HostContext.GetService<IContainerHookManager>();
}
else
{
dockerManager = HostContext.GetService<IDockerCommandManager>();
}
string dockerFile = null;
// container image hasn't been built/pulled yet

View File

@@ -306,13 +306,13 @@ namespace GitHub.Runner.Worker
}
}
actionRunner.TryEvaluateDisplayName(contextData, context);
actionRunner.EvaluateDisplayName(contextData, context, out _);
jobSteps.Add(actionRunner);
if (prepareResult.PreStepTracker.TryGetValue(step.Id, out var preStep))
{
Trace.Info($"Adding pre-{action.DisplayName}.");
preStep.TryEvaluateDisplayName(contextData, context);
preStep.EvaluateDisplayName(contextData, context, out _);
preStep.DisplayName = $"Pre {preStep.DisplayName}";
preJobSteps.Add(preStep);
}
@@ -321,25 +321,28 @@ namespace GitHub.Runner.Worker
if (message.Variables.TryGetValue("system.workflowFileFullPath", out VariableValue workflowFileFullPath))
{
context.Output($"Uses: {workflowFileFullPath.Value}");
var usesLogText = $"Uses: {workflowFileFullPath.Value}";
var reference = GetWorkflowReference(message.Variables);
context.Output(usesLogText + reference);
if (message.ContextData.TryGetValue("inputs", out var pipelineContextData))
{
var inputs = pipelineContextData.AssertDictionary("inputs");
if (inputs.Any())
if (inputs.Any())
{
context.Output($"##[group] Inputs");
foreach (var input in inputs)
foreach (var input in inputs)
{
context.Output($" {input.Key}: {input.Value}");
}
context.Output("##[endgroup]");
}
}
}
if (!string.IsNullOrWhiteSpace(message.JobDisplayName))
{
context.Output($"Complete job name: {message.JobDisplayName}");
}
if (!string.IsNullOrWhiteSpace(message.JobDisplayName))
{
context.Output($"Complete job name: {message.JobDisplayName}");
}
var intraActionStates = new Dictionary<Guid, Dictionary<string, string>>();
@@ -452,6 +455,24 @@ namespace GitHub.Runner.Worker
}
}
private string GetWorkflowReference(IDictionary<string, VariableValue> variables)
{
var reference = "";
if (variables.TryGetValue("system.workflowFileSha", out VariableValue workflowFileSha))
{
if (variables.TryGetValue("system.workflowFileRef", out VariableValue workflowFileRef)
&& !string.IsNullOrEmpty(workflowFileRef.Value))
{
reference += $"@{workflowFileRef.Value} ({workflowFileSha.Value})";
}
else
{
reference += $"@{workflowFileSha.Value}";
}
}
return reference;
}
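// Combined with the "Uses:" line built above, the reference suffix produces output along
// these lines (path, ref, and SHA values are illustrative, not taken from this diff):
//   Uses: octocat/example/.github/workflows/ci.yml@refs/heads/main (ffac537e6cbbf934b08745a378932722df287a53)
//   Uses: octocat/example/.github/workflows/ci.yml@ffac537e6cbbf934b08745a378932722df287a53   (only system.workflowFileSha present)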
public void FinalizeJob(IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, DateTime jobStartTimeUtc)
{
Trace.Entering();

View File

@@ -1,6 +1,7 @@
using System;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
namespace GitHub.Runner.Worker
{
@@ -32,5 +33,18 @@ namespace GitHub.Runner.Worker
{
await _runAsync(ExecutionContext, _data);
}
public bool TryUpdateDisplayName(out bool updated)
{
updated = false;
return !string.IsNullOrEmpty(this.DisplayName);
}
public bool EvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context, out bool updated)
{
updated = false;
return !string.IsNullOrEmpty(this.DisplayName);
}
}
}

View File

@@ -6,7 +6,7 @@ using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
@@ -20,7 +20,7 @@ namespace GitHub.Runner.Worker
[ServiceLocator(Default = typeof(JobRunner))]
public interface IJobRunner : IRunnerService
{
Task<TaskResult> RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken);
Task<TaskResult> RunAsync(AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken);
}
public sealed class JobRunner : RunnerService, IJobRunner
@@ -29,7 +29,7 @@ namespace GitHub.Runner.Worker
private RunnerSettings _runnerSettings;
private ITempDirectoryManager _tempDirectoryManager;
public async Task<TaskResult> RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken)
public async Task<TaskResult> RunAsync(AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken)
{
// Validate parameters.
Trace.Entering();
@@ -40,21 +40,34 @@ namespace GitHub.Runner.Worker
Trace.Info("Job ID {0}", message.JobId);
DateTime jobStartTimeUtc = DateTime.UtcNow;
IRunnerService server = null;
ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (MessageUtil.IsRunServiceJob(message.MessageType))
{
var runServer = HostContext.GetService<IRunServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
await runServer.ConnectAsync(systemConnection.Url, jobServerCredential);
server = runServer;
}
else
{
// Setup the job server and job server queue.
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
Uri jobServerUrl = systemConnection.Url;
// Setup the job server and job server queue.
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
Uri jobServerUrl = systemConnection.Url;
Trace.Info($"Creating job server with URL: {jobServerUrl}");
// jobServerQueue is the throttling reporter.
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, new DelegatingHandler[] { new ThrottlingReportHandler(_jobServerQueue) });
await jobServer.ConnectAsync(jobConnection);
_jobServerQueue.Start(message);
server = jobServer;
}
Trace.Info($"Creating job server with URL: {jobServerUrl}");
// jobServerQueue is the throttling reporter.
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, new DelegatingHandler[] { new ThrottlingReportHandler(_jobServerQueue) });
await jobServer.ConnectAsync(jobConnection);
_jobServerQueue.Start(message);
HostContext.WritePerfCounter($"WorkerJobServerQueueStarted_{message.RequestId.ToString()}");
IExecutionContext jobContext = null;
@@ -99,7 +112,7 @@ namespace GitHub.Runner.Worker
{
Trace.Error(ex);
jobContext.Error(ex);
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Failed);
return await CompleteJobAsync(server, jobContext, message, TaskResult.Failed);
}
if (jobContext.Global.WriteDebug)
@@ -136,7 +149,7 @@ namespace GitHub.Runner.Worker
// don't log error issue to job ExecutionContext, since server owns the job level issue
Trace.Error($"Job is cancelled during initialize.");
Trace.Error($"Caught exception: {ex}");
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Canceled);
return await CompleteJobAsync(server, jobContext, message, TaskResult.Canceled);
}
catch (Exception ex)
{
@@ -144,7 +157,7 @@ namespace GitHub.Runner.Worker
// don't log error issue to job ExecutionContext, since server owns the job level issue
Trace.Error($"Job initialize failed.");
Trace.Error($"Caught exception from {nameof(jobExtension.InitializeJob)}: {ex}");
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Failed);
return await CompleteJobAsync(server, jobContext, message, TaskResult.Failed);
}
// trace out all steps
@@ -181,7 +194,7 @@ namespace GitHub.Runner.Worker
// Log the error and fail the job.
Trace.Error($"Caught exception from job steps {nameof(StepsRunner)}: {ex}");
jobContext.Error(ex);
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Failed);
return await CompleteJobAsync(server, jobContext, message, TaskResult.Failed);
}
finally
{
@@ -192,7 +205,7 @@ namespace GitHub.Runner.Worker
Trace.Info($"Job result after all job steps finish: {jobContext.Result ?? TaskResult.Succeeded}");
Trace.Info("Completing the job execution context.");
return await CompleteJobAsync(jobServer, jobContext, message);
return await CompleteJobAsync(server, jobContext, message);
}
finally
{
@@ -206,6 +219,66 @@ namespace GitHub.Runner.Worker
}
}
private async Task<TaskResult> CompleteJobAsync(IRunnerService server, IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, TaskResult? taskResult = null)
{
if (server is IRunServer runServer)
{
return await CompleteJobAsync(runServer, jobContext, message, taskResult);
}
else if (server is IJobServer jobServer)
{
return await CompleteJobAsync(jobServer, jobContext, message, taskResult);
}
else
{
throw new NotSupportedException();
}
}
private async Task<TaskResult> CompleteJobAsync(IRunServer runServer, IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, TaskResult? taskResult = null)
{
jobContext.Debug($"Finishing: {message.JobDisplayName}");
TaskResult result = jobContext.Complete(taskResult);
if (jobContext.Global.Variables.TryGetValue("Node12ActionsWarnings", out var node12Warnings))
{
var actions = string.Join(", ", StringUtil.ConvertFromJson<HashSet<string>>(node12Warnings));
jobContext.Warning(string.Format(Constants.Runner.Node12DetectedAfterEndOfLife, actions));
}
// Make sure to clean temp after file uploads, since pending file uploads may still be using the TEMP dir.
_tempDirectoryManager?.CleanupTempDirectory();
// Load any upgrade telemetry
LoadFromTelemetryFile(jobContext.Global.JobTelemetry);
// Make sure we don't submit secrets as telemetry
MaskTelemetrySecrets(jobContext.Global.JobTelemetry);
Trace.Info($"Raising job completed against run service");
var completeJobRetryLimit = 5;
var exceptions = new List<Exception>();
while (completeJobRetryLimit-- > 0)
{
try
{
await runServer.CompleteJobAsync(message.Plan.PlanId, message.JobId, result, jobContext.JobOutputs, jobContext.Global.StepsResult, default);
return result;
}
catch (Exception ex)
{
Trace.Error($"Catch exception while attempting to complete job {message.JobId}, job request {message.RequestId}.");
Trace.Error(ex);
exceptions.Add(ex);
}
// delay 5 seconds before next retry.
await Task.Delay(TimeSpan.FromSeconds(5));
}
// rethrow exceptions from all attempts.
throw new AggregateException(exceptions);
}
private async Task<TaskResult> CompleteJobAsync(IJobServer jobServer, IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, TaskResult? taskResult = null)
{
jobContext.Debug($"Finishing: {message.JobDisplayName}");

View File

@@ -1,12 +1,9 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
@@ -14,8 +11,6 @@ using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Expressions;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
@@ -26,6 +21,8 @@ namespace GitHub.Runner.Worker
string DisplayName { get; set; }
IExecutionContext ExecutionContext { get; set; }
TemplateToken Timeout { get; }
bool TryUpdateDisplayName(out bool updated);
bool EvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context, out bool updated);
Task RunAsync();
}
@@ -195,6 +192,12 @@ namespace GitHub.Runner.Worker
}
else
{
// This is our last, best chance to expand the display name. (At this point, all the requirements for successful expansion should be met.)
// That being said, evaluating the display name should still be considered as a "best effort" exercise. (It's not critical or paramount.)
// For that reason, we call a safe "Try..." wrapper method to ensure that any potential problems we encounter in evaluating the display name
// don't interfere with our ultimate goal within this code block: evaluation of the condition.
step.TryUpdateDisplayName(out _);
try
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionTraceWriter);
@@ -256,14 +259,6 @@ namespace GitHub.Runner.Worker
private async Task RunStepAsync(IStep step, CancellationToken jobCancellationToken)
{
// Check to see if we can expand the display name
if (step is IActionRunner actionRunner &&
actionRunner.Stage == ActionRunStage.Main &&
actionRunner.TryEvaluateDisplayName(step.ExecutionContext.ExpressionValues, step.ExecutionContext))
{
step.ExecutionContext.UpdateTimelineRecordDisplayName(actionRunner.DisplayName);
}
// Start the step
Trace.Info("Starting the step.");
step.ExecutionContext.Debug($"Starting: {step.DisplayName}");

View File

@@ -1,20 +0,0 @@
using GitHub.Services.OAuth;
namespace GitHub.Services.Common
{
public static class VssCredentialsExtension
{
public static VssOAuthCredential ToOAuthCredentials(
this VssCredentials credentials)
{
if (credentials.Federated.CredentialType == VssCredentialsType.OAuth)
{
return credentials.Federated as VssOAuthCredential;
}
else
{
return null;
}
}
}
}

View File

@@ -12,20 +12,20 @@ namespace GitHub.Services.Common
public class RawHttpMessageHandler: HttpMessageHandler
{
public RawHttpMessageHandler(
VssOAuthCredential credentials)
FederatedCredential credentials)
: this(credentials, new RawClientHttpRequestSettings())
{
}
public RawHttpMessageHandler(
VssOAuthCredential credentials,
FederatedCredential credentials,
RawClientHttpRequestSettings settings)
: this(credentials, settings, new HttpClientHandler())
{
}
public RawHttpMessageHandler(
VssOAuthCredential credentials,
FederatedCredential credentials,
RawClientHttpRequestSettings settings,
HttpMessageHandler innerHandler)
{
@@ -56,7 +56,7 @@ namespace GitHub.Services.Common
/// <summary>
/// Gets the credentials associated with this handler.
/// </summary>
public VssOAuthCredential Credentials
public FederatedCredential Credentials
{
get;
private set;
@@ -111,7 +111,7 @@ namespace GitHub.Services.Common
// Ensure that we attempt to use the most appropriate authentication mechanism by default.
if (m_tokenProvider == null)
{
m_tokenProvider = this.Credentials.GetTokenProvider(request.RequestUri);
m_tokenProvider = this.Credentials.CreateTokenProvider(request.RequestUri, null, null);
}
}
@@ -254,7 +254,7 @@ namespace GitHub.Services.Common
private CredentialWrapper m_credentialWrapper;
private object m_thisLock;
private const Int32 m_maxAuthRetries = 3;
private VssOAuthTokenProvider m_tokenProvider;
private IssuedTokenProvider m_tokenProvider;
//.Net Core does not attempt NTLM schema on Linux, unless ICredentials is a CredentialCache instance
//This workaround may not be needed after this corefx fix is consumed: https://github.com/dotnet/corefx/pull/7923

View File

@@ -450,6 +450,8 @@ namespace GitHub.DistributedTask.WebApi
/// <param name="poolId"></param>
/// <param name="sessionId"></param>
/// <param name="lastMessageId"></param>
/// <param name="status"></param>
/// <param name="runnerVersion"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
[EditorBrowsable(EditorBrowsableState.Never)]
@@ -458,6 +460,7 @@ namespace GitHub.DistributedTask.WebApi
Guid sessionId,
long? lastMessageId = null,
TaskAgentStatus? status = null,
string runnerVersion = null,
object userState = null,
CancellationToken cancellationToken = default)
{
@@ -475,12 +478,16 @@ namespace GitHub.DistributedTask.WebApi
{
queryParams.Add("status", status.Value.ToString());
}
if (runnerVersion != null)
{
queryParams.Add("runnerVersion", runnerVersion);
}
return SendAsync<TaskAgentMessage>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(5.1, 1),
version: new ApiResourceVersion(6.0, 1),
queryParameters: queryParams,
userState: userState,
cancellationToken: cancellationToken);

View File

@@ -42,9 +42,10 @@ namespace GitHub.DistributedTask.Pipelines
IList<String> fileTable,
TemplateToken jobOutputs,
IList<TemplateToken> defaults,
ActionsEnvironmentReference actionsEnvironment)
ActionsEnvironmentReference actionsEnvironment,
String messageType = JobRequestMessageTypes.PipelineAgentJobRequest)
{
this.MessageType = JobRequestMessageTypes.PipelineAgentJobRequest;
this.MessageType = messageType;
this.Plan = plan;
this.JobId = jobId;
this.JobDisplayName = jobDisplayName;

View File

@@ -164,13 +164,14 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public Dictionary<String, String> EvaluateStepInputs(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState = null)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
var context = CreateContext(contextData, expressionFunctions, expressionState);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepWith, token, 0, null, omitHeader: true);
@@ -455,7 +456,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
private readonly String[] s_expressionValueNames = new[]
{
PipelineTemplateConstants.GitHub,
PipelineTemplateConstants.Needs,
PipelineTemplateConstants.Strategy,
PipelineTemplateConstants.Matrix,
PipelineTemplateConstants.Needs,

View File

@@ -1,75 +0,0 @@
using System;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using Sdk.WebApi.WebApi;
namespace GitHub.DistributedTask.WebApi
{
[ResourceArea(TaskResourceIds.AreaId)]
public class RunServiceHttpClient : RawHttpClientBase
{
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials)
: base(baseUrl, credentials)
{
}
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
RawClientHttpRequestSettings settings)
: base(baseUrl, credentials, settings)
{
}
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
params DelegatingHandler[] handlers)
: base(baseUrl, credentials, handlers)
{
}
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
RawClientHttpRequestSettings settings,
params DelegatingHandler[] handlers)
: base(baseUrl, credentials, settings, handlers)
{
}
public RunServiceHttpClient(
Uri baseUrl,
HttpMessageHandler pipeline,
Boolean disposeHandler)
: base(baseUrl, pipeline, disposeHandler)
{
}
public Task<Pipelines.AgentJobRequestMessage> GetJobMessageAsync(
Uri requestUri,
string messageId,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
var payload = new {
StreamID = messageId
};
var payloadJson = JsonUtility.ToString(payload);
var requestContent = new StringContent(payloadJson, System.Text.Encoding.UTF8, "application/json");
return SendAsync<Pipelines.AgentJobRequestMessage>(
httpMethod,
additionalHeaders: null,
requestUri: requestUri,
content: requestContent,
cancellationToken: cancellationToken);
}
}
}

View File

@@ -0,0 +1,49 @@
using Newtonsoft.Json;
using System;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public sealed class RunnerRefreshMessage
{
public static readonly String MessageType = "RunnerRefresh";
[JsonConstructor]
internal RunnerRefreshMessage()
{
}
public RunnerRefreshMessage(
Int32 runnerId,
String targetVersion,
int? timeoutInSeconds = null)
{
this.RunnerId = runnerId;
this.TimeoutInSeconds = timeoutInSeconds ?? (int)TimeSpan.FromMinutes(60).TotalSeconds;
this.TargetVersion = targetVersion;
}
[DataMember]
public Int32 RunnerId
{
get;
private set;
}
[DataMember]
public int TimeoutInSeconds
{
get;
private set;
}
[DataMember]
public String TargetVersion
{
get;
private set;
}
}
}
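// A brief construction sketch; runner id and target version are illustrative. With the
// one-hour fallback above, omitting the timeout resolves TimeoutInSeconds to 3600.
var refresh = new RunnerRefreshMessage(runnerId: 42, targetVersion: "2.303.0");
// refresh.TimeoutInSeconds == 3600; refresh.TargetVersion == "2.303.0"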

View File

@@ -1,4 +1,4 @@
using GitHub.Services.Common;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using System;
using System.Runtime.Serialization;
@@ -27,7 +27,6 @@ namespace GitHub.DistributedTask.WebApi
this.Type = type;
this.Name = name;
}
[DataMember]
public String Type
@@ -101,6 +100,7 @@ namespace GitHub.DistributedTask.WebApi
public static readonly String Summary = "DistributedTask.Core.Summary";
public static readonly String FileAttachment = "DistributedTask.Core.FileAttachment";
public static readonly String DiagnosticLog = "DistributedTask.Core.DiagnosticLog";
public static readonly String ResultsLog = "Results.Core.Log";
}
[GenerateAllConstants]

View File

@@ -0,0 +1,13 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Actions.RunService.WebApi
{
[DataContract]
public class AcquireJobRequest
{
[DataMember(Name = "streamId", EmitDefaultValue = false)]
public string StreamID { get; set; }
}
}

View File

@@ -0,0 +1,26 @@
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Actions.RunService.WebApi
{
[DataContract]
public class CompleteJobRequest
{
[DataMember(Name = "planId", EmitDefaultValue = false)]
public Guid PlanID { get; set; }
[DataMember(Name = "jobId", EmitDefaultValue = false)]
public Guid JobID { get; set; }
[DataMember(Name = "conclusion")]
public TaskResult Conclusion { get; set; }
[DataMember(Name = "outputs", EmitDefaultValue = false)]
public Dictionary<string, VariableValue> Outputs { get; set; }
[DataMember(Name = "stepResults", EmitDefaultValue = false)]
public IList<StepResult> StepResults { get; set; }
}
}

View File

@@ -0,0 +1,15 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.Actions.RunService.WebApi
{
[DataContract]
public class RenewJobRequest
{
[DataMember(Name = "planId", EmitDefaultValue = false)]
public Guid PlanID { get; set; }
[DataMember(Name = "jobId", EmitDefaultValue = false)]
public Guid JobID { get; set; }
}
}

View File

@@ -0,0 +1,16 @@
using System;
using System.Runtime.Serialization;
namespace Sdk.RSWebApi.Contracts
{
[DataContract]
public class RenewJobResponse
{
[DataMember]
public DateTime LockedUntil
{
get;
internal set;
}
}
}

View File

@@ -0,0 +1,38 @@
using System;
using System.Runtime.Serialization;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
namespace GitHub.Actions.RunService.WebApi
{
[DataContract]
public class StepResult
{
[DataMember(Name = "external_id", EmitDefaultValue = false)]
public Guid ExternalID { get; set; }
[DataMember(Name = "number", EmitDefaultValue = false)]
public int? Number { get; set; }
[DataMember(Name = "name", EmitDefaultValue = false)]
public string Name { get; set; }
[DataMember(Name = "status")]
public TimelineRecordState? Status { get; set; }
[DataMember(Name = "conclusion")]
public TaskResult? Conclusion { get; set; }
[DataMember(Name = "started_at", EmitDefaultValue = false)]
public DateTime? StartedAt { get; set; }
[DataMember(Name = "completed_at", EmitDefaultValue = false)]
public DateTime? CompletedAt { get; set; }
[DataMember(Name = "completed_log_url", EmitDefaultValue = false)]
public string CompletedLogURL { get; set; }
[DataMember(Name = "completed_log_lines", EmitDefaultValue = false)]
public long? CompletedLogLines { get; set; }
}
}

View File

@@ -0,0 +1,131 @@
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using Sdk.RSWebApi.Contracts;
using Sdk.WebApi.WebApi;
namespace GitHub.Actions.RunService.WebApi
{
public class RunServiceHttpClient : RawHttpClientBase
{
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials)
: base(baseUrl, credentials)
{
}
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
RawClientHttpRequestSettings settings)
: base(baseUrl, credentials, settings)
{
}
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
params DelegatingHandler[] handlers)
: base(baseUrl, credentials, handlers)
{
}
public RunServiceHttpClient(
Uri baseUrl,
VssOAuthCredential credentials,
RawClientHttpRequestSettings settings,
params DelegatingHandler[] handlers)
: base(baseUrl, credentials, settings, handlers)
{
}
public RunServiceHttpClient(
Uri baseUrl,
HttpMessageHandler pipeline,
Boolean disposeHandler)
: base(baseUrl, pipeline, disposeHandler)
{
}
public Task<AgentJobRequestMessage> GetJobMessageAsync(
Uri requestUri,
string messageId,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
var payload = new AcquireJobRequest
{
StreamID = messageId
};
requestUri = new Uri(requestUri, "acquirejob");
var requestContent = new ObjectContent<AcquireJobRequest>(payload, new VssJsonMediaTypeFormatter(true));
return SendAsync<AgentJobRequestMessage>(
httpMethod,
requestUri: requestUri,
content: requestContent,
cancellationToken: cancellationToken);
}
public Task CompleteJobAsync(
Uri requestUri,
Guid planId,
Guid jobId,
TaskResult result,
Dictionary<String, VariableValue> outputs,
IList<StepResult> stepResults,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
var payload = new CompleteJobRequest()
{
PlanID = planId,
JobID = jobId,
Conclusion = result,
Outputs = outputs,
StepResults = stepResults
};
requestUri = new Uri(requestUri, "completejob");
var requestContent = new ObjectContent<CompleteJobRequest>(payload, new VssJsonMediaTypeFormatter(true));
return SendAsync(
httpMethod,
requestUri,
content: requestContent,
cancellationToken: cancellationToken);
}
public Task<RenewJobResponse> RenewJobAsync(
Uri requestUri,
Guid planId,
Guid jobId,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
var payload = new RenewJobRequest()
{
PlanID = planId,
JobID = jobId
};
requestUri = new Uri(requestUri, "renewjob");
var requestContent = new ObjectContent<RenewJobRequest>(payload, new VssJsonMediaTypeFormatter(true));
return SendAsync<RenewJobResponse>(
httpMethod,
requestUri,
content: requestContent,
cancellationToken: cancellationToken);
}
}
}
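// A hedged usage sketch of the client above, inside an async method. The endpoint, message
// id, and payload values are placeholders, and authentication is omitted here (the runner
// normally supplies OAuth credentials resolved from the job message's system connection).
var runServiceUrl = new Uri("https://run-service.example.com/");
var client = new RunServiceHttpClient(runServiceUrl, new HttpClientHandler(), disposeHandler: true);

// Acquire the job payload for a queued message, then report the final conclusion.
AgentJobRequestMessage job = await client.GetJobMessageAsync(runServiceUrl, "message-id", CancellationToken.None);
await client.CompleteJobAsync(
    runServiceUrl,
    job.Plan.PlanId,
    job.JobId,
    TaskResult.Succeeded,
    outputs: new Dictionary<string, VariableValue>(),
    stepResults: new List<StepResult>(),
    cancellationToken: CancellationToken.None);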

View File

@@ -0,0 +1,134 @@
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
namespace GitHub.Services.Results.Contracts
{
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedStepSummaryURLRequest
{
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string StepBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedStepSummaryURLResponse
{
[DataMember]
public string SummaryUrl;
[DataMember]
public long SoftSizeLimit;
[DataMember]
public string BlobStorageType;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class StepSummaryMetadataCreate
{
[DataMember]
public string StepBackendId;
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public long Size;
[DataMember]
public string UploadedAt;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedJobLogsURLRequest
{
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedJobLogsURLResponse
{
[DataMember]
public string LogsUrl;
[DataMember]
public string BlobStorageType;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedStepLogsURLRequest
{
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string StepBackendId;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class GetSignedStepLogsURLResponse
{
[DataMember]
public string LogsUrl;
[DataMember]
public string BlobStorageType;
[DataMember]
public long SoftSizeLimit;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class JobLogsMetadataCreate
{
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string UploadedAt;
[DataMember]
public long LineCount;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class StepLogsMetadataCreate
{
[DataMember]
public string WorkflowRunBackendId;
[DataMember]
public string WorkflowJobRunBackendId;
[DataMember]
public string StepBackendId;
[DataMember]
public string UploadedAt;
[DataMember]
public long LineCount;
}
[DataContract]
[JsonObject(NamingStrategyType = typeof(SnakeCaseNamingStrategy))]
public class CreateMetadataResponse
{
[DataMember]
public bool Ok;
}
public static class BlobStorageTypes
{
public static readonly string AzureBlobStorage = "BLOB_STORAGE_TYPE_AZURE";
public static readonly string Unspecified = "BLOB_STORAGE_TYPE_UNSPECIFIED";
}
}
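// Because these contracts are annotated with SnakeCaseNamingStrategy, the Twirp request
// bodies use snake_case member names on the wire. A small illustration (member ordering in
// the serialized output may differ):
var request = new GetSignedStepSummaryURLRequest
{
    WorkflowRunBackendId = "run-backend-id",
    WorkflowJobRunBackendId = "job-backend-id",
    StepBackendId = "step-backend-id",
};
var json = Newtonsoft.Json.JsonConvert.SerializeObject(request);
// json is roughly:
// {"workflow_job_run_backend_id":"job-backend-id","workflow_run_backend_id":"run-backend-id","step_backend_id":"step-backend-id"}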

View File

@@ -101,6 +101,17 @@ namespace Sdk.WebApi.WebApi
}
}
protected Task<T> SendAsync<T>(
HttpMethod method,
Uri requestUri,
HttpContent content = null,
IEnumerable<KeyValuePair<String, String>> queryParameters = null,
Object userState = null,
CancellationToken cancellationToken = default(CancellationToken))
{
return SendAsync<T>(method, null, requestUri, content, queryParameters, userState, cancellationToken);
}
protected async Task<T> SendAsync<T>(
HttpMethod method,
IEnumerable<KeyValuePair<String, String>> additionalHeaders,

View File

@@ -0,0 +1,341 @@
using System;
using System.IO;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.Results.Contracts;
using System.Net.Http.Formatting;
using Sdk.WebApi.WebApi;
namespace GitHub.Services.Results.Client
{
public class ResultsHttpClient : RawHttpClientBase
{
public ResultsHttpClient(
Uri baseUrl,
HttpMessageHandler pipeline,
string token,
bool disposeHandler)
: base(baseUrl, pipeline, disposeHandler)
{
m_token = token;
m_resultsServiceUrl = baseUrl;
m_formatter = new JsonMediaTypeFormatter();
}
// Get SAS URL calls
private async Task<T> GetResultsSignedURLResponse<R, T>(Uri uri, CancellationToken cancellationToken, R request)
{
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri))
{
requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
requestMessage.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));
using (HttpContent content = new ObjectContent<R>(request, m_formatter))
{
requestMessage.Content = content;
using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
{
return await ReadJsonContentAsync<T>(response, cancellationToken);
}
}
}
}
private async Task<GetSignedStepSummaryURLResponse> GetStepSummaryUploadUrlAsync(string planId, string jobId, Guid stepId, CancellationToken cancellationToken)
{
var request = new GetSignedStepSummaryURLRequest()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString()
};
var getStepSummarySignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetStepSummarySignedBlobURL);
return await GetResultsSignedURLResponse<GetSignedStepSummaryURLRequest, GetSignedStepSummaryURLResponse>(getStepSummarySignedBlobURLEndpoint, cancellationToken, request);
}
private async Task<GetSignedStepLogsURLResponse> GetStepLogUploadUrlAsync(string planId, string jobId, Guid stepId, CancellationToken cancellationToken)
{
var request = new GetSignedStepLogsURLRequest()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString(),
};
var getStepLogsSignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetStepLogsSignedBlobURL);
return await GetResultsSignedURLResponse<GetSignedStepLogsURLRequest, GetSignedStepLogsURLResponse>(getStepLogsSignedBlobURLEndpoint, cancellationToken, request);
}
private async Task<GetSignedJobLogsURLResponse> GetJobLogUploadUrlAsync(string planId, string jobId, CancellationToken cancellationToken)
{
var request = new GetSignedJobLogsURLRequest()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
};
var getJobLogsSignedBlobURLEndpoint = new Uri(m_resultsServiceUrl, Constants.GetJobLogsSignedBlobURL);
return await GetResultsSignedURLResponse<GetSignedJobLogsURLRequest, GetSignedJobLogsURLResponse>(getJobLogsSignedBlobURLEndpoint, cancellationToken, request);
}
// Create metadata calls
private async Task CreateMetadata<R>(Uri uri, CancellationToken cancellationToken, R request, string timestamp)
{
using (HttpRequestMessage requestMessage = new HttpRequestMessage(HttpMethod.Post, uri))
{
requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", m_token);
requestMessage.Headers.Accept.Add(MediaTypeWithQualityHeaderValue.Parse("application/json"));
using (HttpContent content = new ObjectContent<R>(request, m_formatter))
{
requestMessage.Content = content;
using (var response = await SendAsync(requestMessage, HttpCompletionOption.ResponseContentRead, cancellationToken: cancellationToken))
{
var jsonResponse = await ReadJsonContentAsync<CreateMetadataResponse>(response, cancellationToken);
if (!jsonResponse.Ok)
{
throw new Exception($"Failed to mark {typeof(R).Name} upload as complete, status code: {response.StatusCode}, ok: {jsonResponse.Ok}, timestamp: {timestamp}");
}
}
}
}
}
private async Task StepSummaryUploadCompleteAsync(string planId, string jobId, Guid stepId, long size, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new StepSummaryMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString(),
Size = size,
UploadedAt = timestamp
};
var createStepSummaryMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateStepSummaryMetadata);
await CreateMetadata<StepSummaryMetadataCreate>(createStepSummaryMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task StepLogUploadCompleteAsync(string planId, string jobId, Guid stepId, long lineCount, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new StepLogsMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
StepBackendId = stepId.ToString(),
UploadedAt = timestamp,
LineCount = lineCount,
};
var createStepLogsMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateStepLogsMetadata);
await CreateMetadata<StepLogsMetadataCreate>(createStepLogsMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task JobLogUploadCompleteAsync(string planId, string jobId, long lineCount, CancellationToken cancellationToken)
{
var timestamp = DateTime.UtcNow.ToString(Constants.TimestampFormat);
var request = new JobLogsMetadataCreate()
{
WorkflowJobRunBackendId = jobId,
WorkflowRunBackendId = planId,
UploadedAt = timestamp,
LineCount = lineCount,
};
var createJobLogsMetadataEndpoint = new Uri(m_resultsServiceUrl, Constants.CreateJobLogsMetadata);
await CreateMetadata<JobLogsMetadataCreate>(createJobLogsMetadataEndpoint, cancellationToken, request, timestamp);
}
private async Task<HttpResponseMessage> UploadBlockFileAsync(string url, string blobStorageType, FileStream file, CancellationToken cancellationToken)
{
// Upload the file to the url
var request = new HttpRequestMessage(HttpMethod.Put, url)
{
Content = new StreamContent(file)
};
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureBlockBlob);
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
throw new Exception($"Failed to upload file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
}
return response;
}
}
private async Task<HttpResponseMessage> CreateAppendFileAsync(string url, string blobStorageType, CancellationToken cancellationToken)
{
var request = new HttpRequestMessage(HttpMethod.Put, url)
{
Content = new StringContent("")
};
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add(Constants.AzureBlobTypeHeader, Constants.AzureAppendBlob);
request.Content.Headers.Add("Content-Length", "0");
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
throw new Exception($"Failed to create append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}");
}
return response;
}
}
private async Task<HttpResponseMessage> UploadAppendFileAsync(string url, string blobStorageType, FileStream file, bool finalize, long fileSize, CancellationToken cancellationToken)
{
var comp = finalize ? "&comp=appendblock&seal=true" : "&comp=appendblock";
// Upload the file to the url
var request = new HttpRequestMessage(HttpMethod.Put, url + comp)
{
Content = new StreamContent(file)
};
if (blobStorageType == BlobStorageTypes.AzureBlobStorage)
{
request.Content.Headers.Add("Content-Length", fileSize.ToString());
request.Content.Headers.Add(Constants.AzureBlobSealedHeader, finalize.ToString());
}
using (var response = await SendAsync(request, HttpCompletionOption.ResponseHeadersRead, userState: null, cancellationToken))
{
if (!response.IsSuccessStatusCode)
{
throw new Exception($"Failed to upload append file, status code: {response.StatusCode}, reason: {response.ReasonPhrase}, object: {response}, fileSize: {fileSize}");
}
return response;
}
}
// Handle file upload for step summary
public async Task UploadStepSummaryAsync(string planId, string jobId, Guid stepId, string file, CancellationToken cancellationToken)
{
// Get the upload url
var uploadUrlResponse = await GetStepSummaryUploadUrlAsync(planId, jobId, stepId, cancellationToken);
if (uploadUrlResponse == null)
{
throw new Exception("Failed to get step summary upload url");
}
// TODO: Decide whether we want to throw here, or upload/truncate the data instead.
var fileSize = new FileInfo(file).Length;
if (fileSize > uploadUrlResponse.SoftSizeLimit)
{
throw new Exception($"File size is larger than the upload url allows, file size: {fileSize}, upload url size: {uploadUrlResponse.SoftSizeLimit}");
}
// Upload the file
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadBlockFileAsync(uploadUrlResponse.SummaryUrl, uploadUrlResponse.BlobStorageType, fileStream, cancellationToken);
}
// Send step summary upload complete message
await StepSummaryUploadCompleteAsync(planId, jobId, stepId, fileSize, cancellationToken);
}
// Handle file upload for step log
public async Task UploadResultsStepLogAsync(string planId, string jobId, Guid stepId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
// Get the upload url
var uploadUrlResponse = await GetStepLogUploadUrlAsync(planId, jobId, stepId, cancellationToken);
if (uploadUrlResponse == null || uploadUrlResponse.LogsUrl == null)
{
throw new Exception("Failed to get step log upload url");
}
// Create the Append blob
if (firstBlock)
{
await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
}
// Upload content
var fileSize = new FileInfo(file).Length;
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, fileStream, finalize, fileSize, cancellationToken);
}
// Update metadata
if (finalize)
{
// Send step log upload complete message
await StepLogUploadCompleteAsync(planId, jobId, stepId, lineCount, cancellationToken);
}
}
// Handle file upload for job log
public async Task UploadResultsJobLogAsync(string planId, string jobId, string file, bool finalize, bool firstBlock, long lineCount, CancellationToken cancellationToken)
{
// Get the upload url
var uploadUrlResponse = await GetJobLogUploadUrlAsync(planId, jobId, cancellationToken);
if (uploadUrlResponse == null || uploadUrlResponse.LogsUrl == null)
{
throw new Exception("Failed to get job log upload url");
}
// Create the Append blob
if (firstBlock)
{
await CreateAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, cancellationToken);
}
// Upload content
var fileSize = new FileInfo(file).Length;
using (var fileStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
{
var response = await UploadAppendFileAsync(uploadUrlResponse.LogsUrl, uploadUrlResponse.BlobStorageType, fileStream, finalize, fileSize, cancellationToken);
}
// Update metadata
if (finalize)
{
// Send job log upload complete message
await JobLogUploadCompleteAsync(planId, jobId, lineCount, cancellationToken);
}
}
private MediaTypeFormatter m_formatter;
private Uri m_resultsServiceUrl;
private string m_token;
}
// Constants specific to results
public static class Constants
{
public static readonly string TimestampFormat = "yyyy-MM-dd'T'HH:mm:ss.fffK";
public static readonly string ResultsReceiverTwirpEndpoint = "twirp/results.services.receiver.Receiver/";
public static readonly string GetStepSummarySignedBlobURL = ResultsReceiverTwirpEndpoint + "GetStepSummarySignedBlobURL";
public static readonly string CreateStepSummaryMetadata = ResultsReceiverTwirpEndpoint + "CreateStepSummaryMetadata";
public static readonly string GetStepLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetStepLogsSignedBlobURL";
public static readonly string CreateStepLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateStepLogsMetadata";
public static readonly string GetJobLogsSignedBlobURL = ResultsReceiverTwirpEndpoint + "GetJobLogsSignedBlobURL";
public static readonly string CreateJobLogsMetadata = ResultsReceiverTwirpEndpoint + "CreateJobLogsMetadata";
public static readonly string AzureBlobSealedHeader = "x-ms-blob-sealed";
public static readonly string AzureBlobTypeHeader = "x-ms-blob-type";
public static readonly string AzureBlockBlob = "BlockBlob";
public static readonly string AzureAppendBlob = "AppendBlob";
}
}
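// A hedged end-to-end sketch of the upload helpers above, inside an async method. The
// endpoint, backend ids, token, and file names are placeholders.
var resultsUrl = new Uri("https://results.example.com/");
var results = new ResultsHttpClient(resultsUrl, new HttpClientHandler(), token: "bearer-token", disposeHandler: true);
var stepId = Guid.NewGuid();

// Step summary: a single block-blob PUT followed by metadata finalization.
await results.UploadStepSummaryAsync("run-backend-id", "job-backend-id", stepId, "step-summary.md", CancellationToken.None);

// Step log: append-blob upload; the first block creates the blob, finalize seals it and
// records the line count with the metadata call.
await results.UploadResultsStepLogAsync("run-backend-id", "job-backend-id", stepId, "step.log",
    finalize: true, firstBlock: true, lineCount: 120, cancellationToken: CancellationToken.None);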

View File

@@ -824,6 +824,7 @@ namespace GitHub.Runner.Common.Tests
[InlineData("remove", "version")]
[InlineData("remove", "commit")]
[InlineData("remove", "check")]
[InlineData("remove", "local")]
[InlineData("run", "help")]
[InlineData("run", "version")]
[InlineData("run", "commit")]

View File

@@ -1,12 +1,17 @@
using System;
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Listener;
using GitHub.Runner.Listener.Configuration;
using GitHub.Services.WebApi;
using Moq;
using Sdk.RSWebApi.Contracts;
using Xunit;
using Pipelines = GitHub.DistributedTask.Pipelines;
@@ -18,6 +23,8 @@ namespace GitHub.Runner.Common.Tests.Listener
private Mock<IProcessChannel> _processChannel;
private Mock<IProcessInvoker> _processInvoker;
private Mock<IRunnerServer> _runnerServer;
private Mock<IRunServer> _runServer;
private Mock<IConfigurationStore> _configurationStore;
public JobDispatcherL0()
@@ -25,6 +32,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_processChannel = new Mock<IProcessChannel>();
_processInvoker = new Mock<IProcessInvoker>();
_runnerServer = new Mock<IRunnerServer>();
_runServer = new Mock<IRunServer>();
_configurationStore = new Mock<IConfigurationStore>();
}
@@ -139,7 +147,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully);
_runnerServer.Verify(x => x.RenewAgentRequestAsync(It.IsAny<int>(), It.IsAny<long>(), It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Exactly(5));
@@ -197,7 +205,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -205,6 +213,75 @@ namespace GitHub.Runner.Common.Tests.Listener
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void DispatcherRenewJobOnRunServiceStopOnJobNotFoundExceptions()
{
//Arrange
using (var hc = new TestHostContext(this))
{
int poolId = 1;
Int64 requestId = 1000;
int count = 0;
var trace = hc.GetTrace(nameof(DispatcherRenewJobOnRunServiceStopOnJobNotFoundExceptions));
TaskCompletionSource<int> firstJobRequestRenewed = new();
CancellationTokenSource cancellationTokenSource = new();
TaskAgentJobRequest request = new();
PropertyInfo lockUntilProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);
Assert.NotNull(lockUntilProperty);
lockUntilProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5));
hc.SetSingleton<IRunServer>(_runServer.Object);
hc.SetSingleton<IConfigurationStore>(_configurationStore.Object);
_configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 });
_ = _runServer.Setup(x => x.RenewJobAsync(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<CancellationToken>()))
.Returns(() =>
{
count++;
if (!firstJobRequestRenewed.Task.IsCompletedSuccessfully)
{
trace.Info("First renew happens.");
}
if (count < 5)
{
var response = new RenewJobResponse()
{
LockedUntil = request.LockedUntil.Value
};
return Task.FromResult<RenewJobResponse>(response);
}
else if (count == 5)
{
cancellationTokenSource.CancelAfter(10000);
throw new TaskOrchestrationJobNotFoundException("");
}
else
{
throw new InvalidOperationException("Should not reach here.");
}
});
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
EnableRunServiceJobForJobDispatcher(jobDispatcher);
// Set the value of the _isRunServiceJob field to true
var isRunServiceJobField = typeof(JobDispatcher).GetField("_isRunServiceJob", BindingFlags.NonPublic | BindingFlags.Instance);
isRunServiceJobField.SetValue(jobDispatcher, true);
await jobDispatcher.RenewJobRequestAsync(GetAgentJobRequestMessage(), GetServiceEndpoint(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
_runServer.Verify(x => x.RenewJobAsync(It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<CancellationToken>()), Times.Exactly(5));
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
@@ -256,7 +333,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -312,8 +389,9 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
// Act
await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
// Assert
_configurationStore.Verify(x => x.SaveSettings(It.Is<RunnerSettings>(settings => settings.AgentName == newName)), Times.Once);
@@ -368,7 +446,7 @@ namespace GitHub.Runner.Common.Tests.Listener
jobDispatcher.Initialize(hc);
// Act
await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
// Assert
_configurationStore.Verify(x => x.SaveSettings(It.IsAny<RunnerSettings>()), Times.Never);
@@ -421,7 +499,7 @@ namespace GitHub.Runner.Common.Tests.Listener
jobDispatcher.Initialize(hc);
// Act
await jobDispatcher.RenewJobRequestAsync(0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), 0, 0, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
// Assert
_configurationStore.Verify(x => x.SaveSettings(It.IsAny<RunnerSettings>()), Times.Never);
@@ -479,7 +557,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.True(cancellationTokenSource.IsCancellationRequested);
@@ -536,7 +614,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.False(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should fail.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -600,7 +678,7 @@ namespace GitHub.Runner.Common.Tests.Listener
var jobDispatcher = new JobDispatcher();
jobDispatcher.Initialize(hc);
await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
await jobDispatcher.RenewJobRequestAsync(It.IsAny<AgentJobRequestMessage>(), It.IsAny<ServiceEndpoint>(), poolId, requestId, Guid.Empty, Guid.NewGuid().ToString(), firstJobRequestRenewed, cancellationTokenSource.Token);
Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed.");
Assert.False(cancellationTokenSource.IsCancellationRequested);
@@ -659,5 +737,78 @@ namespace GitHub.Runner.Common.Tests.Listener
Assert.True(jobDispatcher.RunOnceJobCompleted.Task.Result, "JobDispatcher should set task complete token to 'TRUE' for one time agent.");
}
}
private static void EnableRunServiceJobForJobDispatcher(JobDispatcher jobDispatcher)
{
// Set the value of the _isRunServiceJob field to true
var isRunServiceJobField = typeof(JobDispatcher).GetField("_isRunServiceJob", BindingFlags.NonPublic | BindingFlags.Instance);
isRunServiceJobField.SetValue(jobDispatcher, true);
}
private static ServiceEndpoint GetServiceEndpoint()
{
var serviceEndpoint = new ServiceEndpoint
{
Authorization = new EndpointAuthorization
{
Scheme = EndpointAuthorizationSchemes.OAuth
}
};
serviceEndpoint.Authorization.Parameters.Add("AccessToken", "token");
return serviceEndpoint;
}
private static AgentJobRequestMessage GetAgentJobRequestMessage()
{
var message = new AgentJobRequestMessage(
new TaskOrchestrationPlanReference()
{
PlanType = "Build",
PlanId = Guid.NewGuid(),
Version = 1
},
new TimelineReference()
{
Id = Guid.NewGuid()
},
Guid.NewGuid(),
"jobDisplayName",
"jobName",
null,
null,
new List<TemplateToken>(),
new Dictionary<string, VariableValue>()
{
{
"variables",
new VariableValue()
{
IsSecret = false,
Value = "variables"
}
}
},
new List<MaskHint>()
{
new MaskHint()
{
Type = MaskType.Variable,
Value = "maskHints"
}
},
new JobResources(),
new DictionaryContextData(),
new WorkspaceOptions(),
new List<JobStep>(),
new List<string>()
{
"fileTable"
},
null,
new List<TemplateToken>(),
new ActionsEnvironmentReference("env")
);
return message;
}
}
}
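
A minimal, self-contained sketch of the renew-loop shape these tests exercise: the dispatcher presumably renews against the run service when the private _isRunServiceJob flag is set, and against the legacy runner server otherwise. The interface names, the RenewAgentRequestAsync signature, and the 60-second interval are illustrative assumptions, not the runner's actual implementation.

using System;
using System.Threading;
using System.Threading.Tasks;

public interface IRunServiceSketch
{
    // Mirrors the shape verified by _runServer.Verify(x => x.RenewJobAsync(...)) above.
    Task RenewJobAsync(Guid planId, Guid jobId, CancellationToken token);
}

public interface ILegacyRunnerServerSketch
{
    // Assumed legacy renew call; name and parameters are placeholders.
    Task RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken token);
}

public class RenewLoopSketch
{
    private readonly IRunServiceSketch _runServer;
    private readonly ILegacyRunnerServerSketch _runnerServer;
    private readonly bool _isRunServiceJob;

    public RenewLoopSketch(IRunServiceSketch runServer, ILegacyRunnerServerSketch runnerServer, bool isRunServiceJob)
    {
        _runServer = runServer;
        _runnerServer = runnerServer;
        _isRunServiceJob = isRunServiceJob;
    }

    public async Task RenewUntilCancelledAsync(Guid planId, Guid jobId, int poolId, long requestId, Guid lockToken, CancellationToken token)
    {
        while (!token.IsCancellationRequested)
        {
            // Pick the renew endpoint based on how the job was dispatched.
            if (_isRunServiceJob)
            {
                await _runServer.RenewJobAsync(planId, jobId, token);
            }
            else
            {
                await _runnerServer.RenewAgentRequestAsync(poolId, requestId, lockToken, token);
            }

            // Wait before renewing again; the real dispatcher also retries transient
            // failures and gives up after several consecutive errors.
            await Task.Delay(TimeSpan.FromSeconds(60), token);
        }
    }
}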

View File

@@ -192,8 +192,8 @@ namespace GitHub.Runner.Common.Tests.Listener
_runnerServer
.Setup(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<CancellationToken>()))
.Returns(async (Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, CancellationToken cancellationToken) =>
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<CancellationToken>()))
.Returns(async (Int32 poolId, Guid sessionId, Int64? lastMessageId, TaskAgentStatus status, string runnerVersion, CancellationToken cancellationToken) =>
{
await Task.Yield();
return messages.Dequeue();
@@ -208,7 +208,7 @@ namespace GitHub.Runner.Common.Tests.Listener
//Assert
_runnerServer
.Verify(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<CancellationToken>()), Times.Exactly(arMessages.Length));
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Exactly(arMessages.Length));
}
}
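
The only change in these hunks is a new string parameter (the runner version) on GetAgentMessageAsync, which the mocks accept via It.IsAny<string>(). A short sketch of how a caller might thread that value through; the interface, enum, and parameter names are placeholders, since the diff only shows the mock side.

using System;
using System.Threading;
using System.Threading.Tasks;

public enum AgentStatusSketch
{
    Offline = 0,
    Online = 1
}

public interface IMessagePollClientSketch
{
    Task<string> GetAgentMessageAsync(int poolId, Guid sessionId, long? lastMessageId,
        AgentStatusSketch status, string runnerVersion, CancellationToken token);
}

public static class MessagePollSketch
{
    public static Task<string> PollOnceAsync(IMessagePollClientSketch client, int poolId,
        Guid sessionId, long? lastMessageId, string runnerVersion, CancellationToken token)
    {
        // The extra string argument reports which runner version is polling for messages;
        // the tests above only check that some string is passed.
        return client.GetAgentMessageAsync(poolId, sessionId, lastMessageId,
            AgentStatusSketch.Online, runnerVersion, token);
    }
}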
@@ -293,7 +293,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_runnerServer
.Setup(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<CancellationToken>()))
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<CancellationToken>()))
.Throws(new TaskAgentAccessTokenExpiredException("test"));
try
{
@@ -311,7 +311,7 @@ namespace GitHub.Runner.Common.Tests.Listener
//Assert
_runnerServer
.Verify(x => x.GetAgentMessageAsync(
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<CancellationToken>()), Times.Once);
_settings.PoolId, expectedSession.SessionId, It.IsAny<long?>(), TaskAgentStatus.Online, It.IsAny<string>(), It.IsAny<CancellationToken>()), Times.Once);
_runnerServer
.Verify(x => x.DeleteAgentSessionAsync(

View File

@@ -502,5 +502,34 @@ namespace GitHub.Runner.Common.Tests.Listener
_messageListener.Verify(x => x.DeleteMessageAsync(It.IsAny<TaskAgentMessage>()), Times.Once());
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async void TestRemoveLocalRunnerConfig()
{
using (var hc = new TestHostContext(this))
{
hc.SetSingleton<IConfigurationManager>(_configurationManager.Object);
hc.SetSingleton<IConfigurationStore>(_configStore.Object);
hc.SetSingleton<IPromptManager>(_promptManager.Object);
var command = new CommandSettings(hc, new[] { "remove", "--local" });
_configStore.Setup(x => x.IsConfigured())
.Returns(true);
_configStore.Setup(x => x.HasCredentials())
.Returns(true);
var runner = new Runner.Listener.Runner();
runner.Initialize(hc);
await runner.ExecuteCommand(command);
// verify that the local runner config is deleted when the '--local' flag is passed to 'remove'
_configurationManager.Verify(x => x.DeleteLocalRunnerConfig(), Times.Once());
}
}
}
}
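
The test only verifies that DeleteLocalRunnerConfig is invoked once when "remove --local" is used; it does not show what the method does. A hedged sketch of what a purely local cleanup could look like, assuming the usual runner config file names (.runner, .credentials, .credentials_rsaparams) and no server-side deregistration.

using System.IO;

public static class LocalConfigCleanupSketch
{
    public static void DeleteLocalRunnerConfig(string rootDirectory)
    {
        // Delete only the on-disk configuration; nothing is sent to the service.
        foreach (var name in new[] { ".runner", ".credentials", ".credentials_rsaparams" })
        {
            var path = Path.Combine(rootDirectory, name);
            if (File.Exists(path))
            {
                File.Delete(path);
            }
        }
    }
}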

View File

@@ -128,7 +128,76 @@ namespace GitHub.Runner.Common.Tests
}
}
}
#if OS_WINDOWS
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task SetTestEnvWithNullInKey()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
Int32 exitCode = -1;
var processInvoker = new ProcessInvokerWrapper();
processInvoker.Initialize(hc);
var stdout = new List<string>();
var stderr = new List<string>();
processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs e) =>
{
trace.Info(e.Data);
stdout.Add(e.Data);
};
processInvoker.ErrorDataReceived += (object sender, ProcessDataReceivedEventArgs e) =>
{
trace.Info(e.Data);
stderr.Add(e.Data);
};
exitCode = await processInvoker.ExecuteAsync("", "cmd.exe", "/c \"echo %TEST%\"", new Dictionary<string, string>() { { "TEST\0second", "first" } }, CancellationToken.None);
trace.Info("Exit Code: {0}", exitCode);
Assert.Equal(0, exitCode);
Assert.Equal("first", stdout.First(x => !string.IsNullOrWhiteSpace(x)));
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public async Task SetTestEnvWithNullInValue()
{
using (TestHostContext hc = new(this))
{
Tracing trace = hc.GetTrace();
Int32 exitCode = -1;
var processInvoker = new ProcessInvokerWrapper();
processInvoker.Initialize(hc);
var stdout = new List<string>();
var stderr = new List<string>();
processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs e) =>
{
trace.Info(e.Data);
stdout.Add(e.Data);
};
processInvoker.ErrorDataReceived += (object sender, ProcessDataReceivedEventArgs e) =>
{
trace.Info(e.Data);
stderr.Add(e.Data);
};
exitCode = await processInvoker.ExecuteAsync("", "cmd.exe", "/c \"echo %TEST%\"", new Dictionary<string, string>() { { "TEST", "first\0second" } }, CancellationToken.None);
trace.Info("Exit Code: {0}", exitCode);
Assert.Equal(0, exitCode);
Assert.Equal("first", stdout.First(x => !string.IsNullOrWhiteSpace(x)));
}
}
#endif
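
Both tests pass environment entries containing an embedded null character and expect the child process to see only the part before the null. A minimal sketch of one way to get that behavior, assuming the invoker truncates keys and values at the first '\0' before launching the process; the actual sanitization may happen elsewhere (for example in the OS environment-block encoding).

using System;
using System.Collections.Generic;

public static class EnvSanitizerSketch
{
    public static IDictionary<string, string> TruncateAtNull(IDictionary<string, string> environment)
    {
        var sanitized = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        foreach (var pair in environment)
        {
            // "TEST\0second" -> key "TEST"; "first\0second" -> value "first",
            // which is what the Assert.Equal("first", ...) checks above expect.
            var key = CutAtNull(pair.Key);
            var value = CutAtNull(pair.Value);
            if (!string.IsNullOrEmpty(key))
            {
                sanitized[key] = value;
            }
        }

        return sanitized;
    }

    private static string CutAtNull(string s)
    {
        var index = s.IndexOf('\0');
        return index < 0 ? s : s.Substring(0, index);
    }
}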
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]

View File

@@ -351,7 +351,7 @@ namespace GitHub.Runner.Common.Tests
Assert.False(proxy.IsBypassed(new Uri("https://actions.com")));
Assert.False(proxy.IsBypassed(new Uri("https://ggithub.com")));
Assert.False(proxy.IsBypassed(new Uri("https://github.comm")));
Assert.False(proxy.IsBypassed(new Uri("https://google.com")));
Assert.False(proxy.IsBypassed(new Uri("https://google.com"))); // no_proxy has '.google.com', specifying only subdomains bypass
Assert.False(proxy.IsBypassed(new Uri("https://example.com")));
Assert.False(proxy.IsBypassed(new Uri("http://example.com:333")));
Assert.False(proxy.IsBypassed(new Uri("http://192.168.0.123:123")));
@@ -374,6 +374,76 @@ namespace GitHub.Runner.Common.Tests
CleanProxyEnv();
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void BypassAllOnWildcardNoProxy()
{
try
{
Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
Environment.SetEnvironmentVariable("no_proxy", "example.com, * , example2.com");
var proxy = new RunnerWebProxy();
Assert.True(proxy.IsBypassed(new Uri("http://actions.com")));
Assert.True(proxy.IsBypassed(new Uri("http://localhost")));
Assert.True(proxy.IsBypassed(new Uri("http://127.0.0.1:8080")));
Assert.True(proxy.IsBypassed(new Uri("https://actions.com")));
Assert.True(proxy.IsBypassed(new Uri("https://localhost")));
Assert.True(proxy.IsBypassed(new Uri("https://127.0.0.1:8080")));
}
finally
{
CleanProxyEnv();
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void IgnoreWildcardInNoProxySubdomain()
{
try
{
Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
Environment.SetEnvironmentVariable("no_proxy", "*.example.com");
var proxy = new RunnerWebProxy();
Assert.False(proxy.IsBypassed(new Uri("http://sub.example.com")));
Assert.False(proxy.IsBypassed(new Uri("http://example.com")));
}
finally
{
CleanProxyEnv();
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void WildcardNoProxyWorksWhenOtherNoProxyAreAround()
{
try
{
Environment.SetEnvironmentVariable("http_proxy", "http://user1:pass1%40@127.0.0.1:8888");
Environment.SetEnvironmentVariable("https_proxy", "http://user2:pass2%40@127.0.0.1:9999");
Environment.SetEnvironmentVariable("no_proxy", "example.com,*,example2.com");
var proxy = new RunnerWebProxy();
Assert.True(proxy.IsBypassed(new Uri("http://actions.com")));
Assert.True(proxy.IsBypassed(new Uri("http://localhost")));
Assert.True(proxy.IsBypassed(new Uri("http://127.0.0.1:8080")));
Assert.True(proxy.IsBypassed(new Uri("https://actions.com")));
Assert.True(proxy.IsBypassed(new Uri("https://localhost")));
Assert.True(proxy.IsBypassed(new Uri("https://127.0.0.1:8080")));
}
finally
{
CleanProxyEnv();
}
}
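
Taken together, these tests pin down the wildcard rule: a no_proxy entry that is exactly '*' (even surrounded by spaces or mixed with other entries) bypasses every host, while '*.example.com' is not treated as a wildcard at all. A self-contained sketch of just that rule; the real RunnerWebProxy.IsBypassed also performs the suffix and port matching that is omitted here.

using System;
using System.Linq;

public static class NoProxyWildcardSketch
{
    public static bool BypassesEverything(string noProxy)
    {
        if (string.IsNullOrEmpty(noProxy))
        {
            return false;
        }

        // "example.com, * , example2.com" and "example.com,*,example2.com" => true
        // "*.example.com" => false (not treated as a wildcard)
        return noProxy.Split(',')
                      .Select(entry => entry.Trim())
                      .Any(entry => entry == "*");
    }
}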
[Fact]
[Trait("Level", "L0")]

View File

@@ -59,7 +59,7 @@ namespace GitHub.Runner.Common.Tests
_secretMasker = new SecretMasker();
_secretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape);
_secretMasker.AddValueEncoder(ValueEncoders.UriDataEscape);
_traceManager = new TraceManager(traceListener, _secretMasker);
_traceManager = new TraceManager(traceListener, null, _secretMasker);
_trace = GetTrace(nameof(TestHostContext));
// inject a terminal in silent mode so all console output

Some files were not shown because too many files have changed in this diff.