Mirror of https://github.com/actions/runner.git (synced 2025-12-10 20:36:49 +00:00)

Compare commits: v2.164.0...users/tihu (67 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 2ca5ee0fbd |  |
|  | 88875ca1b0 |  |
|  | a5eb8cb5c4 |  |
|  | 41f4ca3414 |  |
|  | aa9f5bf070 |  |
|  | 2d6042421f |  |
|  | c8890d0f3f |  |
|  | 53fb6297cb |  |
|  | f9b5d626c5 |  |
|  | d34afb54b1 |  |
|  | e291ebc58a |  |
|  | 6bec1e3bb8 |  |
|  | 0cba42590f |  |
|  | 94e7560ccd |  |
|  | d80ab095a5 |  |
|  | 2efd6f70e2 |  |
|  | a6f144b014 |  |
|  | 5294a3ee06 |  |
|  | 745b90a8b2 |  |
|  | 0db908da8d |  |
|  | 68de3a94be |  |
|  | a0a590fb48 |  |
|  | 87a232c477 |  |
|  | a3c2479a29 |  |
|  | c45aebc9ab |  |
|  | b676ab3d33 |  |
|  | 0a6bac355d |  |
|  | eb78d19b17 |  |
|  | 17970ad1f9 |  |
|  | 2e0e8eb822 |  |
|  | 2a506cc556 |  |
|  | 43dd34820b |  |
|  | 746c9d9ec0 |  |
|  | fa2ecfcc4c |  |
|  | c59c0e2ded |  |
|  | 7a382facb3 |  |
|  | e9ae42693f |  |
|  | 9cafe8c028 |  |
|  | 1484c3fb03 |  |
|  | 53d632706d |  |
|  | d6179242ca |  |
|  | 0da38a6924 |  |
|  | b19e5d7924 |  |
|  | 80ac4a8964 |  |
|  | 02639a2092 |  |
|  | a727194742 |  |
|  | a9c58d7398 |  |
|  | e15414eb5e |  |
|  | 4ab1e645c3 |  |
|  | 584f6b6ca3 |  |
|  | abc65839f3 |  |
|  | 06292aa118 |  |
|  | ac1a076a3b |  |
|  | 300bc67950 |  |
|  | 289c7f36a2 |  |
|  | b89d7fb8ef |  |
|  | 5fd705bb84 |  |
|  | 9e37732401 |  |
|  | 6c70d53eea |  |
|  | f791e2d512 |  |
|  | f1e36651ad |  |
|  | be24fea81b |  |
|  | 84ca2c05ce |  |
|  | 2249560cec |  |
|  | 2d4b821abe |  |
|  | 371bf8e607 |  |
|  | 9ba11da490 |  |
10 .github/ISSUE_TEMPLATE.md (vendored)
@@ -1,10 +0,0 @@
|
||||
## Runner Version and Platform
|
||||
Version of your runner?
|
||||
|
||||
OS of the machine running the runner? OSX/Windows/Linux/...
|
||||
|
||||
## What's not working?
|
||||
Please include error messages and screenshots.
|
||||
|
||||
## Runner and Worker's Diagnostic Logs
|
||||
Logs are located in the runner's `_diag` folder. The runner logs are prefixed with `Runner_` and the worker logs are prefixed with `Worker_`. All sensitive information should already be masked out, but please double-check before pasting here.
|
||||
34 .github/ISSUE_TEMPLATE/bug_report.md (vendored, new file)
@@ -0,0 +1,34 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: ''
|
||||
labels: bug
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior:
|
||||
1. Go to '...'
|
||||
2. Run '....'
|
||||
3. See error
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
## Runner Version and Platform
|
||||
Version of your runner?
|
||||
|
||||
OS of the machine running the runner? OSX/Windows/Linux/...
|
||||
|
||||
## What's not working?
|
||||
Please include error messages and screenshots.
|
||||
|
||||
## Job Log Output
|
||||
If applicable, include the relevant part of the job / step log output here. All sensitive information should already be masked out, but please double-check before pasting here.
|
||||
|
||||
## Runner and Worker's Diagnostic Logs
|
||||
If applicable, add relevant diagnostic log information. Logs are located in the runner's `_diag` folder. The runner logs are prefixed with `Runner_` and the worker logs are prefixed with `Worker_`. Each job run correlates to a worker log. All sensitive information should already be masked out, but please double-check before pasting here.
|
||||
27 .github/ISSUE_TEMPLATE/enhancement_request.md (vendored, new file)
@@ -0,0 +1,27 @@
|
||||
---
|
||||
name: Feature Request
|
||||
about: Create a request to help us improve
|
||||
title: ''
|
||||
labels: enhancement
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
Thank you 🙇♀ for wanting to create a feature in this repository. Before you do, please ensure you are filing the issue in the right place. Issues should only be opened here if the issue **relates to code in this repository**.
|
||||
|
||||
* If you have found a security issue [please submit it here](https://hackerone.com/github)
|
||||
* If you have questions or issues with the service, writing workflows or actions, then please [visit the GitHub Community Forum's Actions Board](https://github.community/t5/GitHub-Actions/bd-p/actions)
|
||||
* If you are having an issue or question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)
|
||||
|
||||
If you have a feature request that is relevant to this repository, the runner, then please include the information below:
|
||||
|
||||
**Describe the enhancement**
|
||||
A clear and concise description of the feature or enhancement you need.
|
||||
|
||||
**Code Snippet**
|
||||
If applicable, add a code snippet.
|
||||
|
||||
**Additional information**
|
||||
Add any other context about the feature here.
|
||||
|
||||
NOTE: if the feature request has been agreed upon then the assignee will create an ADR. See docs/adrs/README.md
|
||||
4 .github/workflows/build.yml (vendored)
@@ -5,9 +5,13 @@ on:
|
||||
branches:
|
||||
- master
|
||||
- releases/*
|
||||
paths-ignore:
|
||||
- '**.md'
|
||||
pull_request:
|
||||
branches:
|
||||
- '*'
|
||||
paths-ignore:
|
||||
- '**.md'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
8 .gitignore (vendored)
@@ -1,12 +1,19 @@
|
||||
# build output
|
||||
**/bin
|
||||
**/obj
|
||||
**/libs
|
||||
**/lib
|
||||
|
||||
# editors
|
||||
**/*.xproj
|
||||
**/*.xproj.user
|
||||
**/.vs
|
||||
**/.vscode
|
||||
**/*.error
|
||||
**/*.json.pretty
|
||||
.idea/
|
||||
|
||||
# output
|
||||
node_modules
|
||||
_downloads
|
||||
_layout
|
||||
@@ -19,4 +26,3 @@ TestLogs
|
||||
|
||||
#generated
|
||||
src/Runner.Sdk/BuildConstants.cs
|
||||
|
||||
|
||||
4 LICENSE
@@ -1,5 +1,5 @@
|
||||
The MIT License (MIT)
|
||||
Copyright (c) Microsoft Corporation
|
||||
Copyright (c) 2019 GitHub
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
@@ -17,4 +17,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
SOFTWARE.
|
||||
|
||||
26 README.md
@@ -1,31 +1,25 @@
|
||||
# GitHub Actions Runner
|
||||
|
||||
<p align="center">
|
||||
<img src="docs/res/github-graph.png">
|
||||
</p>
|
||||
|
||||
# GitHub Actions Runner
|
||||
|
||||
[](https://github.com/actions/runner/actions)
|
||||
|
||||
The runner is the application that runs a job from a GitHub Actions workflow. The runner can run on the [hosted machine pools](https://github.com/actions/virtual-environments) or run on [self-hosted environments](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners).
|
||||
|
||||
## Get Started
|
||||
|
||||
 [Pre-reqs](docs/start/envwin.md) | [Download](https://github.com/actions/runner/releases/latest)
|
||||
For more information about installing and using self-hosted runners, see [Adding self-hosted runners](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/adding-self-hosted-runners) and [Using self-hosted runners in a workflow](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/using-self-hosted-runners-in-a-workflow)
|
||||
|
||||
 [Pre-reqs](docs/start/envosx.md) | [Download](https://github.com/actions/runner/releases/latest)
|
||||
Runner releases:
|
||||
|
||||
 [Pre-reqs](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases/latest)
|
||||
 [Pre-reqs](docs/start/envwin.md) | [Download](https://github.com/actions/runner/releases)
|
||||
|
||||
**Configure:**
|
||||
 [Pre-reqs](docs/start/envosx.md) | [Download](https://github.com/actions/runner/releases)
|
||||
|
||||
*MacOS and Linux*
|
||||
```bash
|
||||
./config.sh
|
||||
```
|
||||
|
||||
*Windows*
|
||||
```bash
|
||||
config.cmd
|
||||
```
|
||||
 [Pre-reqs](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases)
|
||||
|
||||
## Contribute
|
||||
|
||||
For developers that want to contribute, [read here](docs/contribute.md) on how to build and test.
|
||||
We accept contributions in the form of issues and pull requests. [Read more here](docs/contribute.md) before contributing.
|
||||
|
||||
61 docs/adrs/0263-proxy-support.md (new file)
@@ -0,0 +1,61 @@
|
||||
# ADR 263: Self Hosted Runner Proxies
|
||||
|
||||
**Date**: 2019-11-13
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
## Context
|
||||
|
||||
- Proxy support is required for some enterprises and organizations to start using their own self-hosted runners
- While there is not a standard convention, many applications support setting proxies via the environment variables `http_proxy`, `https_proxy`, and `no_proxy`, such as curl, wget, perl, python, docker, git, R, etc.
- Some of these applications use `HTTPS_PROXY` rather than `https_proxy`, but most understand or primarily support the lowercase variant
|
||||
|
||||
## Decision
|
||||
|
||||
We will update the Runner to use the conventional environment variables for proxies: `http_proxy`, `https_proxy` and `no_proxy` if they are set.
|
||||
These are described in detail below:
|
||||
- `https_proxy` a proxy URL for all https traffic. It may contain basic authentication credentials. For example:
|
||||
- http://proxy.com
|
||||
- http://127.0.0.1:8080
|
||||
- http://user:password@proxy.com
|
||||
- `http_proxy` a proxy URL for all http traffic. It may contain basic authentication credentials. For example:
|
||||
- http://proxy.com
|
||||
- http://127.0.0.1:8080
|
||||
- http://user:password@proxy.com
|
||||
- `no_proxy` a comma-separated list of hosts that should not use the proxy. An optional port may be specified
|
||||
- `google.com`
|
||||
- `yahoo.com:443`
|
||||
- `google.com,bing.com`
|
||||
|
||||
We won't use `http_proxy` for https traffic when `https_proxy` is not set; this behavior lines up with libcurl-based tools (curl, git) and wget.
Otherwise action authors and workflow users would need to adjust to differences between the runner proxy convention and the tools used by their actions and scripts.
|
||||
|
||||
Example:
|
||||
A customer sets `http_proxy=http://127.0.0.1:8888` and configures the runner against `https://github.com/owner/repo`. With an `https_proxy` -> `http_proxy` fallback, the runner would connect to the server without any problem. However, if the user runs `git push` to `https://github.com/owner/repo`, `git` won't use the proxy since it requires `https_proxy` to be set for any https traffic.

> `golang`, `node.js` and other dev tools from the Linux community use `http_proxy` for both http and https traffic, based on our research.
|
||||
|
||||
A majority of our users are on Linux, where these variables are commonly required by various programs. By reading these values, we simplify the process of setting up a proxy for self-hosted runners and expose it in a way users are already familiar with.
|
||||
|
||||
A password provided for a proxy will be masked in the logs.
|
||||
|
||||
We will support the lowercase and uppercase variants, with lowercase taking priority if both are set.
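For illustration, resolving a single proxy setting with that precedence could look like the following Python sketch (not the runner's actual C# implementation):

```python
import os

def get_proxy_setting(name):
    """Resolve e.g. 'https_proxy', preferring the lowercase variable
    over the uppercase variant when both are set."""
    return os.environ.get(name.lower()) or os.environ.get(name.upper())

# Note: there is deliberately no https_proxy -> http_proxy fallback.
https_proxy = get_proxy_setting("https_proxy")
http_proxy = get_proxy_setting("http_proxy")
print("https traffic:", https_proxy, "| http traffic:", http_proxy)
```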
|
||||
|
||||
### No Proxy Format
|
||||
|
||||
While exact implementations of `no_proxy` handling differ per application, most applications accept a comma-separated list of hosts. Some accept wildcard characters (*). We are going to do exact case-insensitive matches and not support wildcards at this time.
|
||||
For example:
|
||||
- example.com will match example.com, foo.example.com, foo.bar.example.com
|
||||
- foo.example.com will match bar.foo.example.com and foo.example.com
|
||||
|
||||
We will not support IP addresses for `no_proxy`, only hostnames.
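A minimal Python sketch of this matching rule (exact, case-insensitive host matches including parent domains, no wildcards, hostnames only); it is an illustration, not the runner's implementation:

```python
def bypass_proxy(host, no_proxy):
    """Return True if `host` matches an entry in the comma-separated no_proxy list.
    Matching is case-insensitive and suffix-based, so 'example.com' also matches
    'foo.example.com'. Ports on entries are ignored in this sketch."""
    host = host.lower()
    for entry in no_proxy.split(","):
        entry = entry.strip().lower().split(":")[0]  # drop an optional port
        if entry and (host == entry or host.endswith("." + entry)):
            return True
    return False

assert bypass_proxy("foo.example.com", "example.com,bing.com")
assert not bypass_proxy("example.org", "example.com")
```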
|
||||
|
||||
## Consequences
|
||||
|
||||
1. Enterprises and organizations needing proxy support will be able to embrace self hosted runners
|
||||
2. Users will need to set these environmental variables before configuring the runner in order to use a proxy when configuring
|
||||
3. The runner will read from the environmental variables during config and runtime and use the provided proxy if it exists
|
||||
4. Users may need to pass these environmental variables into other applications if they do not natively take these variables
|
||||
5. Action authors may need to update their workflows to react to these environment variables
6. We will document how to set these environment variables for runners and how the runner uses them
7. Like all other secrets, users will be able to figure out the proxy password relatively easily if they can modify a workflow file running on a self-hosted machine
|
||||
62 docs/adrs/0274-step-outcome-and-conclusion.md (new file)
@@ -0,0 +1,62 @@
|
||||
# ADR 0274: Step outcome and conclusion
|
||||
|
||||
**Date**: 2020-01-13
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
## Context
|
||||
|
||||
This ADR proposes adding `steps.<id>.outcome` and `steps.<id>.conclusion` to the steps context.
|
||||
|
||||
This allows a downstream step to run based on whether a previous step succeeded or failed.

Reminder: currently the steps context contains `steps.<id>.outputs`.
|
||||
|
||||
## Decision
|
||||
|
||||
For steps that have completed, populate `steps.<id>.outcome` and `steps.<id>.conclusion` with one of the following values:
|
||||
|
||||
- `success`
|
||||
- `failure`
|
||||
- `cancelled`
|
||||
- `skipped`
|
||||
|
||||
When a continue-on-error step fails, the outcome will be `failure` even though the final conclusion is `success`.
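The relationship between the two values can be expressed with a tiny sketch (Python, for illustration only):

```python
def conclusion(outcome, continue_on_error):
    """Derive a step's conclusion from its raw outcome.
    With continue-on-error, a failing step still concludes as success."""
    if outcome == "failure" and continue_on_error:
        return "success"
    return outcome

assert conclusion("failure", continue_on_error=True) == "success"
assert conclusion("failure", continue_on_error=False) == "failure"
assert conclusion("skipped", continue_on_error=True) == "skipped"
```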
|
||||
|
||||
### Example
|
||||
|
||||
```yaml
|
||||
steps:
|
||||
|
||||
- id: experimental
|
||||
continue-on-error: true
|
||||
run: ./build.sh experimental
|
||||
|
||||
- if: ${{ steps.experimental.outcome == 'success' }}
|
||||
run: ./publish.sh experimental
|
||||
```
|
||||
|
||||
### Terminology
|
||||
|
||||
The runs API uses the term `conclusion`.
|
||||
|
||||
Therefore we use a different term `outcome` for the value prior to continue-on-error.
|
||||
|
||||
The following is a snippet from the runs API response payload:
|
||||
|
||||
```json
|
||||
"steps": [
|
||||
{
|
||||
"name": "Set up job",
|
||||
"status": "completed",
|
||||
"conclusion": "success",
|
||||
"number": 1,
|
||||
"started_at": "2020-01-09T11:06:16.000-05:00",
|
||||
"completed_at": "2020-01-09T11:06:18.000-05:00"
|
||||
},
|
||||
```
|
||||
|
||||
## Consequences
|
||||
|
||||
- Update runner
|
||||
- Update [docs](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#steps-context)
|
||||
263 docs/adrs/0276-problem-matchers.md (new file)
@@ -0,0 +1,263 @@
|
||||
# ADR 0276: Problem Matchers
|
||||
|
||||
**Date** 2019-06-05
|
||||
|
||||
**Status** Accepted
|
||||
|
||||
## Context
|
||||
|
||||
Compilation failures during a CI build should surface good error messages.
|
||||
|
||||
For example, the actual compile errors from the TypeScript compiler should bubble up as issues in the UI, and not simply "tsc exited with exit code 1".
|
||||
|
||||
VSCode has an extensible model for solving this type of problem. VSCode allows users to configure which problem matchers to use when scanning output. For example, a user can apply the `tsc` problem matcher to get a rich error output experience in VSCode when compiling their TypeScript project.
|
||||
|
||||
The problem-matcher concept fits well with "setup" actions. For example, the `setup-nodejs` action will download node.js, add it to the PATH, and register the `tsc` problem matcher. For the duration of the job, the `tsc` problem matcher will be applied against the output.
|
||||
|
||||
## Decision
|
||||
|
||||
### Registration
|
||||
|
||||
#### Using `##` command
|
||||
|
||||
`##[add-matcher]path-to-problem-matcher-config.json`
|
||||
|
||||
Using a `##` command allows for flexibility:
|
||||
- Ad hoc scripts can register problem matchers
|
||||
- Allows problem matchers to be conditionally registered
|
||||
|
||||
Note, if a matcher with the same name is registered a second time, it will clobber the first instance.
|
||||
|
||||
#### Unregister using `##` command
|
||||
|
||||
A way out for rare cases where scoping is a problem.
|
||||
|
||||
`##[remove-matcher]owner`
|
||||
|
||||
For this to be usable, the `owner` needs to be discoverable. Therefore, debug print the owner on registration.
|
||||
|
||||
### Single line matcher
|
||||
|
||||
Consider the output:
|
||||
|
||||
```
|
||||
[...]
|
||||
|
||||
Build FAILED.
|
||||
|
||||
"C:\temp\problemmatcher\myproject\ConsoleApp1\ConsoleApp1.sln" (default target) (1) ->
|
||||
"C:\temp\problemmatcher\myproject\ConsoleApp1\ConsoleApp1\ConsoleApp1.csproj" (default target) (2) ->
|
||||
"C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj" (default target) (3) ->
|
||||
(CoreCompile target) ->
|
||||
Class1.cs(16,24): warning CS0612: 'ClassLibrary1.Helpers.MyHelper.Name' is obsolete [C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]
|
||||
|
||||
|
||||
"C:\temp\problemmatcher\myproject\ConsoleApp1\ConsoleApp1.sln" (default target) (1) ->
|
||||
"C:\temp\problemmatcher\myproject\ConsoleApp1\ConsoleApp1\ConsoleApp1.csproj" (default target) (2) ->
|
||||
"C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj" (default target) (3) ->
|
||||
(CoreCompile target) ->
|
||||
Helpers\MyHelper.cs(16,30): error CS1002: ; expected [C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]
|
||||
|
||||
1 Warning(s)
|
||||
1 Error(s)
|
||||
```
|
||||
|
||||
The below match configuration uses a regular expression to discover problem lines. And the match groups are mapped into issue-properties.
|
||||
|
||||
```json
|
||||
"owner": "msbuild",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "^\\s*([^:]+)\\((\\d+),(\\d+)\\): (error|warning) ([^:]+): (.*) \\[(.+)\\]$",
|
||||
"file": 1,
|
||||
"line": 2,
|
||||
"column": 3,
|
||||
"severity": 4,
|
||||
"code": 5,
|
||||
"message": 6,
|
||||
"fromPath": 7
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
The above output and match configuration produces the following matches:
|
||||
|
||||
```
|
||||
line: Class1.cs(16,24): warning CS0612: 'ClassLibrary1.Helpers.MyHelper.Name' is obsolete [C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]
|
||||
file: Class1.cs
|
||||
line: 16
|
||||
column: 24
|
||||
severity: warning
|
||||
code: CS0612
|
||||
message: 'ClassLibrary1.Helpers.MyHelper.Name' is obsolete
|
||||
fromPath: C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj
|
||||
```
|
||||
|
||||
```
|
||||
line: Helpers\MyHelper.cs(16,30): error CS1002: ; expected [C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]
|
||||
file: Helpers\MyHelper.cs
|
||||
line: 16
|
||||
column: 30
|
||||
severity: error
|
||||
code: CS1002
|
||||
message: ; expected
|
||||
fromPath: C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj
|
||||
```
|
||||
|
||||
Additionally, the line will appear red in the web UI (prefixed with `##[error]`).
|
||||
|
||||
Note, an error does not imply task failure. Exit codes communicate failure.
|
||||
|
||||
Note, strip color codes when evaluating regular expressions.
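For a concrete picture, here is a Python sketch (the runner itself is C#) that applies the msbuild pattern above to one output line and maps the match groups onto issue properties:

```python
import re

# The msbuild regular expression from above, minus the JSON escaping.
MSBUILD_PATTERN = {
    "regexp": r"^\s*([^:]+)\((\d+),(\d+)\): (error|warning) ([^:]+): (.*) \[(.+)\]$",
    "file": 1, "line": 2, "column": 3, "severity": 4,
    "code": 5, "message": 6, "fromPath": 7,
}

ANSI_COLOR = re.compile(r"\x1b\[[0-9;]*m")  # color codes are stripped before matching

def match_line(line, pattern):
    m = re.search(pattern["regexp"], ANSI_COLOR.sub("", line))
    if m is None:
        return None
    # Each configured group index becomes an issue property.
    return {name: m.group(idx) for name, idx in pattern.items() if name != "regexp"}

issue = match_line(
    r"Class1.cs(16,24): warning CS0612: 'MyHelper.Name' is obsolete [C:\myrepo\ClassLibrary1.csproj]",
    MSBUILD_PATTERN,
)
print(issue["severity"], issue["code"], issue["message"])
```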
|
||||
|
||||
### Multi-line matcher
|
||||
|
||||
Consider the below output from ESLint in stylish mode. The file name is printed once, yet multiple error lines are printed.
|
||||
|
||||
```
|
||||
test.js
|
||||
1:0 error Missing "use strict" statement strict
|
||||
5:10 error 'addOne' is defined but never used no-unused-vars
|
||||
✖ 2 problems (2 errors, 0 warnings)
|
||||
```
|
||||
|
||||
The below match configuration uses multiple regular expressions, for the multiple lines.
|
||||
|
||||
And the last pattern of a multiline matcher can specify the `loop` property. This allows multiple errors to be discovered.
|
||||
|
||||
```json
|
||||
"owner": "eslint-stylish",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "^([^\\s].*)$",
|
||||
"file": 1
|
||||
},
|
||||
{
|
||||
"regexp": "^\\s+(\\d+):(\\d+)\\s+(error|warning|info)\\s+(.*)\\s\\s+(.*)$",
|
||||
"line": 1,
|
||||
"column": 2,
|
||||
"severity": 3,
|
||||
"message": 4,
|
||||
"code": 5,
|
||||
"loop": true
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
The above output and match configuration produces two matches:
|
||||
|
||||
```
|
||||
line: 1:0 error Missing "use strict" statement strict
|
||||
file: test.js
|
||||
line: 1
|
||||
column: 0
|
||||
severity: error
|
||||
message: Missing "use strict" statement
|
||||
code: strict
|
||||
```
|
||||
|
||||
```
|
||||
line: 5:10 error 'addOne' is defined but never used no-unused-vars
|
||||
file: test.js
|
||||
line: 5
|
||||
column: 10
|
||||
severity: error
|
||||
message: 'addOne' is defined but never used
|
||||
code: no-unused-vars
|
||||
```
|
||||
|
||||
Note, in the above example only the error line will appear red in the web UI. The "file" line will not appear red.
|
||||
|
||||
### Other details
|
||||
|
||||
#### Configuration `owner`
|
||||
|
||||
The `owner` can be used to stomp over (re-register) or remove a matcher.
|
||||
|
||||
#### Rooting the file
|
||||
|
||||
The goal of the file information is to provide a hyperlink in the UI.
|
||||
|
||||
Solving this problem means:
|
||||
- Rooting the file when unrooted:
|
||||
- Use the `fromPath` if specified (assume file path)
|
||||
- Use the `github.workspace` (where the repo is cloned on disk)
|
||||
- Match against a repository to determine the relative path within the repo
|
||||
|
||||
This is a place where we diverge from VSCode. VSCode task configurations are specific to the local workspace (the workspace root is known or can be specified). We're solving a more generic problem, so we need more information - specifically the `fromPath` property - in order to accurately root the path.
|
||||
|
||||
In order to avoid creating inaccurate hyperlinks on the error issues, the runner will verify that the file exists and is in the main repository. Otherwise it will omit the file property from the error issue and debug trace what happened.
|
||||
|
||||
#### Supported severity levels
|
||||
|
||||
Ordinal ignore case:
|
||||
|
||||
- `warning`
|
||||
- `error`
|
||||
|
||||
Coalesce empty with "error". For any other values, omit logging an issue and debug trace what happened.
|
||||
|
||||
#### Default severity level
|
||||
|
||||
Problem matchers are unable to interpret severity strings other than `warning` and `error`. The `severity` match group expects `warning` or `error` (case insensitive).
|
||||
|
||||
However some tools indicate error/warning in different ways. For example `flake8` uses codes like `E100`, `W200`, and `F300` (error, warning, fatal, respectively).
|
||||
|
||||
Therefore, allow a property `severity`, sibling to `owner`, which identifies the default severity for the problem matcher. This allows two problem matchers to be registered - one for warnings and one for errors.
|
||||
|
||||
For example, given the following `flake8` output:
|
||||
|
||||
```
|
||||
./bootcamp/settings.py:156:80: E501 line too long (94 > 79 characters)
|
||||
./bootcamp/settings.py:165:5: F403 'from local_settings import *' used; unable to detect undefined names
|
||||
```
|
||||
|
||||
Two problem matchers can be used:
|
||||
|
||||
```json
|
||||
{
|
||||
"problemMatcher": [
|
||||
{
|
||||
"owner": "flake8",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "^(.+):(\\d+):(\\d+): ([EF]\\d+) (.+)$",
|
||||
"file": 1,
|
||||
"line": 2,
|
||||
"column": 3,
|
||||
"code": 4,
|
||||
"message": 5
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"owner": "flake8-warnings",
|
||||
"severity": "warning",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "^(.+):(\\d+):(\\d+): (W\\d+) (.+)$",
|
||||
"file": 1,
|
||||
"line": 2,
|
||||
"column": 3,
|
||||
"code": 4,
|
||||
"message": 5
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### Mitigate regular expression denial of service (ReDos)
|
||||
|
||||
If a matcher exceeds a 1-second timeout when processing a line, retry up to two more times (three attempts total).
After three unsuccessful attempts, warn and eject the matcher. The matcher will not run again for the duration of the job.
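One way to picture that policy is the following simplified Python sketch; unlike the real runner, it measures elapsed time after the match completes instead of aborting a running regular expression:

```python
import re
import time

TIMEOUT_SECONDS = 1.0
MAX_SLOW_ATTEMPTS = 3

class RegisteredMatcher:
    def __init__(self, owner, regexp):
        self.owner = owner
        self.pattern = re.compile(regexp)
        self.slow_attempts = 0
        self.ejected = False

    def process_line(self, line):
        if self.ejected:
            return None                  # ejected matchers are skipped for the rest of the job
        started = time.monotonic()
        match = self.pattern.search(line)
        if time.monotonic() - started > TIMEOUT_SECONDS:
            self.slow_attempts += 1
            if self.slow_attempts >= MAX_SLOW_ATTEMPTS:
                self.ejected = True
                print(f"Ejecting slow problem matcher '{self.owner}'")  # surfaced as a warning
        return match
```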
|
||||
|
||||
### Where we diverge from VSCode
|
||||
|
||||
- We added the `fromPath` concept for rooting paths. This is done differently in VSCode, since a task is the scope (root path well known). For us, the job is the scope.
|
||||
- VSCode allows additional activation info for background tasks that are always running (recompile on files changed). It allows regular expressions to define when the matcher scope begins and ends. This is an interesting concept that we could leverage to help solve our scoping problem.
|
||||
|
||||
## Consequences
|
||||
|
||||
- Setup actions should register problem matchers
|
||||
93 docs/adrs/0277-run-action-shell-options.md (new file)
@@ -0,0 +1,93 @@
|
||||
# ADR 0277: Run action shell option
|
||||
|
||||
**Date** 2019-07-09
|
||||
|
||||
**Status** Accepted
|
||||
|
||||
## Context
|
||||
run-actions run scripts using a platform-specific shell:
`bash -eo pipefail` on non-Windows, and `cmd.exe /c /d /s` on Windows
|
||||
|
||||
The `shell` option overrides this to allow different flags or completely different shells/interpreters
|
||||
|
||||
A small example is:
|
||||
```yml
|
||||
jobs:
|
||||
bash-job:
|
||||
actions:
|
||||
- run: echo "Hello"
|
||||
shell: bash
|
||||
python-job:
|
||||
actions:
|
||||
- run: print("Hello")
|
||||
shell: python {0}
|
||||
```
|
||||
|
||||
## Decision
|
||||
|
||||
___
|
||||
|
||||
### Shell option
|
||||
The keyword being used is `shell`
|
||||
|
||||
`shell` can be either:
|
||||
|
||||
1. Builtins / Explicitly supported keywords. It is useful to support at least `cmd`, and `powershell` on Windows. Because `cmd my_cmd_script` and `powershell my_ps1_script` are not valid the same way many Linux/cross-platform interpreters are, e.g. `bash myscript` or `python myscript`. Those tools (and potentially others) also require the correct file extension to run, or must be run in a particular way to get the exit codes consistently, so we must have first class knowledge about them. We provide default templates for these keywords as follows:
|
||||
- `cmd`: Default is: `%ComSpec% /D /E:ON /V:OFF /S /C "CALL "{0}""` where the script name is automatically appended with `.cmd` and substituted for `{0}`
|
||||
- Note this is equivalent to the default Windows behavior if no shell option is given
|
||||
- `pwsh`: Default is: `pwsh -command "& '{0}'"` where the script is automatically appended with `.ps1`
|
||||
- `powershell`: Default is: `powershell -command "& '{0}'"` where the script is automatically appended with `.ps1`
|
||||
- `bash`: Uses `bash --noprofile --norc -eo pipefail {0}`
|
||||
- The default behavior on non-Windows if no shell is given is to attempt this first
|
||||
- `sh`: Uses `sh -e {0}`
|
||||
- This is the default behavior on non-Windows if no shell is given, AND `bash` (see above) was not located on the PATH
|
||||
- `python`: `python {0}`
|
||||
- **NOTE**: The exact command run may vary by machine. We only provide default arguments and command format for the listed shells. While the above behavior is expected on hosted machines, private runners may vary. For example, `sh` (or other commands) may actually be a link to `/bin/dash`, `/bin/bash`, or other shells
|
||||
|
||||
1. A template string: `command [...options] {0} [...more_options]`
|
||||
- As above, the file name of the temporary script will be templated in. This gives users more control to have options at any location relative to the script path
|
||||
- The first whitespace-delimited word of the string will be interpreted as the command
|
||||
- e.g. `python {0} arg1 arg2` or similar can be used if passing args is needed. Some shells will require other options after the filename for various reasons
|
||||
|
||||
Note that (1) simply provides defaults that are executed with the same mechanism as (2). That is:
|
||||
- A temporary script file is generated, and the path to that file is templated into the string at `{0}`
|
||||
- The first word of the formatted string is assumed to be a command, and we attempt to locate its full path
|
||||
- The fully qualified path to the command, plus the remaining arguments, is executed
|
||||
- e.g. `shell: bash` expands to `/bin/bash --noprofile --norc -eo pipefail /runner/_layout/_work/_temp/f8d4fb2b-19d9-47e6-a786-4cc538d52761.sh` on my private runner
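A rough Python sketch of that expansion (the template table and the PATH lookup are illustrative; the real defaults are the ones listed above, implemented in C#):

```python
import shlex
import shutil
import tempfile

# Illustrative templates for a few of the well-known shells described above.
BUILTIN_TEMPLATES = {
    "bash": "bash --noprofile --norc -eo pipefail {0}",
    "sh": "sh -e {0}",
    "python": "python {0}",
}

def build_command(shell, script_body, suffix=".sh"):
    # Unknown values are treated as a user-supplied template string containing {0}.
    template = BUILTIN_TEMPLATES.get(shell, shell)
    with tempfile.NamedTemporaryFile("w", suffix=suffix, delete=False) as f:
        f.write(script_body)                       # the generated temporary script
        script_path = f.name
    words = shlex.split(template.format(script_path))
    resolved = shutil.which(words[0]) or words[0]  # resolve the first word on PATH
    return [resolved, *words[1:]]

print(build_command("bash", 'echo "Hello"'))
```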
|
||||
|
||||
At this time, **THE LIST OF WELL-KNOWN SHELL OPTIONS IS**:
|
||||
- cmd - Windows (hosted vs2017, vs2019) only
|
||||
- powershell - Windows (hosted vs2017, vs2019) only
|
||||
- sh - All hosted platforms
|
||||
- pwsh - All hosted platforms
|
||||
- bash - All hosted platforms
|
||||
- python - All hosted platforms. Can use setup-python to configure which python will be used
|
||||
___
|
||||
|
||||
### Containers
|
||||
For container jobs, `shell` should just work the same as above, transparently. We will simply `exec` the command in the job container, passing the same arguments in
|
||||
|
||||
___
|
||||
|
||||
### Exit codes / Error action preference
|
||||
|
||||
For builtin shells, we provide defaults that make the most sense for CI, running within Actions, and being executed by our runner
|
||||
|
||||
bash/sh:
|
||||
- Fail-fast behavior using `set -eo pipefail` is the default for the `bash` and `sh` builtins, and by default when no option is given on non-Windows platforms
- Users can opt out of fail-fast and take full control easily by providing a template string for the shell option, e.g.: `bash {0}`.
- sh-like shells exit with the exit code of the last command executed in a script, which is our default behavior. Thus the runner reports the status of the step as fail/succeed based on this exit code
|
||||
|
||||
powershell/pwsh
|
||||
- Fail-fast behavior when possible. For `pwsh` and `powershell` builtins, we will prepend `$ErrorActionPreference = 'stop'` to script contents
|
||||
- We append `if ((Test-Path -LiteralPath variable:\LASTEXITCODE)) { exit $LASTEXITCODE }` to powershell scripts to get Action statuses to reflect the script's last exit code
|
||||
- Users can always opt out by not using the builtins, and providing a shell option like: `pwsh -File {0}`, or `powershell -Command "& '{0}'"`, depending on need
|
||||
|
||||
cmd
|
||||
- There doesn't seem to be a way to fully opt in to fail-fast behavior other than writing your script to check each error code and respond accordingly, so we can't actually provide that behavior by default; it is completely up to the user to write this behavior into their script
|
||||
- cmd.exe will exit (return the error code to the runner) with the errorlevel of the last program it executed. This is internally consistent with the previous default behavior (sh, pwsh) and is the cmd.exe default, so we keep that behavior
|
||||
|
||||
## Consequences
|
||||
Valid `shell` options will depend on the hosted images. We will need to maintain tight image compatibility
|
||||
|
||||
First-class support for a shell will require a major version schema change to modify. We cannot remove or modify the behavior of a well-known supported option. However, adding first-class support for new shells is backwards compatible. For instance, we can add a well-known `python` option, because non-well-known options would have always needed to include `{0}`, e.g. `python {0}`
|
||||
60 docs/adrs/0278-env-context.md (new file)
@@ -0,0 +1,60 @@
|
||||
# ADR 0278: Env Context
|
||||
|
||||
**Date**: 2019-09-30
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
## Context
|
||||
|
||||
Users want to reference workflow variables defined in the workflow YAML file in an action's input, display name, and condition.
|
||||
|
||||
## Decision
|
||||
|
||||
### Add `env` context in the runner
|
||||
|
||||
The runner will create and populate the `env` context for every job execution using the following logic (a sketch follows this list):
1. On job start, create the `env` context with any environment variables in the job message; these are the env values defined in the job/workflow-level `env` sections of the customer's YAML file.
2. Update the `env` context when the customer uses `::set-env::` to set an env value at the runner level.
3. Update the `env` context with the step's `env` block before each step runs.
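A minimal sketch of that layering (Python, for illustration only); later layers win, which matches the YAML example below:

```python
def env_context_for_step(job_message_env, set_env_updates, step_env):
    """Compose the env context the runner exposes while evaluating one step.
    Step-level env overrides ::set-env:: values, which override the
    workflow/job-level env carried in the job message."""
    context = dict(job_message_env)   # 1. env from the job message
    context.update(set_env_updates)   # 2. ::set-env:: values seen so far
    context.update(step_env)          # 3. the current step's env block
    return context

# Mirrors the example below: the job message env has env1=100, env2=200, env3=30.
ctx = env_context_for_step(
    {"env1": "100", "env2": "200", "env3": "30"},
    {},                               # no ::set-env:: has run yet
    {"env1": "1000"},                 # the step's own env block
)
assert ctx == {"env1": "1000", "env2": "200", "env3": "30"}
```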
|
||||
|
||||
The `env` context is only available in the runner; customers can't use the `env` context in any server-side evaluation, just like the `runner` context
|
||||
|
||||
Example yaml:
|
||||
```yaml
|
||||
|
||||
env:
|
||||
env1: 10
|
||||
env2: 20
|
||||
env3: 30
|
||||
jobs:
|
||||
build:
|
||||
env:
|
||||
env1: 100
|
||||
env2: 200
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo ${{ env.env1 }} // 1000
|
||||
echo $env1 // 1000
|
||||
echo $env2 // 200
|
||||
echo $env3 // 30
|
||||
if: env.env2 == 200 // true
|
||||
name: ${{ env.env1 }}_${{ env.env2 }} //1000_200
|
||||
env:
|
||||
env1: 1000
|
||||
```
|
||||
|
||||
### Don't populate the `env` context with environment variables from the runner machine.
|
||||
|
||||
With a job container or a container action, the `env` context may not have the value the customer wants, which will cause confusion.
Ex:
|
||||
```yaml
|
||||
build:
|
||||
runs-on: ubuntu-latest <- $USER=runner in hosted machine
|
||||
container: ubuntu:16.04 <- $USER=root in container
|
||||
steps:
|
||||
- run: echo ${{env.USER}} <- what should customer expect this output? runner/root
|
||||
- uses: docker://ubuntu:18.04
|
||||
with:
|
||||
args: echo ${{env.USER}} <- what should customer expect this output? runner/root
|
||||
```
|
||||
71 docs/adrs/0279-hashFiles-expression-function.md (new file)
@@ -0,0 +1,71 @@
|
||||
# ADR 0279: HashFiles Expression Function
|
||||
|
||||
**Date**: 2019-09-30
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
## Context
|
||||
The first-party action `actions/cache` needs an input, an explicit `key`, used for restoring and saving the cache. For package caching, the most common `key` might be the hash of the contents of all `package-lock.json` files under the `node_modules` folder.

There are several different ways to get the hash `key` input for the `actions/cache` action.

1. Customers calculate the `key` themselves from a different action. Customers won't like this since it needs an extra step to use the cache feature
|
||||
```yaml
|
||||
steps:
|
||||
- run: |
|
||||
hash=some_linux_hash_method(file1, file2, file3)
|
||||
echo ::set-output name=hash::$hash
|
||||
id: createHash
|
||||
- uses: actions/cache@v1
|
||||
with:
|
||||
key: ${{ steps.createHash.outputs.hash }}
|
||||
```
|
||||
|
||||
2. Make the `key` input of `actions/cache` follow a certain convention to calculate the hash; this limits the `key` input to a certain format the customer may not want.
|
||||
```yaml
|
||||
steps:
|
||||
- uses: actions/cache@v1
|
||||
with:
|
||||
key: ${{ runner.os }}|${{ github.workspace }}|**/package-lock.json
|
||||
```
|
||||
|
||||
## Decision
|
||||
|
||||
### Add a hashFiles() function to the expression engine for calculating file hashes

`hashFiles()` will only be allowed on the runner side since it needs to read files on disk; using `hashFiles()` in any server-side evaluated expression will cause a runtime error.

`hashFiles()` will only support hashing files under `$GITHUB_WORKSPACE` since the expression is evaluated on the runner; if the customer uses a job container or a container action, the runner won't have access to the file system inside the container.

`hashFiles()` will only take one parameter:
|
||||
- `hashFiles('**/package-lock.json')` // Search files under $GITHUB_WORKSPACE and calculate a hash for them
|
||||
|
||||
**Question: Do we need to support more than one match pattern?**
|
||||
Ex: `hashFiles('**/package-lock.json', '!toolkit/core/package-lock.json', '!toolkit/io/package-lock.json')`
|
||||
Answer: Only support a single match pattern for GA; we can always add more later.
|
||||
|
||||
This will give customers a better experience with the `actions/cache` action's input.
|
||||
```yaml
|
||||
steps:
|
||||
- uses: actions/cache@v1
|
||||
with:
|
||||
key: ${{hashFiles('**/package-lock.json')}}-${{github.ref}}-${{runner.os}}
|
||||
```
|
||||
|
||||
For the search pattern, we will use basic globbing (`*`, `?`, and `[]`) and globstar (`**`).
|
||||
|
||||
Additional pattern details:
|
||||
- Root relative paths with `github.workspace` (the main repo)
|
||||
- Make `*` match files that start with `.`
|
||||
- Case insensitive on Windows
|
||||
- Accept `\` or `/` path separators on Windows
|
||||
|
||||
Hashing logic (a sketch follows this list):
1. Get all files under `$GITHUB_WORKSPACE`.
2. Use the search pattern to filter down to the files that match it. (The search pattern only applies to file paths, not folder paths.)
3. Sort all matched files by full file path in alphabetical order.
4. Use the SHA-256 algorithm to hash each matched file and store the hash result.
5. Use SHA-256 to hash all stored file hashes to get the final 64-character hash result.
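A Python approximation of this procedure (illustration only; the runner implements it in C#, and Python's `glob` differs slightly from the pattern rules above, for example around files that start with a dot):

```python
import glob
import hashlib
import os

def hash_files(workspace, pattern):
    """Approximate hashFiles('<pattern>') for files under `workspace`."""
    matches = sorted(
        p for p in glob.glob(os.path.join(workspace, pattern), recursive=True)
        if os.path.isfile(p)            # the pattern applies to files, not folders
    )
    combined = hashlib.sha256()
    for path in matches:
        with open(path, "rb") as f:
            combined.update(hashlib.sha256(f.read()).digest())  # hash of the per-file hashes
    return combined.hexdigest()         # final 64-character result

# e.g. hash_files(os.environ["GITHUB_WORKSPACE"], "**/package-lock.json")
```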
|
||||
|
||||
**Question: Should we include the folder structure info into the hash?**
|
||||
Answer: No
|
||||
30 docs/adrs/0280-command-input-echoing.md (new file)
@@ -0,0 +1,30 @@
|
||||
# ADR 0280: Echoing of Command Input
|
||||
|
||||
**Date**: 2019-11-04
|
||||
|
||||
**Status**: Accepted
|
||||
|
||||
## Context
|
||||
|
||||
Command echoing as a default behavior tends to clutter the user logs, so we want to swap to a system where users have to opt in to see this information.
|
||||
|
||||
Command outputs will still be echoed in the case there are any errors processing such commands. This is so the end user can have more context on why the command failed and help with troubleshooting.
|
||||
|
||||
Echo output in the user logs can be explicitly controlled by the new commands `::echo::on` and `::echo::off`. By default, echoing is enabled if `ACTIONS_STEP_DEBUG` secret is enabled, otherwise echoing is disabled.
|
||||
|
||||
## Decision
|
||||
- The only commands that currently echo output are
|
||||
- `remove-matcher`
|
||||
- `add-matcher`
|
||||
- `add-path`
|
||||
- These will no longer echo the command, if processed successfully
|
||||
- All commands echo the input when any of these conditions is fulfilled:
|
||||
1. When such commands fail with an error
|
||||
2. When `::echo::on` is set
|
||||
3. When the `ACTIONS_STEP_DEBUG` is set, and echoing hasn't been explicitly disabled with `::echo::off`
|
||||
- There are a few commands that won't be echoed, even when echo is enabled. These are (as of 2019/11/04):
|
||||
- `add-mask`
|
||||
- `debug`
|
||||
- `warning`
|
||||
- `error`
|
||||
- The commands above will not echo, either because echoing the command would leak secrets (e.g. `add-mask`), or because it would not add any additional troubleshooting information to the logs (e.g. `debug`). It's expected that future commands would follow these "echo-suppressing" guidelines as well. Echo-suppressed commands are still free to output other information to the logs, as deemed fit. The sketch below condenses this behavior.
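A condensed sketch of these rules (Python, with illustrative names; suppressed commands are treated here as never echoing):

```python
# Commands whose input is never echoed (see the list above).
ECHO_SUPPRESSED = {"add-mask", "debug", "warning", "error"}

def should_echo(command, *, failed, echo_setting, step_debug):
    """echo_setting: True after ::echo::on, False after ::echo::off, None if unset."""
    if command in ECHO_SUPPRESSED:
        return False                  # never echoed, even on failure, in this sketch
    if failed:
        return True                   # failures always echo for troubleshooting
    if echo_setting is not None:
        return echo_setting           # an explicit ::echo::on / ::echo::off wins
    return step_debug                 # otherwise follow ACTIONS_STEP_DEBUG

assert should_echo("add-path", failed=False, echo_setting=None, step_debug=False) is False
assert should_echo("add-path", failed=True, echo_setting=None, step_debug=False) is True
assert should_echo("add-path", failed=False, echo_setting=False, step_debug=True) is False
assert should_echo("add-mask", failed=False, echo_setting=True, step_debug=True) is False
```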
|
||||
48 docs/adrs/0297-base64-masking-trailing-characters.md (new file)
@@ -0,0 +1,48 @@
|
||||
# ADR 0297: Base64 Masking Trailing Characters
|
||||
|
||||
**Date** 2020-01-21
|
||||
|
||||
**Status** Proposed
|
||||
|
||||
## Context
|
||||
|
||||
The Runner registers a number of Value Encoders, which mask various encodings of a provided secret. Currently, we register three base64 encoders:
|
||||
- The base64 encoded secret
|
||||
- The secret with the first character removed then base64 encoded
|
||||
- The secret with the first two characters removed then base64 encoded
|
||||
|
||||
This gives us good coverage across the board for secrets and secrets with a prefix (i.e. `base64($user:$pass)`).
|
||||
|
||||
However, we don't have great coverage for cases where the secret has a string appended to it before it is base64 encoded (i.e. `base64($pass\n)`).

Most notably we've seen this as a result of user error, where a user accidentally appends a newline or space character before encoding their secret in base64.
|
||||
|
||||
## Decision
|
||||
|
||||
### Trim end characters
|
||||
|
||||
We are going to modify all existing base64 encoders to trim information before registering as a secret.
|
||||
We will trim:
|
||||
- `=` from the end of all base64 strings. This is a padding character that contains no information.
|
||||
- Based on the number of `=`'s at the end of a base64 string, a malicious user could predict the length of the original secret modulo 3.
|
||||
- If a user saw `***==`, they would know the secret could be 1,4,7,10... characters.
|
||||
- If a string contains `=` we will also trim the last non-padding character from the base64 secret.
|
||||
- This character can change if a string is appended to the secret before the encoding.
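To make the trimming concrete, here is a Python sketch of the four values that would be registered for one secret (illustration only; the real encoders live in the runner's C# codebase):

```python
import base64

def trimmed_b64(value):
    """Base64-encode `value`, drop '=' padding and, when padding was present,
    also drop the last non-padding character (it changes if a suffix is
    appended to the secret before encoding)."""
    encoded = base64.b64encode(value.encode()).decode()
    if encoded.endswith("="):
        return encoded.rstrip("=")[:-1]
    return encoded

def base64_masks(secret):
    return [
        base64.b64encode(secret.encode()).decode(),  # full encoded secret
        trimmed_b64(secret),                         # trimmed variant
        trimmed_b64(secret[1:]),                     # first char removed, then trimmed
        trimmed_b64(secret[2:]),                     # first two chars removed, then trimmed
    ]

print(base64_masks("hunter2"))
```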
|
||||
|
||||
|
||||
### Register a fourth encoder
|
||||
|
||||
We will also add back in the original base64 encoded secret encoder for four total encoders:
|
||||
- The base64 encoded secret
|
||||
- The base64 encoded secret trimmed
|
||||
- The secret with the first character removed then base64 encoded and trimmed
|
||||
- The secret with the first two characters removed then base64 encoded and trimmed
|
||||
|
||||
This allows us to fully cover the most common scenario where a user base64 encodes their secret and expects the entire thing to be masked.
|
||||
This will result in us only revealing length or bit information when a prefix or suffix is added to a secret before encoding.
|
||||
|
||||
## Consequences
|
||||
|
||||
- In the case where a secret has a prefix or suffix added before base64 encoding, we may now reveal up to 20 bits of information and the length of the original string modulo 3, rather than the original 16 bits and no length information
- Secrets with a suffix appended before encoding will now be masked across the board. Previously such a secret was only masked if it was a multiple of 3 characters long
- Performance will suffer in a negligible way
|
||||
35 docs/adrs/0354-runner-machine-info.md (new file)
@@ -0,0 +1,35 @@
|
||||
# ADR 354: Expose runner machine info
|
||||
|
||||
**Date**: 2020-03-02
|
||||
|
||||
**Status**: Pending
|
||||
|
||||
## Context
|
||||
|
||||
- Provide a mechanism in the runner to include extra information in `Set up job` step's log.
|
||||
Ex: Include OS/Software info from Hosted image.
|
||||
|
||||
## Decision
|
||||
|
||||
The runner will look for a file named `.setup_info` under the runner's root directory. The file can be JSON with a simple schema.
|
||||
```json
|
||||
[
|
||||
{
|
||||
"group": "OS Detail",
|
||||
"detail": "........"
|
||||
},
|
||||
{
|
||||
"group": "Software Detail",
|
||||
"detail": "........"
|
||||
}
|
||||
]
|
||||
```
|
||||
The runner will use `##[group]` and `##[endgroup]` to fold all detail info into an expandable group.
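A best-effort reader could look roughly like the following Python sketch (the runner does this in C# while writing the `Set up job` log):

```python
import json
import os

def print_setup_info(runner_root):
    """Best effort: read .setup_info from the runner root and emit each
    entry as a collapsible ##[group] ... ##[endgroup] section."""
    path = os.path.join(runner_root, ".setup_info")
    try:
        with open(path, encoding="utf-8") as f:
            entries = json.load(f)
    except (OSError, ValueError):
        return  # a missing or malformed file is silently ignored
    for entry in entries:
        print(f"##[group]{entry.get('group', '')}")
        print(entry.get("detail", ""))
        print("##[endgroup]")
```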
|
||||
|
||||
Both [virtual-environments](https://github.com/actions/virtual-environments) and self-hosted runners can use this mechanism to add extra logging info to the `Set up job` step's log.
|
||||
|
||||
## Consequences
|
||||
|
||||
1. Change the runner to best-effort read/parse the `.setup_info` file under the runner root directory.
|
||||
2. [virtual-environments](https://github.com/actions/virtual-environments) generate the file during image generation.
|
||||
3. Change MMS provisioner to properly copy the file to runner root directory at runtime.
|
||||
19 docs/adrs/README.md (new file)
@@ -0,0 +1,19 @@
|
||||
# ADRs
|
||||
|
||||
ADR, short for "Architecture Decision Record", is a way of capturing important architectural decisions, along with their context and consequences.
|
||||
|
||||
This folder includes ADRs for the actions runner. ADRs are proposed in the form of a pull request, and they commonly follow this format:
|
||||
|
||||
* **Title**: short present tense imperative phrase, less than 50 characters, like a git commit message.
|
||||
|
||||
* **Status**: proposed, accepted, rejected, deprecated, superseded, etc.
|
||||
|
||||
* **Context**: what is the issue that we're seeing that is motivating this decision or change.
|
||||
|
||||
* **Decision**: what is the change that we're actually proposing or doing.
|
||||
|
||||
* **Consequences**: what becomes easier or more difficult to do because of this change.
|
||||
|
||||
---
|
||||
|
||||
- More information about ADRs can be found [here](https://github.com/joelparkerhenderson/architecture_decision_record).
|
||||
@@ -1,10 +1,31 @@
|
||||
# Contribution guide for developers
|
||||
# Contributions
|
||||
|
||||
## Required Dev Dependencies
|
||||
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors.
|
||||
|
||||
> IMPORTANT: Building your own runner is critical for the dev inner loop process when contributing changes. However, only runners built and distributed by GitHub (releases) are supported in production. Be aware that workflows and orchestrations run service side with the runner being a remote process to run steps. For that reason, the service can pull the runner forward so customizations can be lost.
|
||||
|
||||
## Issues
|
||||
|
||||
Log issues for both bugs and enhancement requests. Logging issues is important for the open community.
|
||||
|
||||
Issues in this repository should be for the runner application. Note that the VM and virtual machine images (including the developer toolsets) installed on the actions hosted machine pools are located [in this repository](https://github.com/actions/virtual-environments)
|
||||
|
||||
## Enhancements and Feature Requests
|
||||
|
||||
We ask that we have agreement on taking a change before significant time is invested in code changes.

1. Create a feature request. Once agreed, we will take the enhancement
|
||||
2. Create an ADR to agree on the details of the change.
|
||||
|
||||
An ADR is an Architectural Decision Record. This allows consensus on the direction forward and also serves as a record of the change and motivation. [Read more here](adrs/README.md)
|
||||
|
||||
## Development Life Cycle
|
||||
|
||||
### Required Dev Dependencies
|
||||
|
||||
 Git for Windows [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
|
||||
|
||||
## To Build, Test, Layout
|
||||
### To Build, Test, Layout
|
||||
|
||||
Navigate to the `src` directory and run the following command:
|
||||
|
||||
@@ -23,18 +44,18 @@ Sample developer flow:
|
||||
```bash
|
||||
git clone https://github.com/actions/runner
|
||||
cd ./src
|
||||
./dev.(sh/cmd) layout # the runner that build from source is in {root}/_layout
|
||||
./dev.(sh/cmd) layout # the runner that built from source is in {root}/_layout
|
||||
<make code changes>
|
||||
./dev.(sh/cmd) build # {root}/_layout will get updated
|
||||
./dev.(sh/cmd) test # run all unit tests before git commit/push
|
||||
```
|
||||
|
||||
## Editors
|
||||
### Editors
|
||||
|
||||
[Using Visual Studio 2019](https://www.visualstudio.com/vs/)
|
||||
[Using Visual Studio Code](https://code.visualstudio.com/)
|
||||
[Using Visual Studio 2019](https://www.visualstudio.com/vs/)
|
||||
|
||||
## Styling
|
||||
### Styling
|
||||
|
||||
We use the .NET Foundation and CoreCLR style guidelines [located here](
|
||||
https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md)
|
||||
|
||||
61 docs/design/auth.md (new file)
@@ -0,0 +1,61 @@
|
||||
# Runner Authentication and Authorization
|
||||
|
||||
## Goals
|
||||
- Support runner installs in untrusted domains.
|
||||
- The account that configures or runs the runner process is not relevant for accessing GitHub resources.
|
||||
- Accessing GitHub resources is done with a per-job token which expires when the job completes.
|
||||
- The token is granted to trusted parts of the system including the runner, actions and script steps specified by the workflow author as trusted.
|
||||
- All OAuth tokens that come from the Token Service that the runner uses to access Actions Service resources are the same. It's just the scope and expiration of the token that may vary.
|
||||
|
||||
## Configuration
|
||||
|
||||
Configuring a self-hosted runner is [covered here in the documentation](https://help.github.com/en/actions/hosting-your-own-runners/adding-self-hosted-runners).
|
||||
|
||||
Configuration is done with the user being authenticated via a time-limited, GitHub runner registration token.
|
||||
|
||||
*Your credentials are never used for registering the runner with the service.*
|
||||
|
||||

|
||||
|
||||
During configuration, an RSA public/private key pair is created and the private key is stored in a file on disk. On Windows, the content is protected with DPAPI (machine-level encrypted - the runner is only valid on that machine) and on Linux/OSX with `chmod` permissions.
|
||||
|
||||
Using your credentials, the runner is registered with the service by sending the public key to the service, which adds that runner to the pool and stores the public key; the Token Service will generate a `clientId` associated with the public key.
|
||||
|
||||
## Start and Listen
|
||||
|
||||
After configuring the runner, the runner can be started interactively (`./run.cmd` or `./run.sh`) or as a service.
|
||||
|
||||

|
||||
|
||||
On start, the runner listener process loads the RSA private key (on Windows decrypting with machine key DPAPI), and asks the Token Service for an OAuth token which is signed with the RSA private key.
|
||||
The server then responds with an OAuth token that grants permission to access the message queue (HTTP long poll), allowing the runner to acquire the messages it will eventually run.
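The exchange can be pictured with a short sketch (Python, using the third-party PyJWT and requests packages; the claim names and request fields are illustrative assumptions, not the service's documented contract):

```python
import time
import jwt        # PyJWT
import requests

def build_client_assertion(client_id, token_endpoint, private_key_pem):
    """Sign a short-lived JWT with the runner's RSA private key."""
    now = int(time.time())
    claims = {"sub": client_id, "iss": client_id, "aud": token_endpoint,
              "iat": now, "nbf": now, "exp": now + 300}
    return jwt.encode(claims, private_key_pem, algorithm="RS256")

def exchange_for_oauth_token(client_id, token_endpoint, private_key_pem):
    assertion = build_client_assertion(client_id, token_endpoint, private_key_pem)
    resp = requests.post(token_endpoint, data={
        "grant_type": "client_credentials",
        "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
        "client_assertion": assertion,
    })
    resp.raise_for_status()
    return resp.json()["access_token"]  # re-exchanged periodically while listening
```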
|
||||
|
||||
## Run a workflow
|
||||
|
||||
When a workflow is run, its labels are evaluated, it is matched to a runner, and a message is placed in a queue of messages for that runner.
|
||||
The runner then starts listening for jobs via the message queue HTTP long poll.
|
||||
The message is encrypted with the runner's public key, stored during runner configuration.
|
||||
|
||||

|
||||
|
||||
A workflow is queued as a result of a triggered [event](https://help.github.com/en/actions/reference/events-that-trigger-workflows). Workflows can be scheduled to [run at specific UTC times](https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) using POSIX `cron` syntax.
|
||||
An [OAuth token](http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html) is generated, granting limited access to the host in Actions Service associated with the github.com repository/organization.
|
||||
The lifetime of the OAuth token is the lifetime of the run or at most the [job timeout (default: 6 hours)](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idtimeout-minutes), plus 10 additional minutes.
|
||||
|
||||
## Accessing GitHub resources
|
||||
|
||||
The job message sent to the runner contains the OAuth token to talk back to the Actions Service.
|
||||
The runner listener parent process will spawn a runner worker process for that job and send it the job message over IPC.
|
||||
The token is never persisted.
|
||||
|
||||
Each action is run as a unique subprocess.
|
||||
The encrypted access token will be provided as an environment variable in each action subprocess.
|
||||
The token is registered with the runner as a secret and scrubbed from the logs as they are written.
|
||||
|
||||
Authentication in a workflow run to github.com can be accomplished by using the [`GITHUB_TOKEN`](https://help.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#about-the-github_token-secret) secret. This token expires after 60 minutes. Please note that this token is different from the OAuth token that the runner uses to talk to the Actions Service.
|
||||
|
||||
## Hosted runner authentication
|
||||
|
||||
Hosted runner authentication differs from self-hosted authentication in that runners do not undergo a registration process, but instead, the hosted runners get the OAuth token directly by reading the `.credentials` file. The scope of this particular token is limited for a given workflow job execution, and the token is revoked as soon as the job is finished.
|
||||
|
||||

|
||||
BIN docs/res/hosted-config-start.png (new binary file, 31 KiB; not shown)
52 docs/res/runner-auth-diags.txt (new file)
@@ -0,0 +1,52 @@
|
||||
# Markup used to generate the runner auth diagrams: https://websequencediagrams.com
|
||||
|
||||
title Runner Configuration (self-hosted only)
|
||||
|
||||
note left of Runner: GitHub repo URL as input
|
||||
Runner->github.com: Retrieve Actions Service access using runner registration token
|
||||
github.com->Runner: Access token for Actions Service
|
||||
note left of Runner: Generate RSA key pair
|
||||
note left of Runner: Store encrypted RSA private key on disk
|
||||
Runner->Actions Service: Register runner using Actions Service access token
|
||||
note right of Runner: Runner name, RSA public key sent
|
||||
note right of Actions Service: Public key stored
|
||||
Actions Service->Token Service: Register runner as an app along with the RSA public key
|
||||
note right of Token Service: Public key stored
|
||||
Token Service->Actions Service: Client Id for the runner application
|
||||
Actions Service->Runner: Client Id and Token Endpoint URL
|
||||
note left of Runner: Store runner configuration info into .runner file
|
||||
note left of Runner: Store Token registration info into .credentials file
|
||||
|
||||
title Runner Start and Running (self-hosted only)
|
||||
|
||||
Runner.Listener->Runner.Listener: Start
|
||||
note left of Runner.Listener: Load config info from .runner
|
||||
note left of Runner.Listener: Load token registration from .credentials
|
||||
Runner.Listener->Token Service: Exchange OAuth token (happens every 50 mins)
|
||||
note right of Runner.Listener: Construct JWT token, use Client Id signed by RSA private key
|
||||
note left of Actions Service: Find corresponding RSA public key, use Client Id\nVerify JWT token's signature
|
||||
Token Service->Runner.Listener: OAuth token with limited permission and valid for 50 mins
|
||||
Runner.Listener->Actions Service: Connect to Actions Service with OAuth token
|
||||
Actions Service->Runner.Listener: Workflow job
|
||||
|
||||
title Running workflow
|
||||
|
||||
Runner.Listener->Service (Message Queue): Get message
|
||||
note right of Runner.Listener: Authenticate with exchanged OAuth token
|
||||
Event->Actions Service: Queue workflow
|
||||
Actions Service->Actions Service: Generate OAuth token per job
|
||||
Actions Service->Actions Service: Build job message with the OAuth token
|
||||
Actions Service->Actions Service: Encrypt job message with the target runner's public key
|
||||
Actions Service->Service (Message Queue): Send encrypted job message to runner
|
||||
Service (Message Queue)->Runner.Listener: Send job
|
||||
note right of Runner.Listener: Decrypt message with runner's private key
|
||||
Runner.Listener->Runner.Worker: Create worker process per job and run the job
|
||||
|
||||
title Runner Configuration, Start and Running (hosted only)
|
||||
|
||||
Machine Management Service->Runner.Listener: Construct .runner configuration file, store token in .credentials
|
||||
Runner.Listener->Runner.Listener: Start
|
||||
note left of Runner.Listener: Load config info from .runner
|
||||
note left of Runner.Listener: Load OAuth token from .credentials
|
||||
Runner.Listener->Actions Service: Connect to Actions Service with OAuth token in .credentials
|
||||
Actions Service->Runner.Listener: Workflow job
|
||||
BIN
docs/res/self-hosted-config.png
Normal file
BIN
docs/res/self-hosted-config.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 98 KiB |
BIN
docs/res/self-hosted-start.png
Normal file
BIN
docs/res/self-hosted-start.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 43 KiB |
BIN
docs/res/workflow-run.png
Normal file
BIN
docs/res/workflow-run.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 46 KiB |
@@ -28,7 +28,7 @@ Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 depe
|
||||
```
|
||||
You can easily correct the problem by executing `./bin/installdependencies.sh`.
|
||||
The `installdependencies.sh` script should install all required dependencies on all supported Linux versions.
|
||||
> Note: The `installdependencies.sh` script will try to use the default package management mechanism on your Linux flavor (ex. `yum`/`apt-get`/`apt`). You might need to deal with error coming from the package management mechanism related to your setup, like [#1353](https://github.com/Microsoft/vsts-agent/issues/1353)
|
||||
> Note: The `installdependencies.sh` script will try to use the default package management mechanism on your Linux flavor (ex. `yum`/`apt-get`/`apt`).
|
||||
|
||||
### Full dependencies list
|
||||
|
||||
|
||||
@@ -9,4 +9,4 @@
|
||||
- Windows Server 2016 64-bit
|
||||
- Windows Server 2019 64-bit
|
||||
|
||||
## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30)
|
||||
## [More .NET Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30)
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
FROM mcr.microsoft.com/dotnet/core/runtime-deps:2.1
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
curl \
|
||||
git \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
@@ -1,150 +0,0 @@
|
||||
FROM centos:6
|
||||
|
||||
# Install dependencies
|
||||
|
||||
RUN yum install -y \
|
||||
centos-release-SCL \
|
||||
epel-release \
|
||||
wget \
|
||||
unzip \
|
||||
&& \
|
||||
rpm --import http://linuxsoft.cern.ch/cern/slc6X/x86_64/RPM-GPG-KEY-cern && \
|
||||
wget -O /etc/yum.repos.d/slc6-devtoolset.repo http://linuxsoft.cern.ch/cern/devtoolset/slc6-devtoolset.repo && \
|
||||
yum install -y \
|
||||
"perl(Time::HiRes)" \
|
||||
autoconf \
|
||||
cmake \
|
||||
cmake3 \
|
||||
devtoolset-2-toolchain \
|
||||
doxygen \
|
||||
expat-devel \
|
||||
gcc \
|
||||
gcc-c++ \
|
||||
gdb \
|
||||
gettext-devel \
|
||||
krb5-devel \
|
||||
libedit-devel \
|
||||
libidn-devel \
|
||||
libmetalink-devel \
|
||||
libnghttp2-devel \
|
||||
libssh2-devel \
|
||||
libunwind-devel \
|
||||
libuuid-devel \
|
||||
lttng-ust-devel \
|
||||
lzma \
|
||||
ncurses-devel \
|
||||
openssl-devel \
|
||||
perl-devel \
|
||||
python-argparse \
|
||||
python27 \
|
||||
readline-devel \
|
||||
swig \
|
||||
xz \
|
||||
zlib-devel \
|
||||
&& \
|
||||
yum clean all
|
||||
|
||||
# Build and install clang and lldb 3.9.1
|
||||
|
||||
RUN wget ftp://sourceware.org/pub/binutils/snapshots/binutils-2.29.1.tar.xz && \
|
||||
wget http://releases.llvm.org/3.9.1/cfe-3.9.1.src.tar.xz && \
|
||||
wget http://releases.llvm.org/3.9.1/llvm-3.9.1.src.tar.xz && \
|
||||
wget http://releases.llvm.org/3.9.1/lldb-3.9.1.src.tar.xz && \
|
||||
wget http://releases.llvm.org/3.9.1/compiler-rt-3.9.1.src.tar.xz && \
|
||||
\
|
||||
tar -xf binutils-2.29.1.tar.xz && \
|
||||
tar -xf llvm-3.9.1.src.tar.xz && \
|
||||
mkdir llvm-3.9.1.src/tools/clang && \
|
||||
mkdir llvm-3.9.1.src/tools/lldb && \
|
||||
mkdir llvm-3.9.1.src/projects/compiler-rt && \
|
||||
tar -xf cfe-3.9.1.src.tar.xz --strip 1 -C llvm-3.9.1.src/tools/clang && \
|
||||
tar -xf lldb-3.9.1.src.tar.xz --strip 1 -C llvm-3.9.1.src/tools/lldb && \
|
||||
tar -xf compiler-rt-3.9.1.src.tar.xz --strip 1 -C llvm-3.9.1.src/projects/compiler-rt && \
|
||||
rm binutils-2.29.1.tar.xz && \
|
||||
rm cfe-3.9.1.src.tar.xz && \
|
||||
rm lldb-3.9.1.src.tar.xz && \
|
||||
rm llvm-3.9.1.src.tar.xz && \
|
||||
rm compiler-rt-3.9.1.src.tar.xz && \
|
||||
\
|
||||
mkdir llvmbuild && \
|
||||
cd llvmbuild && \
|
||||
scl enable python27 devtoolset-2 \
|
||||
' \
|
||||
cmake3 \
|
||||
-DCMAKE_CXX_COMPILER=/opt/rh/devtoolset-2/root/usr/bin/g++ \
|
||||
-DCMAKE_C_COMPILER=/opt/rh/devtoolset-2/root/usr/bin/gcc \
|
||||
-DCMAKE_LINKER=/opt/rh/devtoolset-2/root/usr/bin/ld \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DLLVM_LIBDIR_SUFFIX=64 \
|
||||
-DLLVM_ENABLE_EH=1 \
|
||||
-DLLVM_ENABLE_RTTI=1 \
|
||||
-DLLVM_BINUTILS_INCDIR=../binutils-2.29.1/include \
|
||||
../llvm-3.9.1.src \
|
||||
&& \
|
||||
make -j $(($(getconf _NPROCESSORS_ONLN)+1)) && \
|
||||
make install \
|
||||
' && \
|
||||
cd .. && \
|
||||
rm -r llvmbuild && \
|
||||
rm -r llvm-3.9.1.src && \
|
||||
rm -r binutils-2.29.1
|
||||
|
||||
# Build and install curl 7.45.0
|
||||
|
||||
RUN wget https://curl.haxx.se/download/curl-7.45.0.tar.lzma && \
|
||||
tar -xf curl-7.45.0.tar.lzma && \
|
||||
rm curl-7.45.0.tar.lzma && \
|
||||
cd curl-7.45.0 && \
|
||||
scl enable python27 devtoolset-2 \
|
||||
' \
|
||||
./configure \
|
||||
--disable-dict \
|
||||
--disable-ftp \
|
||||
--disable-gopher \
|
||||
--disable-imap \
|
||||
--disable-ldap \
|
||||
--disable-ldaps \
|
||||
--disable-libcurl-option \
|
||||
--disable-manual \
|
||||
--disable-pop3 \
|
||||
--disable-rtsp \
|
||||
--disable-smb \
|
||||
--disable-smtp \
|
||||
--disable-telnet \
|
||||
--disable-tftp \
|
||||
--enable-ipv6 \
|
||||
--enable-optimize \
|
||||
--enable-symbol-hiding \
|
||||
--with-ca-bundle=/etc/pki/tls/certs/ca-bundle.crt \
|
||||
--with-nghttp2 \
|
||||
--with-gssapi \
|
||||
--with-ssl \
|
||||
--without-librtmp \
|
||||
&& \
|
||||
make install \
|
||||
' && \
|
||||
cd .. && \
|
||||
rm -r curl-7.45.0
|
||||
|
||||
# Install ICU 57.1
|
||||
|
||||
RUN wget http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-RHEL6-x64.tgz && \
|
||||
tar -xf icu4c-57_1-RHEL6-x64.tgz -C / && \
|
||||
rm icu4c-57_1-RHEL6-x64.tgz
|
||||
|
||||
# Compile and install a version of the git that supports the features that cli repo build needs
|
||||
# NOTE: The git needs to be built after the curl so that it can use the libcurl to add https
|
||||
# protocol support.
|
||||
RUN \
|
||||
wget https://www.kernel.org/pub/software/scm/git/git-2.9.5.tar.gz && \
|
||||
tar -xf git-2.9.5.tar.gz && \
|
||||
rm git-2.9.5.tar.gz && \
|
||||
cd git-2.9.5 && \
|
||||
make configure && \
|
||||
./configure --prefix=/usr/local --without-tcltk && \
|
||||
make -j $(nproc --all) all && \
|
||||
make install && \
|
||||
cd .. && \
|
||||
rm -r git-2.9.5
|
||||
|
||||
ENV LD_LIBRARY_PATH=/usr/local/lib
|
||||
@@ -1,17 +1,30 @@
|
||||
## Features
|
||||
- Remove runner flow: Change from PAT to "deletion token" in prompt (#225)
|
||||
- Expose github.run_id and github.run_number to action runtime env. (#224)
|
||||
- Expose whether debug is on/off via RUNNER_DEBUG. (#253)
|
||||
- Upload log on runner when worker gets killed due to cancellation timeout. (#255)
|
||||
- Update config.sh/cmd --help documentation (#282)
|
||||
- Set http_proxy and related env vars for job/service containers (#304)
|
||||
- Set both http_proxy and HTTP_PROXY env for runner/worker processes. (#298)
|
||||
|
||||
## Bugs
|
||||
- Clean up error messages for container scenarios (#221)
|
||||
- Pick shell from prependpath (#231)
|
||||
- Verify runner Windows service has started successfully after configuration (#236)
|
||||
- Detect source file path in L0 without using env. (#257)
|
||||
- Handle escaped '%' in commands data section (#200)
|
||||
- Allow container to be null/empty during matrix expansion (#266)
|
||||
- Translate problem matcher file to host path (#272)
|
||||
- Change hashFiles() expression function to use @actions/glob. (#268)
|
||||
- Default post-job action's condition to always(). (#293)
|
||||
- Support action.yaml file as action's entry file (#288)
|
||||
- Trace javascript action exit code to debug instead of user logs (#290)
|
||||
- Change prompt message when removing a runner to line up with GitHub.com UI (#303)
|
||||
- Include step.env as part of env context. (#300)
|
||||
- Update Base64 Encoders to deal with suffixes (#284)
|
||||
|
||||
## Misc
|
||||
- Runner code cleanup (#218, #227, #228, #229, #230)
|
||||
- Consume dotnet core 3.1 in runner. (#213)
|
||||
- Move .sln file under ./src (#238)
|
||||
- Treat warnings as errors during compile (#249)
|
||||
|
||||
## Windows x64
|
||||
We recommend configuring the runner under "<DRIVE>:\actions-runner". This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows
|
||||
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows
|
||||
```
|
||||
// Create a folder under the drive root
|
||||
mkdir \actions-runner ; cd \actions-runner
|
||||
@@ -19,7 +32,7 @@ mkdir \actions-runner ; cd \actions-runner
|
||||
Invoke-WebRequest -Uri https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-win-x64-<RUNNER_VERSION>.zip -OutFile actions-runner-win-x64-<RUNNER_VERSION>.zip
|
||||
// Extract the installer
|
||||
Add-Type -AssemblyName System.IO.Compression.FileSystem ;
|
||||
[System.IO.Compression.ZipFile]::ExtractToDirectory("$HOME\Downloads\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
|
||||
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
|
||||
```
|
||||
|
||||
## OSX
|
||||
@@ -28,7 +41,7 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
|
||||
// Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
// Download the latest runner package
|
||||
curl -O https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
||||
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
||||
// Extract the installer
|
||||
tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
||||
```
|
||||
@@ -39,7 +52,7 @@ tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
||||
// Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
// Download the latest runner package
|
||||
curl -O https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
||||
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
||||
// Extract the installer
|
||||
tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
||||
```
|
||||
@@ -50,7 +63,7 @@ tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
||||
// Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
// Download the latest runner package
|
||||
curl -O https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
||||
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
||||
// Extract the installer
|
||||
tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
||||
```
|
||||
@@ -61,7 +74,7 @@ tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
||||
// Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
// Download the latest runner package
|
||||
curl -O https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
||||
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
||||
// Extract the installer
|
||||
tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
||||
```
|
||||
|
||||
@@ -3,23 +3,23 @@ Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
# Visual Studio Version 16
|
||||
VisualStudioVersion = 16.0.29411.138
|
||||
MinimumVisualStudioVersion = 10.0.40219.1
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Common", "src\Runner.Common\Runner.Common.csproj", "{084289A3-CD7A-42E0-9219-4348B4B7E19B}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Common", "Runner.Common\Runner.Common.csproj", "{084289A3-CD7A-42E0-9219-4348B4B7E19B}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Listener", "src\Runner.Listener\Runner.Listener.csproj", "{7D461AEE-BF2A-4855-BD96-56921160B36A}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Listener", "Runner.Listener\Runner.Listener.csproj", "{7D461AEE-BF2A-4855-BD96-56921160B36A}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.PluginHost", "src\Runner.PluginHost\Runner.PluginHost.csproj", "{D0320EB1-CB6D-4179-BFDC-2F2B664A370C}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.PluginHost", "Runner.PluginHost\Runner.PluginHost.csproj", "{D0320EB1-CB6D-4179-BFDC-2F2B664A370C}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Plugins", "src\Runner.Plugins\Runner.Plugins.csproj", "{C23AFD6F-4DCD-4243-BC61-865BE31B9168}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Plugins", "Runner.Plugins\Runner.Plugins.csproj", "{C23AFD6F-4DCD-4243-BC61-865BE31B9168}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Sdk", "src\Runner.Sdk\Runner.Sdk.csproj", "{D0484633-DA97-4C34-8E47-1DADE212A57A}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Sdk", "Runner.Sdk\Runner.Sdk.csproj", "{D0484633-DA97-4C34-8E47-1DADE212A57A}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RunnerService", "src\Runner.Service\Windows\RunnerService.csproj", "{D12EBD71-0464-46D0-8394-40BCFBA0A6F2}"
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RunnerService", "Runner.Service\Windows\RunnerService.csproj", "{D12EBD71-0464-46D0-8394-40BCFBA0A6F2}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Worker", "src\Runner.Worker\Runner.Worker.csproj", "{C2F5B9FA-2621-411F-8EB2-273ED276F503}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Worker", "Runner.Worker\Runner.Worker.csproj", "{C2F5B9FA-2621-411F-8EB2-273ED276F503}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sdk", "src\Sdk\Sdk.csproj", "{D2EE812B-E4DF-49BB-AE87-12BC49949B5F}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sdk", "Sdk\Sdk.csproj", "{D2EE812B-E4DF-49BB-AE87-12BC49949B5F}"
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Test", "src\Test\Test.csproj", "{C932061F-F6A1-4F1E-B854-A6C6B30DC3EF}"
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Test", "Test\Test.csproj", "{C932061F-F6A1-4F1E-B854-A6C6B30DC3EF}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
@@ -46,4 +46,9 @@
|
||||
<PropertyGroup Condition="'$(Configuration)' == 'Debug'">
|
||||
<DefineConstants>$(DefineConstants);DEBUG</DefineConstants>
|
||||
</PropertyGroup>
|
||||
|
||||
<!-- Treat warnings as errors -->
|
||||
<PropertyGroup>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
</Project>
|
||||
|
||||
3
src/Misc/expressionFunc/hashFiles/.eslintignore
Normal file
3
src/Misc/expressionFunc/hashFiles/.eslintignore
Normal file
@@ -0,0 +1,3 @@
|
||||
dist/
|
||||
lib/
|
||||
node_modules/
|
||||
59
src/Misc/expressionFunc/hashFiles/.eslintrc.json
Normal file
59
src/Misc/expressionFunc/hashFiles/.eslintrc.json
Normal file
@@ -0,0 +1,59 @@
|
||||
{
|
||||
"plugins": ["jest", "@typescript-eslint"],
|
||||
"extends": ["plugin:github/es6"],
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 9,
|
||||
"sourceType": "module",
|
||||
"project": "./tsconfig.json"
|
||||
},
|
||||
"rules": {
|
||||
"eslint-comments/no-use": "off",
|
||||
"import/no-namespace": "off",
|
||||
"no-console": "off",
|
||||
"no-unused-vars": "off",
|
||||
"@typescript-eslint/no-unused-vars": "error",
|
||||
"@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
|
||||
"@typescript-eslint/no-require-imports": "error",
|
||||
"@typescript-eslint/array-type": "error",
|
||||
"@typescript-eslint/await-thenable": "error",
|
||||
"@typescript-eslint/ban-ts-ignore": "error",
|
||||
"camelcase": "off",
|
||||
"@typescript-eslint/camelcase": "error",
|
||||
"@typescript-eslint/class-name-casing": "error",
|
||||
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
|
||||
"@typescript-eslint/func-call-spacing": ["error", "never"],
|
||||
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
|
||||
"@typescript-eslint/no-array-constructor": "error",
|
||||
"@typescript-eslint/no-empty-interface": "error",
|
||||
"@typescript-eslint/no-explicit-any": "error",
|
||||
"@typescript-eslint/no-extraneous-class": "error",
|
||||
"@typescript-eslint/no-for-in-array": "error",
|
||||
"@typescript-eslint/no-inferrable-types": "error",
|
||||
"@typescript-eslint/no-misused-new": "error",
|
||||
"@typescript-eslint/no-namespace": "error",
|
||||
"@typescript-eslint/no-non-null-assertion": "warn",
|
||||
"@typescript-eslint/no-object-literal-type-assertion": "error",
|
||||
"@typescript-eslint/no-unnecessary-qualifier": "error",
|
||||
"@typescript-eslint/no-unnecessary-type-assertion": "error",
|
||||
"@typescript-eslint/no-useless-constructor": "error",
|
||||
"@typescript-eslint/no-var-requires": "error",
|
||||
"@typescript-eslint/prefer-for-of": "warn",
|
||||
"@typescript-eslint/prefer-function-type": "warn",
|
||||
"@typescript-eslint/prefer-includes": "error",
|
||||
"@typescript-eslint/prefer-interface": "error",
|
||||
"@typescript-eslint/prefer-string-starts-ends-with": "error",
|
||||
"@typescript-eslint/promise-function-async": "error",
|
||||
"@typescript-eslint/require-array-sort-compare": "error",
|
||||
"@typescript-eslint/restrict-plus-operands": "error",
|
||||
"semi": "off",
|
||||
"@typescript-eslint/semi": ["error", "never"],
|
||||
"@typescript-eslint/type-annotation-spacing": "error",
|
||||
"@typescript-eslint/unbound-method": "error"
|
||||
},
|
||||
"env": {
|
||||
"node": true,
|
||||
"es6": true,
|
||||
"jest/globals": true
|
||||
}
|
||||
}
|
||||
3
src/Misc/expressionFunc/hashFiles/.prettierignore
Normal file
3
src/Misc/expressionFunc/hashFiles/.prettierignore
Normal file
@@ -0,0 +1,3 @@
|
||||
dist/
|
||||
lib/
|
||||
node_modules/
|
||||
11
src/Misc/expressionFunc/hashFiles/.prettierrc.json
Normal file
11
src/Misc/expressionFunc/hashFiles/.prettierrc.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"printWidth": 80,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"semi": false,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "none",
|
||||
"bracketSpacing": false,
|
||||
"arrowParens": "avoid",
|
||||
"parser": "typescript"
|
||||
}
|
||||
1
src/Misc/expressionFunc/hashFiles/README.md
Normal file
1
src/Misc/expressionFunc/hashFiles/README.md
Normal file
@@ -0,0 +1 @@
|
||||
To update hashFiles under `Misc/layoutbin` run `npm install && npm run all`
|
||||
2347
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
Normal file
2347
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
35
src/Misc/expressionFunc/hashFiles/package.json
Normal file
35
src/Misc/expressionFunc/hashFiles/package.json
Normal file
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"name": "hashFiles",
|
||||
"version": "1.0.0",
|
||||
"description": "GitHub Actions HashFiles() expression function",
|
||||
"main": "lib/hashFiles.js",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"format": "prettier --write **/*.ts",
|
||||
"format-check": "prettier --check **/*.ts",
|
||||
"lint": "eslint src/**/*.ts",
|
||||
"pack": "ncc build -o ../../layoutbin/hashFiles",
|
||||
"all": "npm run build && npm run format && npm run lint && npm run pack"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/actions/runner.git"
|
||||
},
|
||||
"keywords": [
|
||||
"actions"
|
||||
],
|
||||
"author": "GitHub Actions",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/glob": "^0.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^12.7.12",
|
||||
"@typescript-eslint/parser": "^2.8.0",
|
||||
"@zeit/ncc": "^0.20.5",
|
||||
"eslint": "^5.16.0",
|
||||
"eslint-plugin-github": "^2.0.0",
|
||||
"prettier": "^1.19.1",
|
||||
"typescript": "^3.6.4"
|
||||
}
|
||||
}
|
||||
55
src/Misc/expressionFunc/hashFiles/src/hashFiles.ts
Normal file
55
src/Misc/expressionFunc/hashFiles/src/hashFiles.ts
Normal file
@@ -0,0 +1,55 @@
|
||||
import * as glob from '@actions/glob'
|
||||
import * as crypto from 'crypto'
|
||||
import * as fs from 'fs'
|
||||
import * as stream from 'stream'
|
||||
import * as util from 'util'
|
||||
import * as path from 'path'
|
||||
|
||||
async function run(): Promise<void> {
|
||||
// arg0 -> node
|
||||
// arg1 -> hashFiles.js
|
||||
// env[followSymbolicLinks] = true/null
|
||||
// env[patterns] -> glob patterns
|
||||
let followSymbolicLinks = false
|
||||
const matchPatterns = process.env.patterns || ''
|
||||
if (process.env.followSymbolicLinks === 'true') {
|
||||
console.log('Follow symbolic links')
|
||||
followSymbolicLinks = true
|
||||
}
|
||||
|
||||
console.log(`Match Pattern: ${matchPatterns}`)
|
||||
let hasMatch = false
|
||||
const githubWorkspace = process.cwd()
|
||||
const result = crypto.createHash('sha256')
|
||||
let count = 0
|
||||
const globber = await glob.create(matchPatterns, {followSymbolicLinks})
|
||||
for await (const file of globber.globGenerator()) {
|
||||
console.log(file)
|
||||
if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
|
||||
console.log(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`)
|
||||
continue
|
||||
}
|
||||
if (fs.statSync(file).isDirectory()) {
|
||||
console.log(`Skip directory '${file}'.`)
|
||||
continue
|
||||
}
|
||||
const hash = crypto.createHash('sha256')
|
||||
const pipeline = util.promisify(stream.pipeline)
|
||||
await pipeline(fs.createReadStream(file), hash)
|
||||
result.write(hash.digest())
|
||||
count++
|
||||
if (!hasMatch) {
|
||||
hasMatch = true
|
||||
}
|
||||
}
|
||||
result.end()
|
||||
|
||||
if (hasMatch) {
|
||||
console.log(`Find ${count} files to hash.`)
|
||||
console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`)
|
||||
} else {
|
||||
console.error(`__OUTPUT____OUTPUT__`)
|
||||
}
|
||||
}
|
||||
|
||||
run()
|
||||
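As the comments in `hashFiles.ts` above indicate, the script is driven entirely by environment variables and reports its SHA-256 digest between `__OUTPUT__` markers on stderr. A rough sketch of a host invoking the bundled script follows; the runner's actual expression-function host may differ, and the script path and parsing are assumptions for the example.

```
// Illustrative sketch: invoke the bundled hashFiles script the way its comments
// describe. Inputs are passed as environment variables; the digest comes back
// between __OUTPUT__ markers on stderr (empty when no files matched).
using System;
using System.Diagnostics;

class HashFilesInvoker
{
    static string RunHashFiles(string nodePath, string scriptPath, string patterns)
    {
        var psi = new ProcessStartInfo(nodePath, $"\"{scriptPath}\"")
        {
            RedirectStandardError = true,
            UseShellExecute = false
        };
        psi.Environment["patterns"] = patterns;           // glob patterns to hash
        psi.Environment["followSymbolicLinks"] = "true";  // optional flag

        using var proc = Process.Start(psi)!;
        string stderr = proc.StandardError.ReadToEnd();
        proc.WaitForExit();

        const string marker = "__OUTPUT__";
        int start = stderr.IndexOf(marker, StringComparison.Ordinal) + marker.Length;
        int end = stderr.IndexOf(marker, start, StringComparison.Ordinal);
        return stderr.Substring(start, end - start);
    }
}
```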
12
src/Misc/expressionFunc/hashFiles/tsconfig.json
Normal file
12
src/Misc/expressionFunc/hashFiles/tsconfig.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
|
||||
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
|
||||
"outDir": "./lib", /* Redirect output structure to the directory. */
|
||||
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
|
||||
"strict": true, /* Enable all strict type-checking options. */
|
||||
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
|
||||
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
|
||||
},
|
||||
"exclude": ["node_modules", "**/*.test.ts"]
|
||||
}
|
||||
2623
src/Misc/layoutbin/hashFiles/index.js
Normal file
2623
src/Misc/layoutbin/hashFiles/index.js
Normal file
File diff suppressed because it is too large
Load Diff
@@ -3,8 +3,6 @@
|
||||
<packageSources>
|
||||
<!--To inherit the global NuGet package sources remove the <clear/> line below -->
|
||||
<clear />
|
||||
<add key="dotnet-core" value="https://www.myget.org/F/dotnet-core/api/v3/index.json" />
|
||||
<add key="dotnet-buildtools" value="https://www.myget.org/F/dotnet-buildtools/api/v3/index.json" />
|
||||
<add key="api.nuget.org" value="https://api.nuget.org/v3/index.json" />
|
||||
</packageSources>
|
||||
</configuration>
|
||||
|
||||
@@ -9,26 +9,27 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
private static readonly EscapeMapping[] _escapeMappings = new[]
|
||||
{
|
||||
new EscapeMapping(token: "%", replacement: "%25"),
|
||||
new EscapeMapping(token: ";", replacement: "%3B"),
|
||||
new EscapeMapping(token: "\r", replacement: "%0D"),
|
||||
new EscapeMapping(token: "\n", replacement: "%0A"),
|
||||
new EscapeMapping(token: "]", replacement: "%5D"),
|
||||
new EscapeMapping(token: "%", replacement: "%25"),
|
||||
};
|
||||
|
||||
private static readonly EscapeMapping[] _escapeDataMappings = new[]
|
||||
{
|
||||
new EscapeMapping(token: "\r", replacement: "%0D"),
|
||||
new EscapeMapping(token: "\n", replacement: "%0A"),
|
||||
new EscapeMapping(token: "%", replacement: "%25"),
|
||||
};
|
||||
|
||||
private static readonly EscapeMapping[] _escapePropertyMappings = new[]
|
||||
{
|
||||
new EscapeMapping(token: "%", replacement: "%25"),
|
||||
new EscapeMapping(token: "\r", replacement: "%0D"),
|
||||
new EscapeMapping(token: "\n", replacement: "%0A"),
|
||||
new EscapeMapping(token: ":", replacement: "%3A"),
|
||||
new EscapeMapping(token: ",", replacement: "%2C"),
|
||||
new EscapeMapping(token: "%", replacement: "%25"),
|
||||
};
|
||||
|
||||
private readonly Dictionary<string, string> _properties = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
@@ -78,8 +78,10 @@ namespace GitHub.Runner.Common
|
||||
bool IsServiceConfigured();
|
||||
bool HasCredentials();
|
||||
CredentialData GetCredentials();
|
||||
CredentialData GetMigratedCredentials();
|
||||
RunnerSettings GetSettings();
|
||||
void SaveCredential(CredentialData credential);
|
||||
void SaveMigratedCredential(CredentialData credential);
|
||||
void SaveSettings(RunnerSettings settings);
|
||||
void DeleteCredential();
|
||||
void DeleteSettings();
|
||||
@@ -90,9 +92,11 @@ namespace GitHub.Runner.Common
|
||||
private string _binPath;
|
||||
private string _configFilePath;
|
||||
private string _credFilePath;
|
||||
private string _migratedCredFilePath;
|
||||
private string _serviceConfigFilePath;
|
||||
|
||||
private CredentialData _creds;
|
||||
private CredentialData _migratedCreds;
|
||||
private RunnerSettings _settings;
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
@@ -114,6 +118,9 @@ namespace GitHub.Runner.Common
|
||||
_credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials);
|
||||
Trace.Info("CredFilePath: {0}", _credFilePath);
|
||||
|
||||
_migratedCredFilePath = hostContext.GetConfigFile(WellKnownConfigFile.MigratedCredentials);
|
||||
Trace.Info("MigratedCredFilePath: {0}", _migratedCredFilePath);
|
||||
|
||||
_serviceConfigFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Service);
|
||||
Trace.Info("ServiceConfigFilePath: {0}", _serviceConfigFilePath);
|
||||
}
|
||||
@@ -123,7 +130,7 @@ namespace GitHub.Runner.Common
|
||||
public bool HasCredentials()
|
||||
{
|
||||
Trace.Info("HasCredentials()");
|
||||
bool credsStored = (new FileInfo(_credFilePath)).Exists;
|
||||
bool credsStored = (new FileInfo(_credFilePath)).Exists || (new FileInfo(_migratedCredFilePath)).Exists;
|
||||
Trace.Info("stored {0}", credsStored);
|
||||
return credsStored;
|
||||
}
|
||||
@@ -154,6 +161,16 @@ namespace GitHub.Runner.Common
|
||||
return _creds;
|
||||
}
|
||||
|
||||
public CredentialData GetMigratedCredentials()
|
||||
{
|
||||
if (_migratedCreds == null && File.Exists(_migratedCredFilePath))
|
||||
{
|
||||
_migratedCreds = IOUtil.LoadObject<CredentialData>(_migratedCredFilePath);
|
||||
}
|
||||
|
||||
return _migratedCreds;
|
||||
}
|
||||
|
||||
public RunnerSettings GetSettings()
|
||||
{
|
||||
if (_settings == null)
|
||||
@@ -188,6 +205,21 @@ namespace GitHub.Runner.Common
|
||||
File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden);
|
||||
}
|
||||
|
||||
public void SaveMigratedCredential(CredentialData credential)
|
||||
{
|
||||
Trace.Info("Saving {0} migrated credential @ {1}", credential.Scheme, _migratedCredFilePath);
|
||||
if (File.Exists(_migratedCredFilePath))
|
||||
{
|
||||
// Delete existing credential file first, since the file is hidden and not able to overwrite.
|
||||
Trace.Info("Delete exist runner migrated credential file.");
|
||||
IOUtil.DeleteFile(_migratedCredFilePath);
|
||||
}
|
||||
|
||||
IOUtil.SaveObject(credential, _migratedCredFilePath);
|
||||
Trace.Info("Migrated Credentials Saved.");
|
||||
File.SetAttributes(_migratedCredFilePath, File.GetAttributes(_migratedCredFilePath) | FileAttributes.Hidden);
|
||||
}
|
||||
|
||||
public void SaveSettings(RunnerSettings settings)
|
||||
{
|
||||
Trace.Info("Saving runner settings.");
|
||||
@@ -206,6 +238,7 @@ namespace GitHub.Runner.Common
|
||||
public void DeleteCredential()
|
||||
{
|
||||
IOUtil.Delete(_credFilePath, default(CancellationToken));
|
||||
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
|
||||
}
|
||||
|
||||
public void DeleteSettings()
|
||||
|
||||
@@ -19,11 +19,13 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
Runner,
|
||||
Credentials,
|
||||
MigratedCredentials,
|
||||
RSACredentials,
|
||||
Service,
|
||||
CredentialStore,
|
||||
Certificates,
|
||||
Options,
|
||||
SetupInfo,
|
||||
}
|
||||
|
||||
public static class Constants
|
||||
@@ -136,6 +138,12 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
public static class RunnerEvent
|
||||
{
|
||||
public static readonly string Register = "register";
|
||||
public static readonly string Remove = "remove";
|
||||
}
|
||||
|
||||
public static class Pipeline
|
||||
{
|
||||
public static class Path
|
||||
@@ -162,7 +170,8 @@ namespace GitHub.Runner.Common
|
||||
public static class Path
|
||||
{
|
||||
public static readonly string ActionsDirectory = "_actions";
|
||||
public static readonly string ActionManifestFile = "action.yml";
|
||||
public static readonly string ActionManifestYmlFile = "action.yml";
|
||||
public static readonly string ActionManifestYamlFile = "action.yaml";
|
||||
public static readonly string BinDirectory = "bin";
|
||||
public static readonly string DiagDirectory = "_diag";
|
||||
public static readonly string ExternalsDirectory = "externals";
|
||||
|
||||
@@ -281,6 +281,12 @@ namespace GitHub.Runner.Common
|
||||
".credentials");
|
||||
break;
|
||||
|
||||
case WellKnownConfigFile.MigratedCredentials:
|
||||
path = Path.Combine(
|
||||
GetDirectory(WellKnownDirectory.Root),
|
||||
".credentials_migrated");
|
||||
break;
|
||||
|
||||
case WellKnownConfigFile.RSACredentials:
|
||||
path = Path.Combine(
|
||||
GetDirectory(WellKnownDirectory.Root),
|
||||
@@ -316,6 +322,13 @@ namespace GitHub.Runner.Common
|
||||
GetDirectory(WellKnownDirectory.Root),
|
||||
".options");
|
||||
break;
|
||||
|
||||
case WellKnownConfigFile.SetupInfo:
|
||||
path = Path.Combine(
|
||||
GetDirectory(WellKnownDirectory.Root),
|
||||
".setup_info");
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new NotSupportedException($"Unexpected well known config file: '{configFile}'");
|
||||
}
|
||||
|
||||
@@ -24,7 +24,6 @@ namespace GitHub.Runner.Common
|
||||
|
||||
private Guid _timelineId;
|
||||
private Guid _timelineRecordId;
|
||||
private string _pageId;
|
||||
private FileStream _pageData;
|
||||
private StreamWriter _pageWriter;
|
||||
private int _byteCount;
|
||||
@@ -40,7 +39,6 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_totalLines = 0;
|
||||
_pageId = Guid.NewGuid().ToString();
|
||||
_pagesFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), PagingFolder);
|
||||
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
|
||||
Directory.CreateDirectory(_pagesFolder);
|
||||
@@ -102,7 +100,7 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
EndPage();
|
||||
_byteCount = 0;
|
||||
_dataFileName = Path.Combine(_pagesFolder, $"{_pageId}_{++_pageCount}.log");
|
||||
_dataFileName = Path.Combine(_pagesFolder, $"{_timelineId}_{_timelineRecordId}_{++_pageCount}.log");
|
||||
_pageData = new FileStream(_dataFileName, FileMode.CreateNew);
|
||||
_pageWriter = new StreamWriter(_pageData, System.Text.Encoding.UTF8);
|
||||
}
|
||||
|
||||
@@ -50,6 +50,10 @@ namespace GitHub.Runner.Common
|
||||
|
||||
// agent update
|
||||
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState);
|
||||
|
||||
// runner authorization url
|
||||
Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId);
|
||||
Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error);
|
||||
}
|
||||
|
||||
public sealed class RunnerServer : RunnerService, IRunnerServer
|
||||
@@ -334,5 +338,20 @@ namespace GitHub.Runner.Common
|
||||
CheckConnection(RunnerConnectionType.Generic);
|
||||
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState);
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------
|
||||
// Runner Auth Url
|
||||
//-----------------------------------------------------------------
|
||||
public Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.MessageQueue);
|
||||
return _messageTaskAgentClient.GetAgentAuthUrlAsync(runnerPoolId, runnerId);
|
||||
}
|
||||
|
||||
public Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.MessageQueue);
|
||||
return _messageTaskAgentClient.ReportAgentAuthUrlMigrationErrorAsync(runnerPoolId, runnerId, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -190,7 +190,7 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
name: Constants.Runner.CommandLine.Args.Token,
|
||||
description: "Enter runner deletion token:",
|
||||
description: "Enter runner remove token:",
|
||||
defaultValue: string.Empty,
|
||||
validator: Validators.NonEmptyValidator);
|
||||
}
|
||||
@@ -291,7 +291,7 @@ namespace GitHub.Runner.Listener
|
||||
if (!string.IsNullOrEmpty(result))
|
||||
{
|
||||
// After read the arg from input commandline args, remove it from Arg dictionary,
|
||||
// This will help if bad arg value passed through CommandLine arg, when ConfigurationManager ask CommandSetting the second time,
|
||||
// This will help if bad arg value passed through CommandLine arg, when ConfigurationManager ask CommandSetting the second time,
|
||||
// It will prompt for input instead of continue use the bad input.
|
||||
_trace.Info($"Remove {name} from Arg dictionary.");
|
||||
RemoveArg(name);
|
||||
|
||||
@@ -1,19 +1,18 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.OAuth;
|
||||
using GitHub.Services.WebApi;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading.Tasks;
|
||||
using System.Runtime.InteropServices;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
@@ -109,7 +108,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
runnerSettings.GitHubUrl = inputUrl;
|
||||
var githubToken = command.GetRunnerRegisterToken();
|
||||
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken);
|
||||
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken, Constants.RunnerEvent.Register);
|
||||
runnerSettings.ServerUrl = authResult.TenantUrl;
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
@@ -277,12 +276,15 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
throw new NotSupportedException("Message queue listen OAuth token.");
|
||||
}
|
||||
|
||||
// Testing agent connection, detect any protential connection issue, like local clock skew that cause OAuth token expired.
|
||||
// Testing agent connection, detect any potential connection issue, like local clock skew that cause OAuth token expired.
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
VssCredentials credential = credMgr.LoadCredentials();
|
||||
try
|
||||
{
|
||||
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
|
||||
// ConnectAsync() hits _apis/connectionData which is an anonymous endpoint
|
||||
// Need to hit an authenticate endpoint to trigger OAuth token exchange.
|
||||
await _runnerServer.GetAgentPoolsAsync();
|
||||
_term.WriteSuccessMessage("Runner connection is good");
|
||||
}
|
||||
catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is"))
|
||||
@@ -373,7 +375,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
else
|
||||
{
|
||||
var githubToken = command.GetRunnerDeletionToken();
|
||||
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken);
|
||||
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken, Constants.RunnerEvent.Remove);
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
}
|
||||
@@ -517,17 +519,23 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken)
|
||||
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
|
||||
{
|
||||
var gitHubUrl = new UriBuilder(githubUrl);
|
||||
var githubApiUrl = $"https://api.{gitHubUrl.Host}/repos/{gitHubUrl.Path.Trim('/')}/actions-runners/registration";
|
||||
var gitHubUrlBuilder = new UriBuilder(githubUrl);
|
||||
var githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runner-registration";
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.Add(HostContext.UserAgent);
|
||||
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.github.shuri-preview+json"));
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent("", null, "application/json"));
|
||||
|
||||
var bodyObject = new Dictionary<string, string>()
|
||||
{
|
||||
{"url", githubUrl},
|
||||
{"runner_event", runnerEvent}
|
||||
};
|
||||
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
|
||||
@@ -13,7 +13,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
public interface ICredentialManager : IRunnerService
|
||||
{
|
||||
ICredentialProvider GetCredentialProvider(string credType);
|
||||
VssCredentials LoadCredentials();
|
||||
VssCredentials LoadCredentials(bool preferMigrated = true);
|
||||
}
|
||||
|
||||
public class CredentialManager : RunnerService, ICredentialManager
|
||||
@@ -40,7 +40,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
return creds;
|
||||
}
|
||||
|
||||
public VssCredentials LoadCredentials()
|
||||
public VssCredentials LoadCredentials(bool preferMigrated = true)
|
||||
{
|
||||
IConfigurationStore store = HostContext.GetService<IConfigurationStore>();
|
||||
|
||||
@@ -50,6 +50,16 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
|
||||
CredentialData credData = store.GetCredentials();
|
||||
|
||||
if (preferMigrated)
|
||||
{
|
||||
var migratedCred = store.GetMigratedCredentials();
|
||||
if (migratedCred != null)
|
||||
{
|
||||
credData = migratedCred;
|
||||
}
|
||||
}
|
||||
|
||||
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);
|
||||
credProv.CredentialData = credData;
|
||||
|
||||
|
||||
@@ -678,6 +678,17 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
if (service != null)
|
||||
{
|
||||
service.Start();
|
||||
|
||||
try
|
||||
{
|
||||
_term.WriteLine("Waiting for service to start...");
|
||||
service.WaitForStatus(ServiceControllerStatus.Running, TimeSpan.FromSeconds(60));
|
||||
}
|
||||
catch (System.ServiceProcess.TimeoutException)
|
||||
{
|
||||
throw new InvalidOperationException($"Cannot start the service {serviceName} in a timely fashion.");
|
||||
}
|
||||
|
||||
_term.WriteLine($"Service {serviceName} started successfully");
|
||||
}
|
||||
else
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using System;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.OAuth;
|
||||
@@ -29,7 +28,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
var authorizationUrl = this.CredentialData.Data.GetValueOrDefault("authorizationUrl", null);
|
||||
|
||||
// For back compat with .credential file that doesn't has 'oauthEndpointUrl' section
|
||||
var oathEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
|
||||
var oauthEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
|
||||
|
||||
ArgUtil.NotNullOrEmpty(clientId, nameof(clientId));
|
||||
ArgUtil.NotNullOrEmpty(authorizationUrl, nameof(authorizationUrl));
|
||||
@@ -39,7 +38,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
var keyManager = context.GetService<IRSAKeyManager>();
|
||||
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
|
||||
var clientCredential = new VssOAuthJwtBearerClientCredential(clientId, authorizationUrl, signingCredentials);
|
||||
var agentCredential = new VssOAuthCredential(new Uri(oathEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);
|
||||
var agentCredential = new VssOAuthCredential(new Uri(oauthEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);
|
||||
|
||||
// Construct a credentials cache with a single OAuth credential for communication. The windows credential
|
||||
// is explicitly set to null to ensure we never do that negotiation.
|
||||
|
||||
@@ -18,6 +18,7 @@ namespace GitHub.Runner.Listener
|
||||
[ServiceLocator(Default = typeof(JobDispatcher))]
|
||||
public interface IJobDispatcher : IRunnerService
|
||||
{
|
||||
bool Busy { get; }
|
||||
TaskCompletionSource<bool> RunOnceJobCompleted { get; }
|
||||
void Run(Pipelines.AgentJobRequestMessage message, bool runOnce = false);
|
||||
bool Cancel(JobCancelMessage message);
|
||||
@@ -69,6 +70,8 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
public TaskCompletionSource<bool> RunOnceJobCompleted => _runOnceJobCompleted;
|
||||
|
||||
public bool Busy { get; private set; }
|
||||
|
||||
public void Run(Pipelines.AgentJobRequestMessage jobRequestMessage, bool runOnce = false)
|
||||
{
|
||||
Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
|
||||
@@ -247,7 +250,7 @@ namespace GitHub.Runner.Listener
|
||||
Task completedTask = await Task.WhenAny(jobDispatch.WorkerDispatch, Task.Delay(TimeSpan.FromSeconds(45)));
|
||||
if (completedTask != jobDispatch.WorkerDispatch)
|
||||
{
|
||||
// at this point, the job exectuion might encounter some dead lock and even not able to be canclled.
|
||||
// at this point, the job execution might encounter some dead lock and even not able to be cancelled.
|
||||
// no need to localize the exception string should never happen.
|
||||
throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be canceled within 45 seconds.");
|
||||
}
|
||||
@@ -296,190 +299,290 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
private async Task RunAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
|
||||
{
|
||||
if (previousJobDispatch != null)
|
||||
Busy = true;
|
||||
try
|
||||
{
|
||||
Trace.Verbose($"Make sure the previous job request {previousJobDispatch.JobId} has successfully finished on worker.");
|
||||
await EnsureDispatchFinished(previousJobDispatch);
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Verbose($"This is the first job request.");
|
||||
}
|
||||
|
||||
var term = HostContext.GetService<ITerminal>();
|
||||
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
|
||||
|
||||
// first job request renew succeed.
|
||||
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
|
||||
var notification = HostContext.GetService<IJobNotification>();
|
||||
|
||||
// lock renew cancellation token.
|
||||
using (var lockRenewalTokenSource = new CancellationTokenSource())
|
||||
using (var workerProcessCancelTokenSource = new CancellationTokenSource())
|
||||
{
|
||||
long requestId = message.RequestId;
|
||||
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat
|
||||
|
||||
// start renew job request
|
||||
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
|
||||
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, firstJobRequestRenewed, lockRenewalTokenSource.Token);
|
||||
|
||||
// wait till first renew succeed or job request is canceled
|
||||
// not even start worker if the first renew fail
|
||||
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
|
||||
|
||||
if (renewJobRequest.IsCompleted)
|
||||
if (previousJobDispatch != null)
|
||||
{
|
||||
// renew job request task complete means we run out of retry for the first job request renew.
|
||||
Trace.Info($"Unable to renew job request for job {message.JobId} for the first time, stop dispatching job to worker.");
|
||||
return;
|
||||
Trace.Verbose($"Make sure the previous job request {previousJobDispatch.JobId} has successfully finished on worker.");
|
||||
await EnsureDispatchFinished(previousJobDispatch);
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Verbose($"This is the first job request.");
|
||||
}
|
||||
|
||||
if (jobRequestCancellationToken.IsCancellationRequested)
|
||||
var term = HostContext.GetService<ITerminal>();
|
||||
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
|
||||
|
||||
// first job request renew succeed.
|
||||
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
|
||||
var notification = HostContext.GetService<IJobNotification>();
|
||||
|
||||
// lock renew cancellation token.
|
||||
using (var lockRenewalTokenSource = new CancellationTokenSource())
|
||||
using (var workerProcessCancelTokenSource = new CancellationTokenSource())
|
||||
{
|
||||
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
||||
// stop renew lock
|
||||
lockRenewalTokenSource.Cancel();
|
||||
// renew job request should never blows up.
|
||||
await renewJobRequest;
|
||||
long requestId = message.RequestId;
|
||||
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat
|
||||
|
||||
// complete job request with result Cancelled
|
||||
await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
|
||||
return;
|
||||
}
|
||||
// start renew job request
|
||||
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
|
||||
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, firstJobRequestRenewed, lockRenewalTokenSource.Token);
|
||||
|
||||
HostContext.WritePerfCounter($"JobRequestRenewed_{requestId.ToString()}");
|
||||
// wait till first renew succeed or job request is canceled
|
||||
// not even start worker if the first renew fail
|
||||
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
|
||||
|
||||
Task<int> workerProcessTask = null;
|
||||
object _outputLock = new object();
|
||||
List<string> workerOutput = new List<string>();
|
||||
using (var processChannel = HostContext.CreateService<IProcessChannel>())
|
||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
// Start the process channel.
|
||||
// It's OK if StartServer bubbles an execption after the worker process has already started.
|
||||
// The worker will shutdown after 30 seconds if it hasn't received the job message.
|
||||
processChannel.StartServer(
|
||||
// Delegate to start the child process.
|
||||
startProcess: (string pipeHandleOut, string pipeHandleIn) =>
|
||||
{
|
||||
// Validate args.
|
||||
ArgUtil.NotNullOrEmpty(pipeHandleOut, nameof(pipeHandleOut));
|
||||
ArgUtil.NotNullOrEmpty(pipeHandleIn, nameof(pipeHandleIn));
|
||||
|
||||
// Save STDOUT from worker, worker will use STDOUT report unhandle exception.
|
||||
processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(stdout.Data))
|
||||
{
|
||||
lock (_outputLock)
|
||||
{
|
||||
workerOutput.Add(stdout.Data);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Save STDERR from worker, worker will use STDERR on crash.
|
||||
processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(stderr.Data))
|
||||
{
|
||||
lock (_outputLock)
|
||||
{
|
||||
workerOutput.Add(stderr.Data);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Start the child process.
|
||||
HostContext.WritePerfCounter("StartingWorkerProcess");
|
||||
var assemblyDirectory = HostContext.GetDirectory(WellKnownDirectory.Bin);
|
||||
string workerFileName = Path.Combine(assemblyDirectory, _workerProcessName);
|
||||
workerProcessTask = processInvoker.ExecuteAsync(
|
||||
workingDirectory: assemblyDirectory,
|
||||
fileName: workerFileName,
|
||||
arguments: "spawnclient " + pipeHandleOut + " " + pipeHandleIn,
|
||||
environment: null,
|
||||
requireExitCodeZero: false,
|
||||
outputEncoding: null,
|
||||
killProcessOnCancel: true,
|
||||
redirectStandardIn: null,
|
||||
inheritConsoleHandler: false,
|
||||
keepStandardInOpen: false,
|
||||
highPriorityProcess: true,
|
||||
cancellationToken: workerProcessCancelTokenSource.Token);
|
||||
});
|
||||
|
||||
// Send the job request message.
|
||||
// Kill the worker process if sending the job message times out. The worker
|
||||
// process may have successfully received the job message.
|
||||
try
|
||||
if (renewJobRequest.IsCompleted)
|
||||
{
|
||||
Trace.Info($"Send job request message to worker for job {message.JobId}.");
|
||||
HostContext.WritePerfCounter($"RunnerSendingJobToWorker_{message.JobId}");
|
||||
using (var csSendJobRequest = new CancellationTokenSource(_channelTimeout))
|
||||
{
|
||||
await processChannel.SendAsync(
|
||||
messageType: MessageType.NewJobRequest,
|
||||
body: JsonUtility.ToString(message),
|
||||
cancellationToken: csSendJobRequest.Token);
|
||||
}
|
||||
// renew job request task complete means we run out of retry for the first job request renew.
|
||||
Trace.Info($"Unable to renew job request for job {message.JobId} for the first time, stop dispatching job to worker.");
|
||||
return;
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
// message send been cancelled.
|
||||
// timeout 30 sec. kill worker.
|
||||
Trace.Info($"Job request message sending for job {message.JobId} been cancelled, kill running worker.");
|
||||
workerProcessCancelTokenSource.Cancel();
|
||||
try
|
||||
{
|
||||
await workerProcessTask;
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
Trace.Info("worker process has been killed.");
|
||||
}
|
||||
|
||||
if (jobRequestCancellationToken.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
||||
// stop renew lock
|
||||
lockRenewalTokenSource.Cancel();
|
||||
// renew job request should never blows up.
|
||||
await renewJobRequest;
|
||||
|
||||
// not finish the job request since the job haven't run on worker at all, we will not going to set a result to server.
|
||||
// complete job request with result Cancelled
|
||||
await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
|
||||
return;
|
||||
}
|
||||
|
||||
// we get first jobrequest renew succeed and start the worker process with the job message.
|
||||
// send notification to machine provisioner.
|
||||
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
||||
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
|
||||
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
|
||||
HostContext.WritePerfCounter($"JobRequestRenewed_{requestId.ToString()}");
|
||||
|
||||
HostContext.WritePerfCounter($"SentJobToWorker_{requestId.ToString()}");
|
||||
|
||||
try
{
Task<int> workerProcessTask = null;
object _outputLock = new object();
List<string> workerOutput = new List<string>();
using (var processChannel = HostContext.CreateService<IProcessChannel>())
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
// Start the process channel.
// It's OK if StartServer bubbles an exception after the worker process has already started.
// The worker will shut down after 30 seconds if it hasn't received the job message.
processChannel.StartServer(
// Delegate to start the child process.
startProcess: (string pipeHandleOut, string pipeHandleIn) =>
{
// Validate args.
ArgUtil.NotNullOrEmpty(pipeHandleOut, nameof(pipeHandleOut));
ArgUtil.NotNullOrEmpty(pipeHandleIn, nameof(pipeHandleIn));

// Save STDOUT from the worker; the worker uses STDOUT to report unhandled exceptions.
processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
lock (_outputLock)
{
workerOutput.Add(stdout.Data);
}
}
};

// Save STDERR from the worker; the worker uses STDERR on crash.
processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
lock (_outputLock)
{
workerOutput.Add(stderr.Data);
}
}
};

// Start the child process.
HostContext.WritePerfCounter("StartingWorkerProcess");
var assemblyDirectory = HostContext.GetDirectory(WellKnownDirectory.Bin);
string workerFileName = Path.Combine(assemblyDirectory, _workerProcessName);
workerProcessTask = processInvoker.ExecuteAsync(
workingDirectory: assemblyDirectory,
fileName: workerFileName,
arguments: "spawnclient " + pipeHandleOut + " " + pipeHandleIn,
environment: null,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: workerProcessCancelTokenSource.Token);
});

// Send the job request message.
// Kill the worker process if sending the job message times out. The worker
// process may have successfully received the job message.
try
{
Trace.Info($"Send job request message to worker for job {message.JobId}.");
HostContext.WritePerfCounter($"RunnerSendingJobToWorker_{message.JobId}");
using (var csSendJobRequest = new CancellationTokenSource(_channelTimeout))
{
await processChannel.SendAsync(
messageType: MessageType.NewJobRequest,
body: JsonUtility.ToString(message),
cancellationToken: csSendJobRequest.Token);
}
}
catch (OperationCanceledException)
{
// Message send was cancelled (30 second timeout); kill the running worker.
Trace.Info($"Job request message sending for job {message.JobId} been cancelled, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}

Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renewing the lock
lockRenewalTokenSource.Cancel();
// renewing the job request should never throw.
await renewJobRequest;

// Don't finish the job request since the job hasn't run on the worker at all; we won't set a result on the server.
return;
}
|
||||
|
||||
// The first job request renew succeeded; start the worker process with the job message.
// Send notification to the machine provisioner.
|
||||
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
||||
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
|
||||
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
|
||||
|
||||
HostContext.WritePerfCounter($"SentJobToWorker_{requestId.ToString()}");
|
||||
|
||||
try
|
||||
{
|
||||
TaskResult resultOnAbandonOrCancel = TaskResult.Succeeded;
|
||||
// Wait for the renew-lock task, the worker process, or the cancellation token to fire.
|
||||
var completedTask = await Task.WhenAny(renewJobRequest, workerProcessTask, Task.Delay(-1, jobRequestCancellationToken));
|
||||
if (completedTask == workerProcessTask)
|
||||
{
|
||||
// worker finished successfully, complete job request with result, attach unhandled exception reported by worker, stop renew lock, job has finished.
|
||||
int returnCode = await workerProcessTask;
|
||||
Trace.Info($"Worker finished for job {message.JobId}. Code: " + returnCode);
|
||||
|
||||
string detailInfo = null;
|
||||
if (!TaskResultUtil.IsValidReturnCode(returnCode))
|
||||
{
|
||||
detailInfo = string.Join(Environment.NewLine, workerOutput);
|
||||
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
|
||||
await LogWorkerProcessUnhandledException(message, detailInfo);
|
||||
}
|
||||
|
||||
TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode);
|
||||
Trace.Info($"finish job request for job {message.JobId} with result: {result}");
|
||||
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {result}");
|
||||
|
||||
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
||||
// stop renew lock
|
||||
lockRenewalTokenSource.Cancel();
|
||||
// renewing the job request should never throw.
|
||||
await renewJobRequest;
|
||||
|
||||
// complete job request
|
||||
await CompleteJobRequestAsync(_poolId, message, lockToken, result, detailInfo);
|
||||
|
||||
// Print out any unhandled exception that happened in the worker after we complete the job request.
// When we run out of disk space, reporting back to the server has higher priority.
|
||||
if (!string.IsNullOrEmpty(detailInfo))
|
||||
{
|
||||
Trace.Error("Unhandled exception happened in worker:");
|
||||
Trace.Error(detailInfo);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
else if (completedTask == renewJobRequest)
|
||||
{
|
||||
resultOnAbandonOrCancel = TaskResult.Abandoned;
|
||||
}
|
||||
else
|
||||
{
|
||||
resultOnAbandonOrCancel = TaskResult.Canceled;
|
||||
}
|
||||
|
||||
// The renew job request completed, or the job request cancellation token fired for RunAsync(jobRequestMessage);
// cancel the worker gracefully first, then kill it after the worker cancel timeout.
|
||||
try
|
||||
{
|
||||
Trace.Info($"Send job cancellation message to worker for job {message.JobId}.");
|
||||
using (var csSendCancel = new CancellationTokenSource(_channelTimeout))
|
||||
{
|
||||
var messageType = MessageType.CancelRequest;
|
||||
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
|
||||
{
|
||||
switch (HostContext.RunnerShutdownReason)
|
||||
{
|
||||
case ShutdownReason.UserCancelled:
|
||||
messageType = MessageType.RunnerShutdown;
|
||||
break;
|
||||
case ShutdownReason.OperatingSystemShutdown:
|
||||
messageType = MessageType.OperatingSystemShutdown;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
await processChannel.SendAsync(
|
||||
messageType: messageType,
|
||||
body: string.Empty,
|
||||
cancellationToken: csSendCancel.Token);
|
||||
}
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
// message send been cancelled.
|
||||
Trace.Info($"Job cancel message sending for job {message.JobId} been cancelled, kill running worker.");
|
||||
workerProcessCancelTokenSource.Cancel();
|
||||
try
|
||||
{
|
||||
await workerProcessTask;
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
Trace.Info("worker process has been killed.");
|
||||
}
|
||||
}
|
||||
|
||||
// Wait for the worker to exit.
// If the worker doesn't exit within the timeout, kill it.
|
||||
completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken));
|
||||
|
||||
// The worker hasn't exited within the cancellation timeout.
|
||||
if (completedTask != workerProcessTask)
|
||||
{
|
||||
Trace.Info($"worker process for job {message.JobId} haven't exit within cancellation timout, kill running worker.");
|
||||
workerProcessCancelTokenSource.Cancel();
|
||||
try
|
||||
{
|
||||
await workerProcessTask;
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
Trace.Info("worker process has been killed.");
|
||||
}
|
||||
|
||||
// When the worker doesn't exit within the cancel timeout, the runner kills the worker process and the worker won't finish uploading job logs.
// The runner will try to upload these logs at this point.
|
||||
await TryUploadUnfinishedLogs(message);
|
||||
}
|
||||
|
||||
Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}");
|
||||
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {resultOnAbandonOrCancel}");
|
||||
// complete job request with cancel result, stop renew lock, job has finished.
|
||||
|
||||
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
||||
// stop renew lock
|
||||
@@ -488,108 +591,20 @@ namespace GitHub.Runner.Listener
lockRenewalTokenSource.Cancel();
// renewing the job request should never throw.
await renewJobRequest;

// complete job request
await CompleteJobRequestAsync(_poolId, message, lockToken, resultOnAbandonOrCancel);
}
finally
{
// This should be the last thing to run so we don't notify external parties until actually finished
await notification.JobCompleted(message.JobId);
}
}
}
|
||||
finally
|
||||
{
|
||||
Busy = false;
|
||||
}
|
||||
}
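The worker-finished path above treats the worker's exit code as an encoded job result: TaskResultUtil.IsValidReturnCode decides whether the code maps to a TaskResult, and anything else is handled as a crash with the captured stdout/stderr attached to the job request. A rough, self-contained sketch of that pattern follows; the offset of 100 and the enum values are assumptions for illustration, not taken from the runner source.

using System;

// Hypothetical stand-in for the TaskResult enum; values are assumptions for illustration.
enum TaskResult
{
    Succeeded = 0,
    SucceededWithIssues = 1,
    Failed = 2,
    Canceled = 3,
    Abandoned = 4,
}

static class ExitCodeMapping
{
    // Assumed convention: the worker adds a fixed offset to the TaskResult value so that
    // a "valid" result exit code can be told apart from an ordinary crash exit code.
    private const int Offset = 100;

    public static int ToReturnCode(TaskResult result) => Offset + (int)result;

    public static bool IsValidReturnCode(int returnCode) =>
        returnCode >= Offset && returnCode <= Offset + (int)TaskResult.Abandoned;

    public static TaskResult FromReturnCode(int returnCode) =>
        IsValidReturnCode(returnCode) ? (TaskResult)(returnCode - Offset) : TaskResult.Failed;
}

static class Program
{
    static void Main()
    {
        // 102 decodes to Failed; 1 (or any out-of-range code) is treated as a crash,
        // which is when the captured stdout/stderr is attached to the job request result.
        Console.WriteLine(ExitCodeMapping.FromReturnCode(102));   // Failed
        Console.WriteLine(ExitCodeMapping.IsValidReturnCode(1));  // False
    }
}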
|
||||
|
||||
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
|
||||
@@ -712,6 +727,121 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
// Best effort upload any logs for this job.
|
||||
private async Task TryUploadUnfinishedLogs(Pipelines.AgentJobRequestMessage message)
|
||||
{
|
||||
Trace.Entering();
|
||||
|
||||
var logFolder = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), PagingLogger.PagingFolder);
|
||||
if (!Directory.Exists(logFolder))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var logs = Directory.GetFiles(logFolder);
|
||||
if (logs.Length == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection));
|
||||
ArgUtil.NotNull(systemConnection, nameof(systemConnection));
|
||||
|
||||
var jobServer = HostContext.GetService<IJobServer>();
|
||||
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
|
||||
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
|
||||
|
||||
await jobServer.ConnectAsync(jobConnection);
|
||||
|
||||
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
|
||||
|
||||
var updatedRecords = new List<TimelineRecord>();
|
||||
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
|
||||
var logRecords = new Dictionary<Guid, TimelineRecord>();
|
||||
foreach (var log in logs)
|
||||
{
|
||||
var logName = Path.GetFileNameWithoutExtension(log);
|
||||
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
|
||||
if (logNameParts.Length != 3)
|
||||
{
|
||||
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
|
||||
continue;
|
||||
}
|
||||
var logPageSeperator = logName.IndexOf('_');
|
||||
var logRecordId = Guid.Empty;
|
||||
var pageNumber = 0;
|
||||
|
||||
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
|
||||
{
|
||||
Trace.Warning($"log file '{log}' is not belongs to current job");
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!Guid.TryParse(logNameParts[1], out logRecordId))
|
||||
{
|
||||
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!int.TryParse(logNameParts[2], out pageNumber))
|
||||
{
|
||||
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
|
||||
continue;
|
||||
}
|
||||
|
||||
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
|
||||
if (record != null)
|
||||
{
|
||||
if (!logPages.ContainsKey(record.Id))
|
||||
{
|
||||
logPages[record.Id] = new Dictionary<int, string>();
|
||||
logRecords[record.Id] = record;
|
||||
}
|
||||
|
||||
logPages[record.Id][pageNumber] = log;
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var pages in logPages)
|
||||
{
|
||||
var record = logRecords[pages.Key];
|
||||
if (record.Log == null)
|
||||
{
|
||||
// Create the log
|
||||
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
|
||||
|
||||
// Need to post timeline record updates to reflect the log creation
|
||||
updatedRecords.Add(record.Clone());
|
||||
}
|
||||
|
||||
for (var i = 1; i <= pages.Value.Count; i++)
|
||||
{
|
||||
var logFile = pages.Value[i];
|
||||
// Upload the contents
|
||||
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
|
||||
{
|
||||
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
|
||||
}
|
||||
|
||||
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
|
||||
IOUtil.DeleteFile(logFile);
|
||||
}
|
||||
}
|
||||
|
||||
if (updatedRecords.Count > 0)
|
||||
{
|
||||
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// Ignore any error during log upload since it's best effort
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
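TryUploadUnfinishedLogs repeats the same 'GUID_GUID_INT' filename check three times while mapping paging log files back to timeline records. A minimal sketch of that parsing, collapsed into a single helper, is shown below; the .log extension and the sample GUIDs are invented for the demo and are not part of the runner source.

using System;
using System.IO;

static class PagingLogName
{
    // Parse a paging log file named "<timelineGuid>_<recordGuid>_<pageNumber>" plus an
    // extension, i.e. the 'GUID_GUID_INT' convention the method above checks piecewise.
    public static bool TryParse(string path, out Guid timelineId, out Guid recordId, out int page)
    {
        timelineId = Guid.Empty;
        recordId = Guid.Empty;
        page = 0;

        var name = Path.GetFileNameWithoutExtension(path);
        var parts = name.Split('_', StringSplitOptions.RemoveEmptyEntries);
        return parts.Length == 3
            && Guid.TryParse(parts[0], out timelineId)
            && Guid.TryParse(parts[1], out recordId)
            && int.TryParse(parts[2], out page);
    }

    static void Main()
    {
        // The file name below is made up for the demo.
        var ok = TryParse(
            "8f9b3c1e-0b44-4f39-9a01-0f1c2d3e4f5a_1aa2bb3c-4dd5-6ee7-8ff9-001122334455_3.log",
            out var timelineId, out var recordId, out var page);
        Console.WriteLine($"{ok}: timeline={timelineId} record={recordId} page={page}");
    }
}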
|
||||
|
||||
// TODO: We need to send detailInfo back to DT in order to add an issue for the job
|
||||
private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, Guid lockToken, TaskResult result, string detailInfo = null)
|
||||
{
|
||||
|
||||
@@ -13,7 +13,10 @@ using System.Diagnostics;
|
||||
using System.Runtime.InteropServices;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.WebApi;
|
||||
using System.Runtime.CompilerServices;
|
||||
|
||||
[assembly: InternalsVisibleTo("Test")]
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
[ServiceLocator(Default = typeof(MessageListener))]
|
||||
@@ -32,18 +35,30 @@ namespace GitHub.Runner.Listener
|
||||
private ITerminal _term;
|
||||
private IRunnerServer _runnerServer;
|
||||
private TaskAgentSession _session;
|
||||
private ICredentialManager _credMgr;
|
||||
private IConfigurationStore _configStore;
|
||||
private TimeSpan _getNextMessageRetryInterval;
|
||||
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
|
||||
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
|
||||
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
|
||||
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new Dictionary<string, int>();
|
||||
|
||||
// Whether to load credentials from the .credentials_migrated file
|
||||
internal bool _useMigratedCredentials;
|
||||
|
||||
// Need to check the auth url when there is only .credentials and the auth scheme is OAuth
|
||||
internal bool _needToCheckAuthorizationUrlUpdate;
|
||||
internal Task<VssCredentials> _authorizationUrlMigrationBackgroundTask;
|
||||
internal Task _authorizationUrlRollbackReattemptDelayBackgroundTask;
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
|
||||
_term = HostContext.GetService<ITerminal>();
|
||||
_runnerServer = HostContext.GetService<IRunnerServer>();
|
||||
_credMgr = HostContext.GetService<ICredentialManager>();
|
||||
_configStore = HostContext.GetService<IConfigurationStore>();
|
||||
}
|
||||
|
||||
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
|
||||
@@ -58,8 +73,8 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
// Create connection.
|
||||
Trace.Info("Loading Credentials");
|
||||
var credMgr = HostContext.GetService<ICredentialManager>();
|
||||
VssCredentials creds = credMgr.LoadCredentials();
|
||||
_useMigratedCredentials = !StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_SPSAUTHURL"));
|
||||
VssCredentials creds = _credMgr.LoadCredentials(_useMigratedCredentials);
|
||||
|
||||
var agent = new TaskAgentReference
|
||||
{
|
||||
@@ -74,6 +89,17 @@ namespace GitHub.Runner.Listener
|
||||
string errorMessage = string.Empty;
|
||||
bool encounteringError = false;
|
||||
|
||||
var originalCreds = _configStore.GetCredentials();
|
||||
var migratedCreds = _configStore.GetMigratedCredentials();
|
||||
if (migratedCreds == null)
|
||||
{
|
||||
_useMigratedCredentials = false;
|
||||
if (originalCreds.Scheme == Constants.Configuration.OAuth)
|
||||
{
|
||||
_needToCheckAuthorizationUrlUpdate = true;
|
||||
}
|
||||
}
|
||||
|
||||
while (true)
|
||||
{
|
||||
token.ThrowIfCancellationRequested();
|
||||
@@ -83,7 +109,7 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info("Connecting to the Runner Server...");
|
||||
await _runnerServer.ConnectAsync(new Uri(serverUrl), creds);
|
||||
Trace.Info("VssConnection created");
|
||||
|
||||
|
||||
_term.WriteLine();
|
||||
_term.WriteSuccessMessage("Connected to GitHub");
|
||||
_term.WriteLine();
|
||||
@@ -101,6 +127,12 @@ namespace GitHub.Runner.Listener
|
||||
encounteringError = false;
|
||||
}
|
||||
|
||||
if (_needToCheckAuthorizationUrlUpdate)
|
||||
{
|
||||
// start background task try to get new authorization url
|
||||
_authorizationUrlMigrationBackgroundTask = GetNewOAuthAuthorizationSetting(token);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
||||
@@ -120,8 +152,21 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
if (!IsSessionCreationExceptionRetriable(ex))
|
||||
{
|
||||
_term.WriteError($"Failed to create session. {ex.Message}");
|
||||
return false;
|
||||
if (_useMigratedCredentials)
|
||||
{
|
||||
// Migrated credentials might lose permission during the permission check;
// force using the original credentials and try again.
|
||||
_useMigratedCredentials = false;
|
||||
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
|
||||
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
|
||||
creds = _credMgr.LoadCredentials(false);
|
||||
Trace.Error("Fallback to original credentials and try again.");
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteError($"Failed to create session. {ex.Message}");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (!encounteringError) //print the message only on the first error
|
||||
@@ -182,6 +227,51 @@ namespace GitHub.Runner.Listener
|
||||
encounteringError = false;
|
||||
continuousError = 0;
|
||||
}
|
||||
|
||||
if (_needToCheckAuthorizationUrlUpdate &&
|
||||
_authorizationUrlMigrationBackgroundTask?.IsCompleted == true)
|
||||
{
|
||||
if (HostContext.GetService<IJobDispatcher>().Busy ||
|
||||
HostContext.GetService<ISelfUpdater>().Busy)
|
||||
{
|
||||
Trace.Info("Job or runner updates in progress, update credentials next time.");
|
||||
}
|
||||
else
|
||||
{
|
||||
try
|
||||
{
|
||||
var newCred = await _authorizationUrlMigrationBackgroundTask;
|
||||
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), newCred);
|
||||
Trace.Info("Updated connection to use migrated credential for next GetMessage call.");
|
||||
_useMigratedCredentials = true;
|
||||
_authorizationUrlMigrationBackgroundTask = null;
|
||||
_needToCheckAuthorizationUrlUpdate = false;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Fail to refresh connection with new authorization url.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (_authorizationUrlRollbackReattemptDelayBackgroundTask?.IsCompleted == true)
|
||||
{
|
||||
try
|
||||
{
|
||||
// we rolled back to use original creds about 2 days before, now it's a good time to try migrated creds again.
|
||||
Trace.Info("Re-attempt to use migrated credential");
|
||||
var migratedCreds = _credMgr.LoadCredentials();
|
||||
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedCreds);
|
||||
_useMigratedCredentials = true;
|
||||
_authorizationUrlRollbackReattemptDelayBackgroundTask = null;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Fail to refresh connection with new authorization url on rollback reattempt.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
||||
{
|
||||
@@ -205,7 +295,21 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
else if (!IsGetNextMessageExceptionRetriable(ex))
|
||||
{
|
||||
throw;
|
||||
if (_useMigratedCredentials)
|
||||
{
|
||||
// Migrated credentials might lose permission during the permission check;
// force using the original credentials and try again.
|
||||
_useMigratedCredentials = false;
|
||||
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
|
||||
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
|
||||
var originalCreds = _credMgr.LoadCredentials(false);
|
||||
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), originalCreds);
|
||||
Trace.Error("Fallback to original credentials and try again.");
|
||||
}
|
||||
else
|
||||
{
|
||||
throw;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -397,5 +501,80 @@ namespace GitHub.Runner.Listener
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<VssCredentials> GetNewOAuthAuthorizationSetting(CancellationToken token)
|
||||
{
|
||||
Trace.Info("Start checking oauth authorization url update.");
|
||||
while (true)
|
||||
{
|
||||
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromMinutes(30), TimeSpan.FromMinutes(45));
|
||||
await HostContext.Delay(backoff, token);
|
||||
|
||||
try
|
||||
{
|
||||
var migratedAuthorizationUrl = await _runnerServer.GetRunnerAuthUrlAsync(_settings.PoolId, _settings.AgentId);
|
||||
if (!string.IsNullOrEmpty(migratedAuthorizationUrl))
|
||||
{
|
||||
var credData = _configStore.GetCredentials();
|
||||
var clientId = credData.Data.GetValueOrDefault("clientId", null);
|
||||
var currentAuthorizationUrl = credData.Data.GetValueOrDefault("authorizationUrl", null);
|
||||
Trace.Info($"Current authorization url: {currentAuthorizationUrl}, new authorization url: {migratedAuthorizationUrl}");
|
||||
|
||||
if (string.Equals(currentAuthorizationUrl, migratedAuthorizationUrl, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
// We don't need to update credentials.
|
||||
Trace.Info("No needs to update authorization url");
|
||||
await Task.Delay(TimeSpan.FromMilliseconds(-1), token);
|
||||
}
|
||||
|
||||
var keyManager = HostContext.GetService<IRSAKeyManager>();
|
||||
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
|
||||
|
||||
var migratedClientCredential = new VssOAuthJwtBearerClientCredential(clientId, migratedAuthorizationUrl, signingCredentials);
|
||||
var migratedRunnerCredential = new VssOAuthCredential(new Uri(migratedAuthorizationUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, migratedClientCredential);
|
||||
|
||||
Trace.Info("Try connect service with Token Service OAuth endpoint.");
|
||||
var runnerServer = HostContext.CreateService<IRunnerServer>();
|
||||
await runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedRunnerCredential);
|
||||
await runnerServer.GetAgentPoolsAsync();
|
||||
Trace.Info($"Successfully connected service with new authorization url.");
|
||||
|
||||
var migratedCredData = new CredentialData
|
||||
{
|
||||
Scheme = Constants.Configuration.OAuth,
|
||||
Data =
|
||||
{
|
||||
{ "clientId", clientId },
|
||||
{ "authorizationUrl", migratedAuthorizationUrl },
|
||||
{ "oauthEndpointUrl", migratedAuthorizationUrl },
|
||||
},
|
||||
};
|
||||
|
||||
_configStore.SaveMigratedCredential(migratedCredData);
|
||||
return migratedRunnerCredential;
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Verbose("No authorization url updates");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error("Fail to get/test new authorization url.");
|
||||
Trace.Error(ex);
|
||||
|
||||
try
|
||||
{
|
||||
await _runnerServer.ReportRunnerAuthUrlErrorAsync(_settings.PoolId, _settings.AgentId, ex.ToString());
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
// best effort
|
||||
Trace.Error("Fail to report the migration error");
|
||||
Trace.Error(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
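Both the migration poll (a 30-45 minute interval) and the rollback reattempt (24-36 hours) rely on BackoffTimerHelper.GetRandomBackoff to jitter retries. Below is a minimal sketch of what such a randomized backoff helper could look like, assuming a uniform distribution between the bounds; this is an illustration, not the runner's actual helper.

using System;

static class RandomBackoff
{
    private static readonly Random _random = new Random();

    // Pick a uniformly random delay between min and max.
    public static TimeSpan GetRandomBackoff(TimeSpan min, TimeSpan max)
    {
        if (max < min)
        {
            throw new ArgumentException("max must be greater than or equal to min");
        }

        double rangeMs = (max - min).TotalMilliseconds;
        return min + TimeSpan.FromMilliseconds(_random.NextDouble() * rangeMs);
    }

    static void Main()
    {
        // e.g. the rollback reattempt delay: somewhere between 24 and 36 hours.
        var delay = GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
        Console.WriteLine($"Next reattempt in {delay.TotalHours:F1} hours");
    }
}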
|
||||
|
||||
@@ -451,16 +451,38 @@ namespace GitHub.Runner.Listener
|
||||
ext = "sh";
|
||||
#endif
|
||||
_term.WriteLine($@"
|
||||
Commands:,
|
||||
.{separator}config.{ext} Configures the runner
|
||||
.{separator}config.{ext} remove Unconfigures the runner
|
||||
.{separator}run.{ext} Runs the runner interactively. Does not require any options.
|
||||
Commands:
|
||||
.{separator}config.{ext} Configures the runner
|
||||
.{separator}config.{ext} remove Unconfigures the runner
|
||||
.{separator}run.{ext} Runs the runner interactively. Does not require any options.
|
||||
|
||||
Options:
|
||||
--help Prints the help for each command
|
||||
--version Prints the runner version
|
||||
--commit Prints the runner commit
|
||||
--help Prints the help for each command
|
||||
");
|
||||
|
||||
Config Options:
|
||||
--unattended Disable interactive prompts for missing arguments. Defaults will be used for missing options
|
||||
--url string Repository to add the runner to. Required if unattended
|
||||
--token string Registration token. Required if unattended
|
||||
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
|
||||
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
|
||||
--replace Replace any existing runner with the same name (default false)");
|
||||
#if OS_WINDOWS
|
||||
_term.WriteLine($@" --runasservice Run the runner as a service");
|
||||
_term.WriteLine($@" --windowslogonaccount string Account to run the service as. Requires runasservice");
|
||||
_term.WriteLine($@" --windowslogonpassword string Password for the service account. Requires runasservice");
|
||||
#endif
|
||||
_term.WriteLine($@"
|
||||
Examples:
|
||||
Configure a runner non-interactively:
|
||||
.{separator}config.{ext} --unattended --url <url> --token <token>
|
||||
Configure a runner non-interactively, replacing any existing runner with the same name:
|
||||
.{separator}config.{ext} --unattended --url <url> --token <token> --replace [--name <name>]");
|
||||
#if OS_WINDOWS
|
||||
_term.WriteLine($@" Configure a runner to run as a service:");
|
||||
_term.WriteLine($@" .{separator}config.{ext} --url <url> --token <token> --runasservice");
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@ namespace GitHub.Runner.Listener
|
||||
[ServiceLocator(Default = typeof(SelfUpdater))]
|
||||
public interface ISelfUpdater : IRunnerService
|
||||
{
|
||||
bool Busy { get; }
|
||||
Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token);
|
||||
}
|
||||
|
||||
@@ -31,6 +32,8 @@ namespace GitHub.Runner.Listener
|
||||
private int _poolId;
|
||||
private int _agentId;
|
||||
|
||||
public bool Busy { get; private set; }
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
@@ -45,52 +48,60 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
|
||||
{
|
||||
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
|
||||
Busy = true;
|
||||
try
|
||||
{
|
||||
Trace.Info($"Can't find available update package.");
|
||||
return false;
|
||||
}
|
||||
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
|
||||
{
|
||||
Trace.Info($"Can't find available update package.");
|
||||
return false;
|
||||
}
|
||||
|
||||
Trace.Info($"An update is available.");
|
||||
Trace.Info($"An update is available.");
|
||||
|
||||
// Print console line that warn user not shutdown runner.
|
||||
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
|
||||
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
|
||||
// Print console line that warn user not shutdown runner.
|
||||
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
|
||||
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
|
||||
|
||||
await DownloadLatestRunner(token);
|
||||
Trace.Info($"Download latest runner and unzip into runner root.");
|
||||
await DownloadLatestRunner(token);
|
||||
Trace.Info($"Download latest runner and unzip into runner root.");
|
||||
|
||||
// wait till all running job finish
|
||||
await UpdateRunnerUpdateStateAsync("Waiting for current job finish running.");
|
||||
// wait till all running job finish
|
||||
await UpdateRunnerUpdateStateAsync("Waiting for current job finish running.");
|
||||
|
||||
await jobDispatcher.WaitAsync(token);
|
||||
Trace.Info($"All running job has exited.");
|
||||
await jobDispatcher.WaitAsync(token);
|
||||
Trace.Info($"All running job has exited.");
|
||||
|
||||
// delete runner backup
|
||||
DeletePreviousVersionRunnerBackup(token);
|
||||
Trace.Info($"Delete old version runner backup.");
|
||||
// delete runner backup
|
||||
DeletePreviousVersionRunnerBackup(token);
|
||||
Trace.Info($"Delete old version runner backup.");
|
||||
|
||||
// generate update script from template
|
||||
await UpdateRunnerUpdateStateAsync("Generate and execute update script.");
|
||||
// generate update script from template
|
||||
await UpdateRunnerUpdateStateAsync("Generate and execute update script.");
|
||||
|
||||
string updateScript = GenerateUpdateScript(restartInteractiveRunner);
|
||||
Trace.Info($"Generate update script into: {updateScript}");
|
||||
string updateScript = GenerateUpdateScript(restartInteractiveRunner);
|
||||
Trace.Info($"Generate update script into: {updateScript}");
|
||||
|
||||
// kick off update script
|
||||
Process invokeScript = new Process();
|
||||
// kick off update script
|
||||
Process invokeScript = new Process();
|
||||
#if OS_WINDOWS
|
||||
invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace);
|
||||
invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\"";
|
||||
#elif (OS_OSX || OS_LINUX)
|
||||
invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
|
||||
invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
|
||||
invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
|
||||
invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
|
||||
#endif
|
||||
invokeScript.Start();
|
||||
Trace.Info($"Update script start running");
|
||||
invokeScript.Start();
|
||||
Trace.Info($"Update script start running");
|
||||
|
||||
await UpdateRunnerUpdateStateAsync("Runner will exit shortly for update, should back online within 10 seconds.");
|
||||
await UpdateRunnerUpdateStateAsync("Runner will exit shortly for update, should back online within 10 seconds.");
|
||||
|
||||
return true;
|
||||
return true;
|
||||
}
|
||||
finally
|
||||
{
|
||||
Busy = false;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<bool> UpdateNeeded(string targetVersion, CancellationToken token)
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.WebApi;
|
||||
using GitHub.Build.WebApi;
|
||||
|
||||
namespace GitHub.Runner.Plugins.Artifact
|
||||
{
|
||||
// A client wrapper interacting with Build's Artifact API
|
||||
public class BuildServer
|
||||
{
|
||||
private readonly BuildHttpClient _buildHttpClient;
|
||||
|
||||
public BuildServer(VssConnection connection)
|
||||
{
|
||||
ArgUtil.NotNull(connection, nameof(connection));
|
||||
_buildHttpClient = connection.GetClient<BuildHttpClient>();
|
||||
}
|
||||
|
||||
// Associate the specified artifact with a build, along with custom data.
|
||||
public async Task<BuildArtifact> AssociateArtifact(
|
||||
Guid projectId,
|
||||
int pipelineId,
|
||||
string jobId,
|
||||
string name,
|
||||
string type,
|
||||
string data,
|
||||
Dictionary<string, string> propertiesDictionary,
|
||||
CancellationToken cancellationToken = default(CancellationToken))
|
||||
{
|
||||
BuildArtifact artifact = new BuildArtifact()
|
||||
{
|
||||
Name = name,
|
||||
Source = jobId,
|
||||
Resource = new ArtifactResource()
|
||||
{
|
||||
Data = data,
|
||||
Type = type,
|
||||
Properties = propertiesDictionary
|
||||
}
|
||||
};
|
||||
|
||||
return await _buildHttpClient.CreateArtifactAsync(artifact, projectId, pipelineId, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
// Get named artifact from a build
|
||||
public async Task<BuildArtifact> GetArtifact(
|
||||
Guid projectId,
|
||||
int pipelineId,
|
||||
string name,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
return await _buildHttpClient.GetArtifactAsync(projectId, pipelineId, name, cancellationToken: cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,6 @@ using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Build.WebApi;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
@@ -40,70 +39,31 @@ namespace GitHub.Runner.Plugins.Artifact
|
||||
|
||||
targetPath = Path.IsPathFullyQualified(targetPath) ? targetPath : Path.GetFullPath(Path.Combine(defaultWorkingDirectory, targetPath));
|
||||
|
||||
// Project ID
|
||||
Guid projectId = new Guid(context.Variables.GetValueOrDefault(BuildVariables.TeamProjectId)?.Value ?? Guid.Empty.ToString());
|
||||
|
||||
// Build ID
|
||||
string buildIdStr = context.Variables.GetValueOrDefault(BuildVariables.BuildId)?.Value ?? string.Empty;
|
||||
string buildIdStr = context.Variables.GetValueOrDefault(SdkConstants.Variables.Build.BuildId)?.Value ?? string.Empty;
|
||||
if (!int.TryParse(buildIdStr, out int buildId))
|
||||
{
|
||||
throw new ArgumentException($"Run Id is not an Int32: {buildIdStr}");
|
||||
}
|
||||
|
||||
// Determine whether to call Pipelines or Build endpoint to publish artifact based on variable setting
|
||||
string usePipelinesArtifactEndpointVar = context.Variables.GetValueOrDefault("Runner.UseActionsArtifactsApis")?.Value;
|
||||
bool.TryParse(usePipelinesArtifactEndpointVar, out bool usePipelinesArtifactEndpoint);
|
||||
string containerPath;
|
||||
long containerId;
|
||||
|
||||
context.Output($"Downloading artifact '{artifactName}' to: '{targetPath}'");
|
||||
|
||||
if (usePipelinesArtifactEndpoint)
|
||||
// Definition ID is a dummy value only used by HTTP client routing purposes
|
||||
int definitionId = 1;
|
||||
|
||||
var pipelinesHelper = new PipelinesServer(context.VssConnection);
|
||||
|
||||
var actionsStorageArtifact = await pipelinesHelper.GetActionsStorageArtifact(definitionId, buildId, artifactName, token);
|
||||
|
||||
if (actionsStorageArtifact == null)
|
||||
{
|
||||
context.Debug("Downloading artifact using v2 endpoint");
|
||||
|
||||
// Definition ID is a dummy value only used by HTTP client routing purposes
|
||||
int definitionId = 1;
|
||||
|
||||
var pipelinesHelper = new PipelinesServer(context.VssConnection);
|
||||
|
||||
var actionsStorageArtifact = await pipelinesHelper.GetActionsStorageArtifact(definitionId, buildId, artifactName, token);
|
||||
|
||||
if (actionsStorageArtifact == null)
|
||||
{
|
||||
throw new Exception($"The actions storage artifact for '{artifactName}' could not be found, or is no longer available");
|
||||
}
|
||||
|
||||
containerPath = actionsStorageArtifact.Name; // In actions storage artifacts, name equals the path
|
||||
containerId = actionsStorageArtifact.ContainerId;
|
||||
}
|
||||
else
|
||||
{
|
||||
context.Debug("Downloading artifact using v1 endpoint");
|
||||
|
||||
BuildServer buildHelper = new BuildServer(context.VssConnection);
|
||||
BuildArtifact buildArtifact = await buildHelper.GetArtifact(projectId, buildId, artifactName, token);
|
||||
|
||||
if (string.Equals(buildArtifact.Resource.Type, "Container", StringComparison.OrdinalIgnoreCase) ||
|
||||
// Artifact was published by Pipelines endpoint, check new type here to handle rollback scenario
|
||||
string.Equals(buildArtifact.Resource.Type, "Actions_Storage", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
string containerUrl = buildArtifact.Resource.Data;
|
||||
string[] parts = containerUrl.Split(new[] { '/' }, 3);
|
||||
if (parts.Length < 3 || !long.TryParse(parts[1], out containerId))
|
||||
{
|
||||
throw new ArgumentOutOfRangeException($"Invalid container url '{containerUrl}' for artifact '{buildArtifact.Name}'");
|
||||
}
|
||||
|
||||
containerPath = parts[2];
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new NotSupportedException($"Invalid artifact type: {buildArtifact.Resource.Type}");
|
||||
}
|
||||
throw new Exception($"The actions storage artifact for '{artifactName}' could not be found, or is no longer available");
|
||||
}
|
||||
|
||||
FileContainerServer fileContainerServer = new FileContainerServer(context.VssConnection, projectId, containerId, containerPath);
|
||||
string containerPath = actionsStorageArtifact.Name; // In actions storage artifacts, name equals the path
|
||||
long containerId = actionsStorageArtifact.ContainerId;
|
||||
|
||||
FileContainerServer fileContainerServer = new FileContainerServer(context.VssConnection, projectId: new Guid(), containerId, containerPath);
|
||||
await fileContainerServer.DownloadFromContainerAsync(context, targetPath, token);
|
||||
|
||||
context.Output("Artifact download finished.");
|
||||
|
||||
@@ -4,9 +4,7 @@ using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Build.WebApi;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Plugins.Artifact
|
||||
@@ -45,11 +43,8 @@ namespace GitHub.Runner.Plugins.Artifact
|
||||
throw new ArgumentException($"Artifact name is not valid: {artifactName}. It cannot contain '\\', '/', \"', ':', '<', '>', '|', '*', and '?'");
|
||||
}
|
||||
|
||||
// Project ID
|
||||
Guid projectId = new Guid(context.Variables.GetValueOrDefault(BuildVariables.TeamProjectId)?.Value ?? Guid.Empty.ToString());
|
||||
|
||||
// Build ID
|
||||
string buildIdStr = context.Variables.GetValueOrDefault(BuildVariables.BuildId)?.Value ?? string.Empty;
|
||||
string buildIdStr = context.Variables.GetValueOrDefault(SdkConstants.Variables.Build.BuildId)?.Value ?? string.Empty;
|
||||
if (!int.TryParse(buildIdStr, out int buildId))
|
||||
{
|
||||
throw new ArgumentException($"Run Id is not an Int32: {buildIdStr}");
|
||||
@@ -65,7 +60,7 @@ namespace GitHub.Runner.Plugins.Artifact
|
||||
}
|
||||
|
||||
// Container ID
|
||||
string containerIdStr = context.Variables.GetValueOrDefault(BuildVariables.ContainerId)?.Value ?? string.Empty;
|
||||
string containerIdStr = context.Variables.GetValueOrDefault(SdkConstants.Variables.Build.ContainerId)?.Value ?? string.Empty;
|
||||
if (!long.TryParse(containerIdStr, out long containerId))
|
||||
{
|
||||
throw new ArgumentException($"Container Id is not an Int64: {containerIdStr}");
|
||||
@@ -73,7 +68,7 @@ namespace GitHub.Runner.Plugins.Artifact
|
||||
|
||||
context.Output($"Uploading artifact '{artifactName}' from '{fullPath}' for run #{buildId}");
|
||||
|
||||
FileContainerServer fileContainerHelper = new FileContainerServer(context.VssConnection, projectId, containerId, artifactName);
|
||||
FileContainerServer fileContainerHelper = new FileContainerServer(context.VssConnection, projectId: Guid.Empty, containerId, artifactName);
|
||||
var propertiesDictionary = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
long size = 0;
|
||||
@@ -89,38 +84,20 @@ namespace GitHub.Runner.Plugins.Artifact
|
||||
// if any of the results were successful, make sure to attach them to the build
|
||||
finally
|
||||
{
|
||||
// Determine whether to call Pipelines or Build endpoint to publish artifact based on variable setting
|
||||
string usePipelinesArtifactEndpointVar = context.Variables.GetValueOrDefault("Runner.UseActionsArtifactsApis")?.Value;
|
||||
bool.TryParse(usePipelinesArtifactEndpointVar, out bool usePipelinesArtifactEndpoint);
|
||||
// Definition ID is a dummy value only used by HTTP client routing purposes
|
||||
int definitionId = 1;
|
||||
|
||||
if (usePipelinesArtifactEndpoint)
|
||||
{
|
||||
// Definition ID is a dummy value only used by HTTP client routing purposes
|
||||
int definitionId = 1;
|
||||
PipelinesServer pipelinesHelper = new PipelinesServer(context.VssConnection);
|
||||
|
||||
PipelinesServer pipelinesHelper = new PipelinesServer(context.VssConnection);
|
||||
var artifact = await pipelinesHelper.AssociateActionsStorageArtifactAsync(
|
||||
definitionId,
|
||||
buildId,
|
||||
containerId,
|
||||
artifactName,
|
||||
size,
|
||||
token);
|
||||
|
||||
var artifact = await pipelinesHelper.AssociateActionsStorageArtifactAsync(
|
||||
definitionId,
|
||||
buildId,
|
||||
containerId,
|
||||
artifactName,
|
||||
size,
|
||||
token);
|
||||
|
||||
context.Output($"Associated artifact {artifactName} ({artifact.ContainerId}) with run #{buildId}");
|
||||
context.Debug($"Associated artifact using v2 endpoint");
|
||||
}
|
||||
else
|
||||
{
|
||||
string fileContainerFullPath = StringUtil.Format($"#/{containerId}/{artifactName}");
|
||||
BuildServer buildHelper = new BuildServer(context.VssConnection);
|
||||
string jobId = context.Variables.GetValueOrDefault(WellKnownDistributedTaskVariables.JobId).Value ?? string.Empty;
|
||||
var artifact = await buildHelper.AssociateArtifact(projectId, buildId, jobId, artifactName, ArtifactResourceTypes.Container, fileContainerFullPath, propertiesDictionary, token);
|
||||
|
||||
context.Output($"Associated artifact {artifactName} ({artifact.Id}) with run #{buildId}");
|
||||
context.Debug($"Associated artifact using v1 endpoint");
|
||||
}
|
||||
context.Output($"Associated artifact {artifactName} ({artifact.ContainerId}) with run #{buildId}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -271,6 +271,14 @@ namespace GitHub.Runner.Sdk
|
||||
// Indicate GitHub Actions process.
|
||||
_proc.StartInfo.Environment["GITHUB_ACTIONS"] = "true";
|
||||
|
||||
// Set CI=true when nothing else has already set it.
// CI=true is commonly set by most CI providers; keep that convention for GitHub Actions.
|
||||
if (!_proc.StartInfo.Environment.ContainsKey("CI") &&
|
||||
Environment.GetEnvironmentVariable("CI") == null)
|
||||
{
|
||||
_proc.StartInfo.Environment["CI"] = "true";
|
||||
}
|
||||
|
||||
// Hook up the events.
|
||||
_proc.EnableRaisingEvents = true;
|
||||
_proc.Exited += ProcessExitedHandler;
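The CI=true change only writes the variable when neither the child's environment block nor the parent process already defines it. A small standalone sketch of that defaulting pattern using ProcessStartInfo follows; the printenv call is illustrative and Unix-only, and the runner's ProcessInvoker is built differently.

using System;
using System.Diagnostics;

static class CiDefault
{
    static void Main()
    {
        var psi = new ProcessStartInfo("printenv", "CI")
        {
            RedirectStandardOutput = true,
        };

        // Mirror the check above: only default CI=true when the caller has not set it for
        // the child process and the current process does not already have it either.
        if (!psi.Environment.ContainsKey("CI") &&
            Environment.GetEnvironmentVariable("CI") == null)
        {
            psi.Environment["CI"] = "true";
        }

        using var proc = Process.Start(psi);
        Console.WriteLine(proc.StandardOutput.ReadToEnd().Trim());
        proc.WaitForExit();
    }
}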
|
||||
|
||||
@@ -21,6 +21,7 @@ namespace GitHub.Runner.Sdk
|
||||
private string _httpsProxyAddress;
|
||||
private string _httpsProxyUsername;
|
||||
private string _httpsProxyPassword;
|
||||
private string _noProxyString;
|
||||
|
||||
private readonly List<ByPassInfo> _noProxyList = new List<ByPassInfo>();
|
||||
private readonly HashSet<string> _noProxyUnique = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
@@ -33,6 +34,7 @@ namespace GitHub.Runner.Sdk
|
||||
public string HttpsProxyAddress => _httpsProxyAddress;
|
||||
public string HttpsProxyUsername => _httpsProxyUsername;
|
||||
public string HttpsProxyPassword => _httpsProxyPassword;
|
||||
public string NoProxyString => _noProxyString;
|
||||
|
||||
public List<ByPassInfo> NoProxyList => _noProxyList;
|
||||
|
||||
@@ -71,6 +73,10 @@ namespace GitHub.Runner.Sdk
|
||||
{
|
||||
_httpProxyAddress = proxyHttpUri.AbsoluteUri;
|
||||
|
||||
// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
|
||||
Environment.SetEnvironmentVariable("HTTP_PROXY", _httpProxyAddress);
|
||||
Environment.SetEnvironmentVariable("http_proxy", _httpProxyAddress);
|
||||
|
||||
// the proxy url looks like http://[user:pass@]127.0.0.1:8888
|
||||
var userInfo = Uri.UnescapeDataString(proxyHttpUri.UserInfo).Split(':', 2, StringSplitOptions.RemoveEmptyEntries);
|
||||
if (userInfo.Length == 2)
|
||||
@@ -97,6 +103,10 @@ namespace GitHub.Runner.Sdk
|
||||
{
|
||||
_httpsProxyAddress = proxyHttpsUri.AbsoluteUri;
|
||||
|
||||
// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
|
||||
Environment.SetEnvironmentVariable("HTTPS_PROXY", _httpsProxyAddress);
|
||||
Environment.SetEnvironmentVariable("https_proxy", _httpsProxyAddress);
|
||||
|
||||
// the proxy url looks like http://[user:pass@]127.0.0.1:8888
|
||||
var userInfo = Uri.UnescapeDataString(proxyHttpsUri.UserInfo).Split(':', 2, StringSplitOptions.RemoveEmptyEntries);
|
||||
if (userInfo.Length == 2)
|
||||
@@ -121,6 +131,12 @@ namespace GitHub.Runner.Sdk
|
||||
|
||||
if (!string.IsNullOrEmpty(noProxyList))
|
||||
{
|
||||
_noProxyString = noProxyList;
|
||||
|
||||
// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
|
||||
Environment.SetEnvironmentVariable("NO_PROXY", noProxyList);
|
||||
Environment.SetEnvironmentVariable("no_proxy", noProxyList);
|
||||
|
||||
var noProxyListSplit = noProxyList.Split(',', StringSplitOptions.RemoveEmptyEntries);
|
||||
foreach (string noProxy in noProxyListSplit)
|
||||
{
|
||||
|
||||
19  src/Runner.Sdk/SdkConstants.cs  (new file)
@@ -0,0 +1,19 @@
|
||||
using System;
|
||||
|
||||
namespace GitHub.Runner.Sdk
|
||||
{
|
||||
public class SdkConstants
|
||||
{
|
||||
public static class Variables
|
||||
{
|
||||
public static class Build
|
||||
{
|
||||
// Legacy "build" variables historically used by the runner
|
||||
// DO NOT add new variables here -- instead use either the Actions or Runner namespaces
|
||||
public const String BuildId = "build.buildId";
|
||||
public const String BuildNumber = "build.buildNumber";
|
||||
public const String ContainerId = "build.containerId";
|
||||
}
|
||||
}
|
||||
}
|
||||
}
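The artifact plugin hunks above replace BuildVariables.BuildId with these SdkConstants keys. As a rough sketch of how a plugin might resolve the run id through them, the example below uses a plain dictionary as a stand-in for the real plugin execution context.

using System;
using System.Collections.Generic;

static class BuildVariableLookup
{
    // "build.buildId" matches SdkConstants.Variables.Build.BuildId; the dictionary here is a
    // hypothetical stand-in for the plugin's context.Variables bag.
    static int GetRunId(IReadOnlyDictionary<string, string> variables)
    {
        if (!variables.TryGetValue("build.buildId", out var raw) ||
            !int.TryParse(raw, out var buildId))
        {
            throw new ArgumentException($"Run Id is not an Int32: {raw}");
        }

        return buildId;
    }

    static void Main()
    {
        var variables = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
        {
            ["build.buildId"] = "128",
            ["build.containerId"] = "42",
        };

        Console.WriteLine(GetRunId(variables)); // 128
    }
}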
|
||||
@@ -1,6 +1,7 @@
|
||||
using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Worker.Container;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
@@ -15,14 +16,14 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
void EnablePluginInternalCommand();
|
||||
void DisablePluginInternalCommand();
|
||||
bool TryProcessCommand(IExecutionContext context, string input);
|
||||
bool TryProcessCommand(IExecutionContext context, string input, ContainerInfo container);
|
||||
}
|
||||
|
||||
public sealed class ActionCommandManager : RunnerService, IActionCommandManager
|
||||
{
|
||||
private const string _stopCommand = "stop-commands";
|
||||
private readonly Dictionary<string, IActionCommandExtension> _commandExtensions = new Dictionary<string, IActionCommandExtension>(StringComparer.OrdinalIgnoreCase);
|
||||
private HashSet<string> _registeredCommands = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
private readonly HashSet<string> _registeredCommands = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
private readonly object _commandSerializeLock = new object();
|
||||
private bool _stopProcessCommand = false;
|
||||
private string _stopToken = null;
|
||||
@@ -58,7 +59,7 @@ namespace GitHub.Runner.Worker
|
||||
_registeredCommands.Remove("internal-set-repo-path");
|
||||
}
|
||||
|
||||
public bool TryProcessCommand(IExecutionContext context, string input)
|
||||
public bool TryProcessCommand(IExecutionContext context, string input, ContainerInfo container)
|
||||
{
|
||||
if (string.IsNullOrEmpty(input))
|
||||
{
|
||||
@@ -114,7 +115,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
try
|
||||
{
|
||||
extension.ProcessCommand(context, input, actionCommand);
|
||||
extension.ProcessCommand(context, input, actionCommand, container);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
@@ -140,7 +141,7 @@ namespace GitHub.Runner.Worker
|
||||
string Command { get; }
|
||||
bool OmitEcho { get; }
|
||||
|
||||
void ProcessCommand(IExecutionContext context, string line, ActionCommand command);
|
||||
void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container);
|
||||
}
|
||||
|
||||
public sealed class InternalPluginSetRepoPathCommandExtension : RunnerService, IActionCommandExtension
|
||||
@@ -150,7 +151,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
if (!command.Properties.TryGetValue(SetRepoPathCommandProperties.repoFullName, out string repoFullName) || string.IsNullOrEmpty(repoFullName))
|
||||
{
|
||||
@@ -180,7 +181,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
if (!command.Properties.TryGetValue(SetEnvCommandProperties.Name, out string envName) || string.IsNullOrEmpty(envName))
|
||||
{
|
||||
@@ -205,7 +206,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
if (!command.Properties.TryGetValue(SetOutputCommandProperties.Name, out string outputName) || string.IsNullOrEmpty(outputName))
|
||||
{
|
||||
@@ -229,7 +230,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
if (!command.Properties.TryGetValue(SaveStateCommandProperties.Name, out string stateName) || string.IsNullOrEmpty(stateName))
|
||||
{
|
||||
@@ -253,7 +254,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(command.Data))
|
||||
{
|
||||
@@ -279,7 +280,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(command.Data, "path");
|
||||
context.PrependPath.RemoveAll(x => string.Equals(x, command.Data, StringComparison.CurrentCulture));
|
||||
@@ -294,7 +295,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
var file = command.Data;
|
||||
|
||||
@@ -306,9 +307,9 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
// Translate file path back from container path
|
||||
if (context.Container != null)
|
||||
if (container != null)
|
||||
{
|
||||
file = context.Container.TranslateToHostPath(file);
|
||||
file = container.TranslateToHostPath(file);
|
||||
}
|
||||
|
||||
// Root the path
|
||||
@@ -341,7 +342,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
command.Properties.TryGetValue(RemoveMatcherCommandProperties.Owner, out string owner);
|
||||
var file = command.Data;
|
||||
@@ -369,9 +370,9 @@ namespace GitHub.Runner.Worker
|
||||
else
|
||||
{
|
||||
// Translate file path back from container path
|
||||
if (context.Container != null)
|
||||
if (container != null)
|
||||
{
|
||||
file = context.Container.TranslateToHostPath(file);
|
||||
file = container.TranslateToHostPath(file);
|
||||
}
|
||||
|
||||
// Root the path
|
||||
@@ -409,7 +410,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
context.Debug(command.Data);
|
||||
}
|
||||
@@ -437,7 +438,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
command.Properties.TryGetValue(IssueCommandProperties.File, out string file);
|
||||
command.Properties.TryGetValue(IssueCommandProperties.Line, out string line);
|
||||
@@ -454,10 +455,10 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
issue.Category = "Code";
|
||||
|
||||
if (context.Container != null)
|
||||
if (container != null)
|
||||
{
|
||||
// Translate file path back from container path
|
||||
file = context.Container.TranslateToHostPath(file);
|
||||
file = container.TranslateToHostPath(file);
|
||||
command.Properties[IssueCommandProperties.File] = file;
|
||||
}
|
||||
|
||||
@@ -517,7 +518,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
var data = this is GroupCommandExtension ? command.Data : string.Empty;
|
||||
context.Output($"##[{Command}]{data}");
|
||||
@@ -531,7 +532,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(command.Data, "value");
|
||||
|
||||
|
||||
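Note: the hunks above thread the step's ContainerInfo into IActionCommandExtension.ProcessCommand instead of each extension reaching for context.Container. A minimal sketch of an extension under the new signature; the class name, command name, and any members beyond those shown in this diff are assumptions for illustration only:

using System;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Container;

public sealed class ExampleTranslatePathCommandExtension : RunnerService, IActionCommandExtension
{
    public string Command => "example-translate-path";   // hypothetical command name
    public Type ExtensionType => typeof(IActionCommandExtension);

    public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
    {
        var file = command.Data;

        // The container argument may be null for host steps; translate only when present.
        if (container != null)
        {
            file = container.TranslateToHostPath(file);
        }

        context.Debug($"Resolved host path: {file}");
    }
}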
@@ -33,7 +33,7 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
private const int _defaultFileStreamBufferSize = 4096;
|
||||
|
||||
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
|
||||
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
|
||||
private const int _defaultCopyBufferSize = 81920;
|
||||
|
||||
private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new Dictionary<Guid, ContainerInfo>();
|
||||
@@ -198,14 +198,21 @@ namespace GitHub.Runner.Worker
|
||||
Trace.Info($"Load action that reference repository from '{actionDirectory}'");
|
||||
definition.Directory = actionDirectory;
|
||||
|
||||
string manifestFile = Path.Combine(actionDirectory, "action.yml");
|
||||
string manifestFile = Path.Combine(actionDirectory, Constants.Path.ActionManifestYmlFile);
|
||||
string manifestFileYaml = Path.Combine(actionDirectory, Constants.Path.ActionManifestYamlFile);
|
||||
string dockerFile = Path.Combine(actionDirectory, "Dockerfile");
|
||||
string dockerFileLowerCase = Path.Combine(actionDirectory, "dockerfile");
|
||||
if (File.Exists(manifestFile))
|
||||
if (File.Exists(manifestFile) || File.Exists(manifestFileYaml))
|
||||
{
|
||||
var manifestManager = HostContext.GetService<IActionManifestManager>();
|
||||
definition.Data = manifestManager.Load(executionContext, manifestFile);
|
||||
|
||||
if (File.Exists(manifestFile))
|
||||
{
|
||||
definition.Data = manifestManager.Load(executionContext, manifestFile);
|
||||
}
|
||||
else
|
||||
{
|
||||
definition.Data = manifestManager.Load(executionContext, manifestFileYaml);
|
||||
}
|
||||
Trace.Verbose($"Action friendly name: '{definition.Data.Name}'");
|
||||
Trace.Verbose($"Action description: '{definition.Data.Description}'");
|
||||
|
||||
@@ -314,7 +321,7 @@ namespace GitHub.Runner.Worker
|
||||
else
|
||||
{
|
||||
var fullPath = IOUtil.ResolvePath(actionDirectory, "."); // resolve full path without access filesystem.
|
||||
throw new NotSupportedException($"Can't find 'action.yml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
|
||||
throw new NotSupportedException($"Can't find 'action.yml', 'action.yaml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
|
||||
}
|
||||
}
|
||||
else if (action.Reference.Type == Pipelines.ActionSourceType.Script)
|
||||
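The hunks above teach the action loader to accept action.yaml alongside action.yml. A condensed sketch of the lookup order they implement (hypothetical helper, assuming Constants.Path.ActionManifestYmlFile is "action.yml" and Constants.Path.ActionManifestYamlFile is "action.yaml"):

using System.IO;

static string ResolveActionManifest(string actionDirectory)
{
    var yml = Path.Combine(actionDirectory, "action.yml");
    var yaml = Path.Combine(actionDirectory, "action.yaml");

    if (File.Exists(yml))
    {
        return yml;   // .yml wins when both files exist
    }

    if (File.Exists(yaml))
    {
        return yaml;
    }

    return null;      // caller falls back to Dockerfile/dockerfile or throws
}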
@@ -477,7 +484,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
int retryCount = 0;
|
||||
|
||||
// Allow up to 20 * 60s for any action to be downloaded from github graph.
|
||||
// Allow up to 20 * 60s for any action to be downloaded from github graph.
|
||||
int timeoutSeconds = 20 * 60;
|
||||
while (retryCount < 3)
|
||||
{
|
||||
@@ -655,12 +662,21 @@ namespace GitHub.Runner.Worker
|
||||
// find the docker file or action.yml file
|
||||
var dockerFile = Path.Combine(actionEntryDirectory, "Dockerfile");
|
||||
var dockerFileLowerCase = Path.Combine(actionEntryDirectory, "dockerfile");
|
||||
var actionManifest = Path.Combine(actionEntryDirectory, "action.yml");
|
||||
if (File.Exists(actionManifest))
|
||||
var actionManifest = Path.Combine(actionEntryDirectory, Constants.Path.ActionManifestYmlFile);
|
||||
var actionManifestYaml = Path.Combine(actionEntryDirectory, Constants.Path.ActionManifestYamlFile);
|
||||
if (File.Exists(actionManifest) || File.Exists(actionManifestYaml))
|
||||
{
|
||||
executionContext.Debug($"action.yml for action: '{actionManifest}'.");
|
||||
var manifestManager = HostContext.GetService<IActionManifestManager>();
|
||||
var actionDefinitionData = manifestManager.Load(executionContext, actionManifest);
|
||||
ActionDefinitionData actionDefinitionData = null;
|
||||
if (File.Exists(actionManifest))
|
||||
{
|
||||
actionDefinitionData = manifestManager.Load(executionContext, actionManifest);
|
||||
}
|
||||
else
|
||||
{
|
||||
actionDefinitionData = manifestManager.Load(executionContext, actionManifestYaml);
|
||||
}
|
||||
|
||||
if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Container)
|
||||
{
|
||||
@@ -720,7 +736,7 @@ namespace GitHub.Runner.Worker
|
||||
else
|
||||
{
|
||||
var fullPath = IOUtil.ResolvePath(actionEntryDirectory, "."); // resolve full path without access filesystem.
|
||||
throw new InvalidOperationException($"Can't find 'action.yml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
|
||||
throw new InvalidOperationException($"Can't find 'action.yml', 'action.yaml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,6 +32,7 @@ namespace GitHub.Runner.Worker
|
||||
public sealed class ActionManifestManager : RunnerService, IActionManifestManager
|
||||
{
|
||||
private TemplateSchema _actionManifestSchema;
|
||||
private IReadOnlyList<String> _fileTable;
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
@@ -61,6 +62,9 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
// Get the file ID
|
||||
var fileId = context.GetFileId(manifestFile);
|
||||
_fileTable = context.GetFileTable();
|
||||
|
||||
// Read the file
|
||||
var fileContent = File.ReadAllText(manifestFile);
|
||||
using (var stringReader = new StringReader(fileContent))
|
||||
{
|
||||
@@ -116,7 +120,7 @@ namespace GitHub.Runner.Worker
|
||||
if (actionDefinition.Execution == null)
|
||||
{
|
||||
executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
|
||||
throw new ArgumentException($"Top level 'run:' section is required for {manifestFile}");
|
||||
throw new ArgumentException($"Top level 'runs:' section is required for {manifestFile}");
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -265,6 +269,15 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
// Add the file table
|
||||
if (_fileTable?.Count > 0)
|
||||
{
|
||||
for (var i = 0 ; i < _fileTable.Count ; i++)
|
||||
{
|
||||
result.GetFileId(_fileTable[i]);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@@ -341,7 +354,7 @@ namespace GitHub.Runner.Worker
|
||||
EntryPoint = entrypointToken?.Value,
|
||||
Environment = envToken,
|
||||
Cleanup = postEntrypointToken?.Value,
|
||||
CleanupCondition = postIfToken?.Value
|
||||
CleanupCondition = postIfToken?.Value ?? "always()"
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -357,7 +370,7 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
Script = mainToken.Value,
|
||||
Cleanup = postToken?.Value,
|
||||
CleanupCondition = postIfToken?.Value
|
||||
CleanupCondition = postIfToken?.Value ?? "always()"
|
||||
};
|
||||
}
|
||||
}
|
||||
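Both hunks above give post-if a default: when an action manifest omits it, the cleanup (post) step now runs under always(). A one-line sketch of the fallback (illustrative helper, not runner code):

// post-if omitted in the manifest  ->  cleanup condition becomes "always()"
static string ResolveCleanupCondition(string postIf) => postIf ?? "always()";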
@@ -415,566 +428,5 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Converts a YAML file into a TemplateToken
|
||||
/// </summary>
|
||||
internal sealed class YamlObjectReader : IObjectReader
|
||||
{
|
||||
internal YamlObjectReader(
|
||||
Int32? fileId,
|
||||
TextReader input)
|
||||
{
|
||||
m_fileId = fileId;
|
||||
m_parser = new Parser(input);
|
||||
}
|
||||
|
||||
public Boolean AllowLiteral(out LiteralToken value)
|
||||
{
|
||||
if (EvaluateCurrent() is Scalar scalar)
|
||||
{
|
||||
// Tag specified
|
||||
if (!string.IsNullOrEmpty(scalar.Tag))
|
||||
{
|
||||
// String tag
|
||||
if (string.Equals(scalar.Tag, c_stringTag, StringComparison.Ordinal))
|
||||
{
|
||||
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Not plain style
|
||||
if (scalar.Style != ScalarStyle.Plain)
|
||||
{
|
||||
throw new NotSupportedException($"The scalar style '{scalar.Style}' on line {scalar.Start.Line} and column {scalar.Start.Column} is not valid with the tag '{scalar.Tag}'");
|
||||
}
|
||||
|
||||
// Boolean, Float, Integer, or Null
|
||||
switch (scalar.Tag)
|
||||
{
|
||||
case c_booleanTag:
|
||||
value = ParseBoolean(scalar);
|
||||
break;
|
||||
case c_floatTag:
|
||||
value = ParseFloat(scalar);
|
||||
break;
|
||||
case c_integerTag:
|
||||
value = ParseInteger(scalar);
|
||||
break;
|
||||
case c_nullTag:
|
||||
value = ParseNull(scalar);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException($"Unexpected tag '{scalar.Tag}'");
|
||||
}
|
||||
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Plain style, determine type using YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
if (scalar.Style == ScalarStyle.Plain)
|
||||
{
|
||||
if (MatchNull(scalar, out var nullToken))
|
||||
{
|
||||
value = nullToken;
|
||||
}
|
||||
else if (MatchBoolean(scalar, out var booleanToken))
|
||||
{
|
||||
value = booleanToken;
|
||||
}
|
||||
else if (MatchInteger(scalar, out var numberToken) ||
|
||||
MatchFloat(scalar, out numberToken))
|
||||
{
|
||||
value = numberToken;
|
||||
}
|
||||
else
|
||||
{
|
||||
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
|
||||
}
|
||||
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise assume string
|
||||
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
public Boolean AllowSequenceStart(out SequenceToken value)
|
||||
{
|
||||
if (EvaluateCurrent() is SequenceStart sequenceStart)
|
||||
{
|
||||
value = new SequenceToken(m_fileId, sequenceStart.Start.Line, sequenceStart.Start.Column);
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
public Boolean AllowSequenceEnd()
|
||||
{
|
||||
if (EvaluateCurrent() is SequenceEnd)
|
||||
{
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
public Boolean AllowMappingStart(out MappingToken value)
|
||||
{
|
||||
if (EvaluateCurrent() is MappingStart mappingStart)
|
||||
{
|
||||
value = new MappingToken(m_fileId, mappingStart.Start.Line, mappingStart.Start.Column);
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
public Boolean AllowMappingEnd()
|
||||
{
|
||||
if (EvaluateCurrent() is MappingEnd)
|
||||
{
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd.
|
||||
/// </summary>
|
||||
public void ValidateEnd()
|
||||
{
|
||||
if (EvaluateCurrent() is DocumentEnd)
|
||||
{
|
||||
MoveNext();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Expected document end parse event");
|
||||
}
|
||||
|
||||
if (EvaluateCurrent() is StreamEnd)
|
||||
{
|
||||
MoveNext();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Expected stream end parse event");
|
||||
}
|
||||
|
||||
if (MoveNext())
|
||||
{
|
||||
throw new InvalidOperationException("Expected end of parse events");
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart.
|
||||
/// </summary>
|
||||
public void ValidateStart()
|
||||
{
|
||||
if (EvaluateCurrent() != null)
|
||||
{
|
||||
throw new InvalidOperationException("Unexpected parser state");
|
||||
}
|
||||
|
||||
if (!MoveNext())
|
||||
{
|
||||
throw new InvalidOperationException("Expected a parse event");
|
||||
}
|
||||
|
||||
if (EvaluateCurrent() is StreamStart)
|
||||
{
|
||||
MoveNext();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Expected stream start parse event");
|
||||
}
|
||||
|
||||
if (EvaluateCurrent() is DocumentStart)
|
||||
{
|
||||
MoveNext();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Expected document start parse event");
|
||||
}
|
||||
}
|
||||
|
||||
private ParsingEvent EvaluateCurrent()
|
||||
{
|
||||
if (m_current == null)
|
||||
{
|
||||
m_current = m_parser.Current;
|
||||
if (m_current != null)
|
||||
{
|
||||
if (m_current is Scalar scalar)
|
||||
{
|
||||
// Verify not using anchors
|
||||
if (scalar.Anchor != null)
|
||||
{
|
||||
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{scalar.Anchor}'");
|
||||
}
|
||||
}
|
||||
else if (m_current is MappingStart mappingStart)
|
||||
{
|
||||
// Verify not using anchors
|
||||
if (mappingStart.Anchor != null)
|
||||
{
|
||||
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{mappingStart.Anchor}'");
|
||||
}
|
||||
}
|
||||
else if (m_current is SequenceStart sequenceStart)
|
||||
{
|
||||
// Verify not using anchors
|
||||
if (sequenceStart.Anchor != null)
|
||||
{
|
||||
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{sequenceStart.Anchor}'");
|
||||
}
|
||||
}
|
||||
else if (!(m_current is MappingEnd) &&
|
||||
!(m_current is SequenceEnd) &&
|
||||
!(m_current is DocumentStart) &&
|
||||
!(m_current is DocumentEnd) &&
|
||||
!(m_current is StreamStart) &&
|
||||
!(m_current is StreamEnd))
|
||||
{
|
||||
throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return m_current;
|
||||
}
|
||||
|
||||
private Boolean MoveNext()
|
||||
{
|
||||
m_current = null;
|
||||
return m_parser.MoveNext();
|
||||
}
|
||||
|
||||
private BooleanToken ParseBoolean(Scalar scalar)
|
||||
{
|
||||
if (MatchBoolean(scalar, out var token))
|
||||
{
|
||||
return token;
|
||||
}
|
||||
|
||||
ThrowInvalidValue(scalar, c_booleanTag); // throws
|
||||
return default;
|
||||
}
|
||||
|
||||
private NumberToken ParseFloat(Scalar scalar)
|
||||
{
|
||||
if (MatchFloat(scalar, out var token))
|
||||
{
|
||||
return token;
|
||||
}
|
||||
|
||||
ThrowInvalidValue(scalar, c_floatTag); // throws
|
||||
return default;
|
||||
}
|
||||
|
||||
private NumberToken ParseInteger(Scalar scalar)
|
||||
{
|
||||
if (MatchInteger(scalar, out var token))
|
||||
{
|
||||
return token;
|
||||
}
|
||||
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
return default;
|
||||
}
|
||||
|
||||
private NullToken ParseNull(Scalar scalar)
|
||||
{
|
||||
if (MatchNull(scalar, out var token))
|
||||
{
|
||||
return token;
|
||||
}
|
||||
|
||||
ThrowInvalidValue(scalar, c_nullTag); // throws
|
||||
return default;
|
||||
}
|
||||
|
||||
private Boolean MatchBoolean(
|
||||
Scalar scalar,
|
||||
out BooleanToken value)
|
||||
{
|
||||
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
switch (scalar.Value ?? string.Empty)
|
||||
{
|
||||
case "true":
|
||||
case "True":
|
||||
case "TRUE":
|
||||
value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, true);
|
||||
return true;
|
||||
case "false":
|
||||
case "False":
|
||||
case "FALSE":
|
||||
value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, false);
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
private Boolean MatchFloat(
|
||||
Scalar scalar,
|
||||
out NumberToken value)
|
||||
{
|
||||
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
var str = scalar.Value;
|
||||
if (!string.IsNullOrEmpty(str))
|
||||
{
|
||||
// Check for [-+]?(\.inf|\.Inf|\.INF)|\.nan|\.NaN|\.NAN
|
||||
switch (str)
|
||||
{
|
||||
case ".inf":
|
||||
case ".Inf":
|
||||
case ".INF":
|
||||
case "+.inf":
|
||||
case "+.Inf":
|
||||
case "+.INF":
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.PositiveInfinity);
|
||||
return true;
|
||||
case "-.inf":
|
||||
case "-.Inf":
|
||||
case "-.INF":
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NegativeInfinity);
|
||||
return true;
|
||||
case ".nan":
|
||||
case ".NaN":
|
||||
case ".NAN":
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NaN);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?
|
||||
|
||||
// Skip leading sign
|
||||
var index = str[0] == '-' || str[0] == '+' ? 1 : 0;
|
||||
|
||||
// Check for integer portion
|
||||
var length = str.Length;
|
||||
var hasInteger = false;
|
||||
while (index < length && str[index] >= '0' && str[index] <= '9')
|
||||
{
|
||||
hasInteger = true;
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for decimal point
|
||||
var hasDot = false;
|
||||
if (index < length && str[index] == '.')
|
||||
{
|
||||
hasDot = true;
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for decimal portion
|
||||
var hasDecimal = false;
|
||||
while (index < length && str[index] >= '0' && str[index] <= '9')
|
||||
{
|
||||
hasDecimal = true;
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)
|
||||
if ((hasDot && hasDecimal) || hasInteger)
|
||||
{
|
||||
// Check for end
|
||||
if (index == length)
|
||||
{
|
||||
// Try parse
|
||||
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var doubleValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
|
||||
return true;
|
||||
}
|
||||
// Otherwise exceeds range
|
||||
else
|
||||
{
|
||||
ThrowInvalidValue(scalar, c_floatTag); // throws
|
||||
}
|
||||
}
|
||||
// Check [eE][-+]?[0-9]
|
||||
else if (index < length && (str[index] == 'e' || str[index] == 'E'))
|
||||
{
|
||||
index++;
|
||||
|
||||
// Skip sign
|
||||
if (index < length && (str[index] == '-' || str[index] == '+'))
|
||||
{
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for exponent
|
||||
var hasExponent = false;
|
||||
while (index < length && str[index] >= '0' && str[index] <= '9')
|
||||
{
|
||||
hasExponent = true;
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for end
|
||||
if (hasExponent && index == length)
|
||||
{
|
||||
// Try parse
|
||||
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var doubleValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, (Double)doubleValue);
|
||||
return true;
|
||||
}
|
||||
// Otherwise exceeds range
|
||||
else
|
||||
{
|
||||
ThrowInvalidValue(scalar, c_floatTag); // throws
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
private Boolean MatchInteger(
|
||||
Scalar scalar,
|
||||
out NumberToken value)
|
||||
{
|
||||
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
var str = scalar.Value;
|
||||
if (!string.IsNullOrEmpty(str))
|
||||
{
|
||||
// Check for [0-9]+
|
||||
var firstChar = str[0];
|
||||
if (firstChar >= '0' && firstChar <= '9' &&
|
||||
str.Skip(1).All(x => x >= '0' && x <= '9'))
|
||||
{
|
||||
// Try parse
|
||||
if (Double.TryParse(str, NumberStyles.None, CultureInfo.InvariantCulture, out var doubleValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise exceeds range
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
}
|
||||
// Check for (-|+)[0-9]+
|
||||
else if ((firstChar == '-' || firstChar == '+') &&
|
||||
str.Length > 1 &&
|
||||
str.Skip(1).All(x => x >= '0' && x <= '9'))
|
||||
{
|
||||
// Try parse
|
||||
if (Double.TryParse(str, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out var doubleValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise exceeds range
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
}
|
||||
// Check for 0x[0-9a-fA-F]+
|
||||
else if (firstChar == '0' &&
|
||||
str.Length > 2 &&
|
||||
str[1] == 'x' &&
|
||||
str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F')))
|
||||
{
|
||||
// Try parse
|
||||
if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integerValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise exceeds range
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
}
|
||||
// Check for 0o[0-9]+
|
||||
else if (firstChar == '0' &&
|
||||
str.Length > 2 &&
|
||||
str[1] == 'o' &&
|
||||
str.Skip(2).All(x => x >= '0' && x <= '7'))
|
||||
{
|
||||
// Try parse
|
||||
var integerValue = default(Int32);
|
||||
try
|
||||
{
|
||||
integerValue = Convert.ToInt32(str.Substring(2), 8);
|
||||
}
|
||||
// Otherwise exceeds range
|
||||
catch (Exception)
|
||||
{
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
}
|
||||
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
private Boolean MatchNull(
|
||||
Scalar scalar,
|
||||
out NullToken value)
|
||||
{
|
||||
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
switch (scalar.Value ?? string.Empty)
|
||||
{
|
||||
case "":
|
||||
case "null":
|
||||
case "Null":
|
||||
case "NULL":
|
||||
case "~":
|
||||
value = new NullToken(m_fileId, scalar.Start.Line, scalar.Start.Column);
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
private void ThrowInvalidValue(
|
||||
Scalar scalar,
|
||||
String tag)
|
||||
{
|
||||
throw new NotSupportedException($"The value '{scalar.Value}' on line {scalar.Start.Line} and column {scalar.Start.Column} is invalid for the type '{scalar.Tag}'");
|
||||
}
|
||||
|
||||
private const String c_booleanTag = "tag:yaml.org,2002:bool";
|
||||
private const String c_floatTag = "tag:yaml.org,2002:float";
|
||||
private const String c_integerTag = "tag:yaml.org,2002:int";
|
||||
private const String c_nullTag = "tag:yaml.org,2002:null";
|
||||
private const String c_stringTag = "tag:yaml.org,2002:string";
|
||||
private readonly Int32? m_fileId;
|
||||
private readonly Parser m_parser;
|
||||
private ParsingEvent m_current;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
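For reference, the Match* helpers above implement plain-scalar resolution from the YAML 1.2 core schema. A small illustrative mapping of how a few plain values would be typed (drawn from the code above; not an exhaustive list):

using System.Collections.Generic;

// scalar text -> token type produced by YamlObjectReader for plain style
var coreSchemaExamples = new Dictionary<string, string>
{
    [""]      = "NullToken",      // also "null", "Null", "NULL", "~"
    ["True"]  = "BooleanToken",   // true/True/TRUE, false/False/FALSE
    ["42"]    = "NumberToken",    // decimal, 0x1A hex, 0o17 octal integers
    ["3.14"]  = "NumberToken",    // floats, exponent forms, .inf/.nan
    ["hello"] = "StringToken",    // anything that matches none of the above
};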
@@ -141,9 +141,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
// Load the inputs.
|
||||
ExecutionContext.Debug("Loading inputs");
|
||||
var templateTrace = ExecutionContext.ToTemplateTraceWriter();
|
||||
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
|
||||
var templateEvaluator = new PipelineTemplateEvaluator(templateTrace, schema);
|
||||
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
|
||||
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues);
|
||||
|
||||
foreach (KeyValuePair<string, string> input in inputs)
|
||||
@@ -179,17 +177,15 @@ namespace GitHub.Runner.Worker
|
||||
ExecutionContext.Debug("Loading env");
|
||||
var environment = new Dictionary<String, String>(VarUtil.EnvironmentVariableKeyComparer);
|
||||
|
||||
// Apply environment set using ##[set-env] first since these are job level env
|
||||
foreach (var env in ExecutionContext.EnvironmentVariables)
|
||||
#if OS_WINDOWS
|
||||
var envContext = ExecutionContext.ExpressionValues["env"] as DictionaryContextData;
|
||||
#else
|
||||
var envContext = ExecutionContext.ExpressionValues["env"] as CaseSensitiveDictionaryContextData;
|
||||
#endif
|
||||
// Apply environment from env context, env context contains job level env and action's env block
|
||||
foreach (var env in envContext)
|
||||
{
|
||||
environment[env.Key] = env.Value ?? string.Empty;
|
||||
}
|
||||
|
||||
// Apply action's env block later.
|
||||
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(Action.Environment, ExecutionContext.ExpressionValues, VarUtil.EnvironmentVariableKeyComparer);
|
||||
foreach (var env in actionEnvironment)
|
||||
{
|
||||
environment[env.Key] = env.Value ?? string.Empty;
|
||||
environment[env.Key] = env.Value.ToString();
|
||||
}
|
||||
|
||||
// Apply action's intra-action state at last
|
||||
@@ -297,8 +293,7 @@ namespace GitHub.Runner.Worker
|
||||
return displayName;
|
||||
}
|
||||
// Try evaluating fully
|
||||
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
|
||||
var templateEvaluator = new PipelineTemplateEvaluator(context.ToTemplateTraceWriter(), schema);
|
||||
var templateEvaluator = context.ToPipelineTemplateEvaluator();
|
||||
try
|
||||
{
|
||||
didFullyEvaluate = templateEvaluator.TryEvaluateStepDisplayName(tokenToParse, contextData, out displayName);
|
||||
|
||||
@@ -2,9 +2,9 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Runner.Worker.Container
|
||||
{
|
||||
@@ -19,7 +19,6 @@ namespace GitHub.Runner.Worker.Container
|
||||
|
||||
public ContainerInfo()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public ContainerInfo(IHostContext hostContext, Pipelines.JobContainer container, bool isJobContainer = true, string networkAlias = null)
|
||||
@@ -64,6 +63,8 @@ namespace GitHub.Runner.Worker.Container
|
||||
UserMountVolumes[volume] = volume;
|
||||
}
|
||||
}
|
||||
|
||||
UpdateWebProxyEnv(hostContext.WebProxy);
|
||||
}
|
||||
|
||||
public string ContainerId { get; set; }
|
||||
@@ -223,6 +224,26 @@ namespace GitHub.Runner.Worker.Container
|
||||
{
|
||||
_pathMappings.Insert(0, new PathMapping(hostCommonPath, containerCommonPath));
|
||||
}
|
||||
|
||||
private void UpdateWebProxyEnv(RunnerWebProxy webProxy)
|
||||
{
|
||||
// Set common forms of proxy variables if configured in Runner and not set directly by container.env
|
||||
if (!String.IsNullOrEmpty(webProxy.HttpProxyAddress))
|
||||
{
|
||||
ContainerEnvironmentVariables.TryAdd("HTTP_PROXY", webProxy.HttpProxyAddress);
|
||||
ContainerEnvironmentVariables.TryAdd("http_proxy", webProxy.HttpProxyAddress);
|
||||
}
|
||||
if (!String.IsNullOrEmpty(webProxy.HttpsProxyAddress))
|
||||
{
|
||||
ContainerEnvironmentVariables.TryAdd("HTTPS_PROXY", webProxy.HttpsProxyAddress);
|
||||
ContainerEnvironmentVariables.TryAdd("https_proxy", webProxy.HttpsProxyAddress);
|
||||
}
|
||||
if (!String.IsNullOrEmpty(webProxy.NoProxyString))
|
||||
{
|
||||
ContainerEnvironmentVariables.TryAdd("NO_PROXY", webProxy.NoProxyString);
|
||||
ContainerEnvironmentVariables.TryAdd("no_proxy", webProxy.NoProxyString);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public class MountVolume
|
||||
|
||||
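UpdateWebProxyEnv above uses TryAdd, so proxy variables configured on the runner never overwrite values the workflow already set via container.env. A small demonstration of that precedence (values invented):

using System.Collections.Generic;

var env = new Dictionary<string, string>
{
    ["HTTP_PROXY"] = "http://from-container-env:8080",   // set via container.env
};

// The runner-configured proxy is only added when the key is absent, so this is a no-op:
env.TryAdd("HTTP_PROXY", "http://proxy-from-runner-config:8080");
// env["HTTP_PROXY"] is still "http://from-container-env:8080"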
@@ -11,6 +11,7 @@ using GitHub.Runner.Worker.Container;
|
||||
using GitHub.Services.WebApi;
|
||||
using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Common;
|
||||
@@ -20,6 +21,7 @@ using System.Text;
|
||||
using System.Collections;
|
||||
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.Expressions2;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
{
|
||||
@@ -37,6 +39,7 @@ namespace GitHub.Runner.Worker
|
||||
string ContextName { get; }
|
||||
Task ForceCompleted { get; }
|
||||
TaskResult? Result { get; set; }
|
||||
TaskResult? Outcome { get; set; }
|
||||
string ResultCode { get; set; }
|
||||
TaskResult? CommandResult { get; set; }
|
||||
CancellationToken CancellationToken { get; }
|
||||
@@ -45,9 +48,11 @@ namespace GitHub.Runner.Worker
|
||||
PlanFeatures Features { get; }
|
||||
Variables Variables { get; }
|
||||
Dictionary<string, string> IntraActionState { get; }
|
||||
HashSet<string> OutputVariables { get; }
|
||||
IDictionary<String, IDictionary<String, String>> JobDefaults { get; }
|
||||
Dictionary<string, VariableValue> JobOutputs { get; }
|
||||
IDictionary<String, String> EnvironmentVariables { get; }
|
||||
IDictionary<String, ContextScope> Scopes { get; }
|
||||
IList<String> FileTable { get; }
|
||||
StepsContext StepsContext { get; }
|
||||
DictionaryContextData ExpressionValues { get; }
|
||||
List<string> PrependPath { get; }
|
||||
@@ -107,7 +112,6 @@ namespace GitHub.Runner.Worker
|
||||
private readonly TimelineRecord _record = new TimelineRecord();
|
||||
private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new Dictionary<Guid, TimelineRecord>();
|
||||
private readonly object _loggerLock = new object();
|
||||
private readonly HashSet<string> _outputvariables = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
private readonly object _matchersLock = new object();
|
||||
|
||||
private event OnMatcherChanged _onMatcherChanged;
|
||||
@@ -137,9 +141,11 @@ namespace GitHub.Runner.Worker
|
||||
public List<ServiceEndpoint> Endpoints { get; private set; }
|
||||
public Variables Variables { get; private set; }
|
||||
public Dictionary<string, string> IntraActionState { get; private set; }
|
||||
public HashSet<string> OutputVariables => _outputvariables;
|
||||
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; private set; }
|
||||
public Dictionary<string, VariableValue> JobOutputs { get; private set; }
|
||||
public IDictionary<String, String> EnvironmentVariables { get; private set; }
|
||||
public IDictionary<String, ContextScope> Scopes { get; private set; }
|
||||
public IList<String> FileTable { get; private set; }
|
||||
public StepsContext StepsContext { get; private set; }
|
||||
public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
|
||||
public bool WriteDebug { get; private set; }
|
||||
@@ -168,6 +174,8 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
public TaskResult? Outcome { get; set; }
|
||||
|
||||
public TaskResult? CommandResult { get; set; }
|
||||
|
||||
private string ContextType => _record.RecordType;
|
||||
@@ -264,7 +272,9 @@ namespace GitHub.Runner.Worker
|
||||
child.IntraActionState = intraActionState;
|
||||
}
|
||||
child.EnvironmentVariables = EnvironmentVariables;
|
||||
child.JobDefaults = JobDefaults;
|
||||
child.Scopes = Scopes;
|
||||
child.FileTable = FileTable;
|
||||
child.StepsContext = StepsContext;
|
||||
foreach (var pair in ExpressionValues)
|
||||
{
|
||||
@@ -342,6 +352,12 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
_logger.End();
|
||||
|
||||
if (!string.IsNullOrEmpty(ContextName))
|
||||
{
|
||||
StepsContext.SetOutcome(ScopeName, ContextName, (Outcome ?? Result ?? TaskResult.Succeeded).ToActionResult().ToString());
|
||||
StepsContext.SetConclusion(ScopeName, ContextName, (Result ?? TaskResult.Succeeded).ToActionResult().ToString());
|
||||
}
|
||||
|
||||
return Result.Value;
|
||||
}
|
||||
|
||||
@@ -552,6 +568,12 @@ namespace GitHub.Runner.Worker
|
||||
// Environment variables shared across all actions
|
||||
EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
|
||||
|
||||
// Job defaults shared across all actions
|
||||
JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
// Job Outputs
|
||||
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
// Service container info
|
||||
ServiceContainers = new List<ContainerInfo>();
|
||||
|
||||
@@ -568,6 +590,15 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
// File table
|
||||
FileTable = new List<String>(message.FileTable ?? new string[0]);
|
||||
|
||||
// Expression functions
|
||||
if (Variables.GetBoolean("System.HashFilesV2") == true)
|
||||
{
|
||||
ExpressionConstants.UpdateFunction<Handlers.HashFiles>("hashFiles", 1, byte.MaxValue);
|
||||
}
|
||||
|
||||
// Expression values
|
||||
if (message.ContextData?.Count > 0)
|
||||
{
|
||||
@@ -585,8 +616,13 @@ namespace GitHub.Runner.Worker
|
||||
var githubAccessToken = new StringContextData(Variables.Get("system.github.token"));
|
||||
var base64EncodedToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{githubAccessToken}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodedToken);
|
||||
var githubJob = Variables.Get("system.github.job");
|
||||
var githubContext = new GitHubContext();
|
||||
githubContext["token"] = githubAccessToken;
|
||||
if (!string.IsNullOrEmpty(githubJob))
|
||||
{
|
||||
githubContext["job"] = new StringContextData(githubJob);
|
||||
}
|
||||
var githubDictionary = ExpressionValues["github"].AssertDictionary("github");
|
||||
foreach (var pair in githubDictionary)
|
||||
{
|
||||
@@ -722,7 +758,7 @@ namespace GitHub.Runner.Worker
|
||||
var owners = config.Matchers.Select(x => $"'{x.Owner}'");
|
||||
var joinedOwners = string.Join(", ", owners);
|
||||
// todo: loc
|
||||
this.Output($"Added matchers: {joinedOwners}. Problem matchers scan action output for known warning or error strings and report these inline.");
|
||||
this.Debug($"Added matchers: {joinedOwners}. Problem matchers scan action output for known warning or error strings and report these inline.");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -764,7 +800,7 @@ namespace GitHub.Runner.Worker
|
||||
owners = removedMatchers.Select(x => $"'{x.Owner}'");
|
||||
var joinedOwners = string.Join(", ", owners);
|
||||
// todo: loc
|
||||
this.Output($"Removed matchers: {joinedOwners}");
|
||||
this.Debug($"Removed matchers: {joinedOwners}");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -879,6 +915,13 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
public static PipelineTemplateEvaluator ToPipelineTemplateEvaluator(this IExecutionContext context)
|
||||
{
|
||||
var templateTrace = context.ToTemplateTraceWriter();
|
||||
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
|
||||
return new PipelineTemplateEvaluator(templateTrace, schema, context.FileTable);
|
||||
}
|
||||
|
||||
public static ObjectTemplating.ITraceWriter ToTemplateTraceWriter(this IExecutionContext context)
|
||||
{
|
||||
return new TemplateTraceWriter(context);
|
||||
src/Runner.Worker/ExpressionFunctions/HashFiles.cs (new file, 126 lines)
@@ -0,0 +1,126 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using GitHub.DistributedTask.Expressions2.Sdk;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Reflection;
|
||||
using System.Threading;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
public class FunctionTrace : ITraceWriter
|
||||
{
|
||||
private GitHub.DistributedTask.Expressions2.ITraceWriter _trace;
|
||||
|
||||
public FunctionTrace(GitHub.DistributedTask.Expressions2.ITraceWriter trace)
|
||||
{
|
||||
_trace = trace;
|
||||
}
|
||||
public void Info(string message)
|
||||
{
|
||||
_trace.Info(message);
|
||||
}
|
||||
|
||||
public void Verbose(string message)
|
||||
{
|
||||
_trace.Info(message);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class HashFiles : Function
|
||||
{
|
||||
protected sealed override Object EvaluateCore(
|
||||
EvaluationContext context,
|
||||
out ResultMemory resultMemory)
|
||||
{
|
||||
resultMemory = null;
|
||||
var templateContext = context.State as DistributedTask.ObjectTemplating.TemplateContext;
|
||||
ArgUtil.NotNull(templateContext, nameof(templateContext));
|
||||
templateContext.ExpressionValues.TryGetValue(PipelineTemplateConstants.GitHub, out var githubContextData);
|
||||
ArgUtil.NotNull(githubContextData, nameof(githubContextData));
|
||||
var githubContext = githubContextData as DictionaryContextData;
|
||||
ArgUtil.NotNull(githubContext, nameof(githubContext));
|
||||
githubContext.TryGetValue(PipelineTemplateConstants.Workspace, out var workspace);
|
||||
var workspaceData = workspace as StringContextData;
|
||||
ArgUtil.NotNull(workspaceData, nameof(workspaceData));
|
||||
|
||||
string githubWorkspace = workspaceData.Value;
|
||||
bool followSymlink = false;
|
||||
List<string> patterns = new List<string>();
|
||||
var firstParameter = true;
|
||||
foreach (var parameter in Parameters)
|
||||
{
|
||||
var parameterString = parameter.Evaluate(context).ConvertToString();
|
||||
if (firstParameter)
|
||||
{
|
||||
firstParameter = false;
|
||||
if (parameterString.StartsWith("--"))
|
||||
{
|
||||
if (string.Equals(parameterString, "--follow-symbolic-links", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
followSymlink = true;
|
||||
continue;
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ArgumentOutOfRangeException($"Invalid glob option {parameterString}, avaliable option: '--follow-symbolic-links'.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
patterns.Add(parameterString);
|
||||
}
|
||||
|
||||
context.Trace.Info($"Search root directory: '{githubWorkspace}'");
|
||||
context.Trace.Info($"Search pattern: '{string.Join(", ", patterns)}'");
|
||||
|
||||
string binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
|
||||
string runnerRoot = new DirectoryInfo(binDir).Parent.FullName;
|
||||
|
||||
string node = Path.Combine(runnerRoot, "externals", "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
string hashFilesScript = Path.Combine(binDir, "hashFiles");
|
||||
var hashResult = string.Empty;
|
||||
var p = new ProcessInvoker(new FunctionTrace(context.Trace));
|
||||
p.ErrorDataReceived += ((_, data) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
|
||||
{
|
||||
hashResult = data.Data.Substring(10, data.Data.Length - 20);
|
||||
context.Trace.Info($"Hash result: '{hashResult}'");
|
||||
}
|
||||
else
|
||||
{
|
||||
context.Trace.Info(data.Data);
|
||||
}
|
||||
});
|
||||
|
||||
p.OutputDataReceived += ((_, data) =>
|
||||
{
|
||||
context.Trace.Info(data.Data);
|
||||
});
|
||||
|
||||
var env = new Dictionary<string, string>();
|
||||
if (followSymlink)
|
||||
{
|
||||
env["followSymbolicLinks"] = "true";
|
||||
}
|
||||
env["patterns"] = string.Join(Environment.NewLine, patterns);
|
||||
|
||||
int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
|
||||
fileName: node,
|
||||
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
|
||||
environment: env,
|
||||
requireExitCodeZero: false,
|
||||
cancellationToken: new CancellationTokenSource(TimeSpan.FromSeconds(120)).Token).GetAwaiter().GetResult();
|
||||
|
||||
if (exitCode != 0)
|
||||
{
|
||||
throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
|
||||
}
|
||||
|
||||
return hashResult;
|
||||
}
|
||||
}
|
||||
}
|
||||
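The new HashFiles function above delegates the globbing and hashing to a bundled Node script and recovers the result from stderr. A short sketch of the output protocol it relies on (the marker handling mirrors the Substring call above; the sample hash is made up):

// The script wraps the final hash in __OUTPUT__ markers on stderr,
// e.g. "__OUTPUT__<sha256>__OUTPUT__"; the 10-character markers are trimmed off.
var data = "__OUTPUT__9f86d081884c7d65__OUTPUT__";
var hash = data.Substring(10, data.Length - 20);   // -> "9f86d081884c7d65"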
@@ -14,6 +14,7 @@ namespace GitHub.Runner.Worker
|
||||
"event_name",
|
||||
"event_path",
|
||||
"head_ref",
|
||||
"job",
|
||||
"ref",
|
||||
"repository",
|
||||
"run_id",
|
||||
|
||||
@@ -189,13 +189,13 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
container.ContainerEnvironmentVariables[variable.Key] = container.TranslateToContainerPath(variable.Value);
|
||||
}
|
||||
|
||||
using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager))
|
||||
using (var stderrManager = new OutputManager(ExecutionContext, ActionCommandManager))
|
||||
using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager, container))
|
||||
using (var stderrManager = new OutputManager(ExecutionContext, ActionCommandManager, container))
|
||||
{
|
||||
var runExitCode = await dockerManger.DockerRun(ExecutionContext, container, stdoutManager.OnDataReceived, stderrManager.OnDataReceived);
|
||||
ExecutionContext.Debug($"Docker Action run completed with exit code {runExitCode}");
|
||||
if (runExitCode != 0)
|
||||
{
|
||||
ExecutionContext.Error($"Docker run failed with exit code {runExitCode}");
|
||||
ExecutionContext.Result = TaskResult.Failed;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -122,9 +122,9 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
else
|
||||
{
|
||||
var exitCode = await step;
|
||||
ExecutionContext.Debug($"Node Action run completed with exit code {exitCode}");
|
||||
if (exitCode != 0)
|
||||
{
|
||||
ExecutionContext.Error($"Node run failed with exit code {exitCode}");
|
||||
ExecutionContext.Result = TaskResult.Failed;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker.Container;
|
||||
using DTWebApi = GitHub.DistributedTask.WebApi;
|
||||
|
||||
namespace GitHub.Runner.Worker.Handlers
|
||||
@@ -17,6 +18,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
private const string _timeoutKey = "GITHUB_ACTIONS_RUNNER_ISSUE_MATCHER_TIMEOUT";
|
||||
private static readonly Regex _colorCodeRegex = new Regex(@"\x0033\[[0-9;]*m?", RegexOptions.Compiled | RegexOptions.CultureInvariant);
|
||||
private readonly IActionCommandManager _commandManager;
|
||||
private readonly ContainerInfo _container;
|
||||
private readonly IExecutionContext _executionContext;
|
||||
private readonly int _failsafe = 50;
|
||||
private readonly object _matchersLock = new object();
|
||||
@@ -25,10 +27,11 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
// Mapping that indicates whether a directory belongs to the workflow repository
|
||||
private readonly Dictionary<string, string> _directoryMap = new Dictionary<string, string>();
|
||||
|
||||
public OutputManager(IExecutionContext executionContext, IActionCommandManager commandManager)
|
||||
public OutputManager(IExecutionContext executionContext, IActionCommandManager commandManager, ContainerInfo container = null)
|
||||
{
|
||||
_executionContext = executionContext;
|
||||
_commandManager = commandManager;
|
||||
_container = container ?? executionContext.Container;
|
||||
|
||||
// Recursion failsafe (test override)
|
||||
var failsafeString = Environment.GetEnvironmentVariable("RUNNER_TEST_GET_REPOSITORY_PATH_FAILSAFE");
|
||||
@@ -81,7 +84,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
{
|
||||
// This does not need to be inside of a critical section.
|
||||
// The logging queues and command handlers are thread-safe.
|
||||
if (_commandManager.TryProcessCommand(_executionContext, line))
|
||||
if (_commandManager.TryProcessCommand(_executionContext, line, _container))
|
||||
{
|
||||
return;
|
||||
}
|
||||
@@ -257,6 +260,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
if (!string.IsNullOrWhiteSpace(match.File))
|
||||
{
|
||||
var file = match.File;
|
||||
var translate = _container != null;
|
||||
|
||||
// Root using fromPath
|
||||
if (!string.IsNullOrWhiteSpace(match.FromPath) && !Path.IsPathFullyQualified(file))
|
||||
@@ -275,11 +279,19 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
ArgUtil.NotNullOrEmpty(workspace, "workspace");
|
||||
|
||||
file = Path.Combine(workspace, file);
|
||||
translate = false;
|
||||
}
|
||||
|
||||
// Remove relative pathing and normalize slashes
|
||||
file = Path.GetFullPath(file);
|
||||
|
||||
// Translate to host
|
||||
if (translate)
|
||||
{
|
||||
file = _container.TranslateToHostPath(file);
|
||||
file = Path.GetFullPath(file);
|
||||
}
|
||||
|
||||
// Check whether the file exists
|
||||
if (File.Exists(file))
|
||||
{
|
||||
|
||||
@@ -58,12 +58,21 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
string shellCommandPath = null;
|
||||
bool validateShellOnHost = !(StepHost is ContainerStepHost);
|
||||
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
|
||||
Inputs.TryGetValue("shell", out var shell);
|
||||
string shell = null;
|
||||
if (!Inputs.TryGetValue("shell", out shell) || string.IsNullOrEmpty(shell))
|
||||
{
|
||||
// TODO: figure out how defaults interact with template later
|
||||
// for now, we won't check job.defaults if we are inside a template.
|
||||
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
|
||||
{
|
||||
runDefaults.TryGetValue("shell", out shell);
|
||||
}
|
||||
}
|
||||
if (string.IsNullOrEmpty(shell))
|
||||
{
|
||||
#if OS_WINDOWS
|
||||
shellCommand = "pwsh";
|
||||
if(validateShellOnHost)
|
||||
if (validateShellOnHost)
|
||||
{
|
||||
shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
|
||||
if (string.IsNullOrEmpty(shellCommandPath))
|
||||
@@ -139,11 +148,36 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
Inputs.TryGetValue("script", out var contents);
|
||||
contents = contents ?? string.Empty;
|
||||
|
||||
Inputs.TryGetValue("workingDirectory", out var workingDirectory);
|
||||
string workingDirectory = null;
|
||||
if (!Inputs.TryGetValue("workingDirectory", out workingDirectory))
|
||||
{
|
||||
// TODO: figure out how defaults interact with template later
|
||||
// for now, we won't check job.defaults if we are inside a template.
|
||||
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
|
||||
{
|
||||
if (runDefaults.TryGetValue("working-directory", out workingDirectory))
|
||||
{
|
||||
ExecutionContext.Debug("Overwrite 'working-directory' base on job defaults.");
|
||||
}
|
||||
}
|
||||
}
|
||||
var workspaceDir = githubContext["workspace"] as StringContextData;
|
||||
workingDirectory = Path.Combine(workspaceDir, workingDirectory ?? string.Empty);
|
||||
|
||||
Inputs.TryGetValue("shell", out var shell);
|
||||
string shell = null;
|
||||
if (!Inputs.TryGetValue("shell", out shell) || string.IsNullOrEmpty(shell))
|
||||
{
|
||||
// TODO: figure out how defaults interact with template later
|
||||
// for now, we won't check job.defaults if we are inside a template.
|
||||
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
|
||||
{
|
||||
if (runDefaults.TryGetValue("shell", out shell))
|
||||
{
|
||||
ExecutionContext.Debug("Overwrite 'shell' base on job defaults.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var isContainerStepHost = StepHost is ContainerStepHost;
|
||||
|
||||
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
|
||||
|
||||
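The two hunks above make run steps fall back to job defaults when 'shell' or 'working-directory' is not set on the step itself (skipped inside a scoped template for now). A condensed sketch of the resolution order (hypothetical helper; pwsh on Windows is the platform default shown above):

using System.Collections.Generic;

static string ResolveShell(
    string stepShell,
    string scopeName,
    IDictionary<string, IDictionary<string, string>> jobDefaults,
    string platformDefault)
{
    if (!string.IsNullOrEmpty(stepShell))
    {
        return stepShell;                       // 1. explicit step input wins
    }

    if (string.IsNullOrEmpty(scopeName) &&
        jobDefaults.TryGetValue("run", out var runDefaults) &&
        runDefaults.TryGetValue("shell", out var shell) &&
        !string.IsNullOrEmpty(shell))
    {
        return shell;                           // 2. job-level defaults.run.shell
    }

    return platformDefault;                     // 3. platform default (e.g. pwsh on Windows)
}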
@@ -3,8 +3,11 @@ using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Runtime.Serialization;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.Expressions2;
|
||||
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
@@ -14,6 +17,16 @@ using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
{
|
||||
[DataContract]
|
||||
public class SetupInfo
|
||||
{
|
||||
[DataMember]
|
||||
public string Group { get; set; }
|
||||
|
||||
[DataMember]
|
||||
public string Detail { get; set; }
|
||||
}
|
||||
|
||||
[ServiceLocator(Default = typeof(JobExtension))]
|
||||
|
||||
public interface IJobExtension : IRunnerService
|
||||
@@ -49,6 +62,44 @@ namespace GitHub.Runner.Worker
|
||||
context.Start();
|
||||
context.Debug($"Starting: Set up job");
|
||||
context.Output($"Current runner version: '{BuildConstants.RunnerPackage.Version}'");
|
||||
|
||||
var setupInfoFile = HostContext.GetConfigFile(WellKnownConfigFile.SetupInfo);
|
||||
if (File.Exists(setupInfoFile))
|
||||
{
|
||||
Trace.Info($"Load machine setup info from {setupInfoFile}");
|
||||
try
|
||||
{
|
||||
var setupInfo = IOUtil.LoadObject<List<SetupInfo>>(setupInfoFile);
|
||||
if (setupInfo?.Count > 0)
|
||||
{
|
||||
foreach (var info in setupInfo)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(info?.Detail))
|
||||
{
|
||||
var groupName = info.Group;
|
||||
if (string.IsNullOrEmpty(groupName))
|
||||
{
|
||||
groupName = "Machine Setup Info";
|
||||
}
|
||||
|
||||
context.Output($"##[group]{groupName}");
|
||||
var multiLines = info.Detail.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
|
||||
foreach (var line in multiLines)
|
||||
{
|
||||
context.Output(line);
|
||||
}
|
||||
context.Output("##[endgroup]");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
context.Output($"Fail to load and print machine setup info: {ex.Message}");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
var repoFullName = context.GetGitHubContext("repository");
|
||||
ArgUtil.NotNull(repoFullName, nameof(repoFullName));
|
||||
context.Debug($"Primary repository: {repoFullName}");
|
||||
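The setup-info block above prints operator-provided machine details at the start of every job. A hedged example of the data shape it expects (SetupInfo comes from this diff; the values are invented):

using System.Collections.Generic;

// IOUtil.LoadObject<List<SetupInfo>>(setupInfoFile) deserializes the setup info
// config file into this shape before the loop above prints it.
var setupInfo = new List<SetupInfo>
{
    new SetupInfo
    {
        Group = "Installed Software",          // rendered as ##[group]Installed Software
        Detail = "node 12.16.1\npython 3.8.2", // printed line by line inside the group
    },
};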
@@ -78,9 +129,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
// Evaluate the job-level environment variables
|
||||
context.Debug("Evaluating job-level environment variables");
|
||||
var templateTrace = context.ToTemplateTraceWriter();
|
||||
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
|
||||
var templateEvaluator = new PipelineTemplateEvaluator(templateTrace, schema);
|
||||
var templateEvaluator = context.ToPipelineTemplateEvaluator();
|
||||
foreach (var token in message.EnvironmentVariables)
|
||||
{
|
||||
var environmentVariables = templateEvaluator.EvaluateStepEnvironment(token, jobContext.ExpressionValues, VarUtil.EnvironmentVariableKeyComparer);
|
||||
@@ -112,6 +161,26 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
// Evaluate the job defaults
|
||||
context.Debug("Evaluating job defaults");
|
||||
foreach (var token in message.Defaults)
|
||||
{
|
||||
var defaults = token.AssertMapping("defaults");
|
||||
if (defaults.Any(x => string.Equals(x.Key.AssertString("defaults key").Value, "run", StringComparison.OrdinalIgnoreCase)))
|
||||
{
|
||||
context.JobDefaults["run"] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
var defaultsRun = defaults.First(x => string.Equals(x.Key.AssertString("defaults key").Value, "run", StringComparison.OrdinalIgnoreCase));
|
||||
var jobDefaults = templateEvaluator.EvaluateJobDefaultsRun(defaultsRun.Value, jobContext.ExpressionValues);
|
||||
foreach (var pair in jobDefaults)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(pair.Value))
|
||||
{
|
||||
context.JobDefaults["run"][pair.Key] = pair.Value;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Build up 2 lists of steps, pre-job, job
|
||||
// Download actions not already in the cache
|
||||
Trace.Info("Downloading actions");
|
||||
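The defaults hunk above copies the job's defaults.run mapping into ExecutionContext.JobDefaults, which the script handlers later consult for shell and working-directory. A sketch of the resulting shape (values invented for illustration):

using System;
using System.Collections.Generic;

// A workflow-level  defaults: { run: { shell: bash, working-directory: src } }
// ends up in the execution context roughly as:
var jobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase)
{
    ["run"] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
    {
        ["shell"] = "bash",
        ["working-directory"] = "src",
    },
};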
@@ -244,6 +313,58 @@ namespace GitHub.Runner.Worker
|
||||
context.Start();
|
||||
context.Debug("Starting: Complete job");
|
||||
|
||||
// Evaluate job outputs
|
||||
if (message.JobOutputs != null && message.JobOutputs.Type != TokenType.Null)
|
||||
{
|
||||
try
|
||||
{
|
||||
context.Output($"Evaluate and set job outputs");
|
||||
|
||||
// Populate env context for each step
|
||||
Trace.Info("Initialize Env context for evaluating job outputs");
|
||||
#if OS_WINDOWS
|
||||
var envContext = new DictionaryContextData();
|
||||
#else
|
||||
var envContext = new CaseSensitiveDictionaryContextData();
|
||||
#endif
|
||||
context.ExpressionValues["env"] = envContext;
|
||||
foreach (var pair in context.EnvironmentVariables)
|
||||
{
|
||||
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
|
||||
}
|
||||
|
||||
Trace.Info("Initialize steps context for evaluating job outputs");
|
||||
context.ExpressionValues["steps"] = context.StepsContext.GetScope(context.ScopeName);
|
||||
|
||||
var templateEvaluator = context.ToPipelineTemplateEvaluator();
|
||||
var outputs = templateEvaluator.EvaluateJobOutput(message.JobOutputs, context.ExpressionValues);
|
||||
foreach (var output in outputs)
|
||||
{
|
||||
if (string.IsNullOrEmpty(output.Value))
|
||||
{
|
||||
context.Debug($"Skip output '{output.Key}' since it's empty");
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!string.Equals(output.Value, HostContext.SecretMasker.MaskSecrets(output.Value)))
|
||||
{
|
||||
context.Warning($"Skip output '{output.Key}' since it may contain secret.");
|
||||
continue;
|
||||
}
|
||||
|
||||
context.Output($"Set output '{output.Key}'");
|
||||
jobContext.JobOutputs[output.Key] = output.Value;
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
context.Result = TaskResult.Failed;
|
||||
context.Error($"Fail to evaluate job outputs");
|
||||
context.Error(ex);
|
||||
jobContext.Result = TaskResultUtil.MergeTaskResults(jobContext.Result, TaskResult.Failed);
|
||||
}
|
||||
}
|
||||
|
||||
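// --- Editor's note: illustrative sketch, not part of this diff. ---
// The output filter above drops any value that the secret masker would
// rewrite. Stripped of the runner types, the guard is just "masking
// changed the string" (Func<string, string> stands in for the masker):
using System;

internal static class OutputMaskingSample
{
    internal static bool LooksLikeSecret(string value, Func<string, string> maskSecrets)
    {
        // If masking alters the value, it contained at least one registered secret.
        return !string.Equals(value, maskSecrets(value), StringComparison.Ordinal);
    }
}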
if (context.Variables.GetBoolean(Constants.Variables.Actions.RunnerDebug) ?? false)
{
Trace.Info("Support log upload starting.");

@@ -107,6 +107,11 @@ namespace GitHub.Runner.Worker
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Failed);
}

if (jobContext.WriteDebug)
{
jobContext.SetRunnerContext("debug", "1");
}

jobContext.SetRunnerContext("os", VarUtil.OS);

string toolsDirectory = HostContext.GetDirectory(WellKnownDirectory.Tools);
@@ -226,7 +231,7 @@ namespace GitHub.Runner.Worker
}

Trace.Info("Raising job completed event.");
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result);
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result, jobContext.JobOutputs);

var completeJobRetryLimit = 5;
var exceptions = new List<Exception>();

@@ -56,13 +56,22 @@ namespace GitHub.Runner.Worker
}
}

public void SetResult(
public void SetConclusion(
string scopeName,
string stepName,
string result)
string conclusion)
{
var step = GetStep(scopeName, stepName);
step["result"] = new StringContextData(result);
step["conclusion"] = new StringContextData(conclusion);
}

public void SetOutcome(
string scopeName,
string stepName,
string outcome)
{
var step = GetStep(scopeName, stepName);
step["outcome"] = new StringContextData(outcome);
}

private DictionaryContextData GetStep(string scopeName, string stepName)

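// --- Editor's note: illustrative sketch, not part of this diff. ---
// The rename from SetResult to SetConclusion plus the new SetOutcome mirror
// the continue-on-error split used later in this diff: "outcome" records what
// actually happened, "conclusion" is what the job sees after continue-on-error
// downgrades a failure. A minimal model of that relationship:
internal static class StepStatusSample
{
    internal enum StepResult { Succeeded, Failed }

    internal static (StepResult Outcome, StepResult Conclusion) Finalize(StepResult raw, bool continueOnError)
    {
        // Conclusion differs from outcome only when continue-on-error rescues a failure.
        var conclusion = continueOnError && raw == StepResult.Failed ? StepResult.Succeeded : raw;
        return (raw, conclusion);
    }
}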
@@ -76,15 +76,36 @@ namespace GitHub.Runner.Worker
// Start
step.ExecutionContext.Start();

// Set GITHUB_ACTION
if (step is IActionRunner actionStep)
{
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
}

// Initialize scope
if (InitializeScope(step, scopeInputs))
{
// Populate env context for each step
Trace.Info("Initialize Env context for step");
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
step.ExecutionContext.ExpressionValues["env"] = envContext;
foreach (var pair in step.ExecutionContext.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}

if (step is IActionRunner actionStep)
{
// Set GITHUB_ACTION
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);

// Evaluate and merge action's env block to env context
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, VarUtil.EnvironmentVariableKeyComparer);
foreach (var env in actionEnvironment)
{
envContext[env.Key] = new StringContextData(env.Value ?? string.Empty);
}
}

var expressionManager = HostContext.GetService<IExpressionManager>();
try
{
@@ -224,7 +245,7 @@ namespace GitHub.Runner.Worker

// Set the timeout
var timeoutMinutes = 0;
var templateEvaluator = CreateTemplateEvaluator(step.ExecutionContext);
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
try
{
timeoutMinutes = templateEvaluator.EvaluateStepTimeout(step.Timeout, step.ExecutionContext.ExpressionValues);
@@ -330,6 +351,7 @@ namespace GitHub.Runner.Worker

if (continueOnError)
{
step.ExecutionContext.Outcome = step.ExecutionContext.Result;
step.ExecutionContext.Result = TaskResult.Succeeded;
Trace.Info($"Updated step result (continue on error)");
}
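// --- Editor's note: illustrative sketch, not part of this diff. ---
// Both the job-output block and the per-step block above build the "env"
// expression context behind an OS_WINDOWS switch, because environment
// variable names are case-insensitive on Windows and case-sensitive
// elsewhere. The same choice expressed with a plain dictionary:
using System;
using System.Collections.Generic;

internal static class EnvContextSample
{
    internal static IDictionary<string, string> CreateEnvLookup(bool isWindows)
    {
        return new Dictionary<string, string>(
            isWindows ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal);
    }
}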
@@ -366,7 +388,7 @@ namespace GitHub.Runner.Worker
executionContext.Debug($"Initializing scope '{scope.Name}'");
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.ParentName);
executionContext.ExpressionValues["inputs"] = !String.IsNullOrEmpty(scope.ParentName) ? scopeInputs[scope.ParentName] : null;
var templateEvaluator = CreateTemplateEvaluator(executionContext);
var templateEvaluator = executionContext.ToPipelineTemplateEvaluator();
var inputs = default(DictionaryContextData);
try
{
@@ -422,7 +444,7 @@ namespace GitHub.Runner.Worker
executionContext.Debug($"Finalizing scope '{scope.Name}'");
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.Name);
executionContext.ExpressionValues["inputs"] = null;
var templateEvaluator = CreateTemplateEvaluator(executionContext);
var templateEvaluator = executionContext.ToPipelineTemplateEvaluator();
var outputs = default(DictionaryContextData);
try
{
@@ -454,12 +476,5 @@ namespace GitHub.Runner.Worker

executionContext.Complete(result, resultCode: resultCode);
}

private PipelineTemplateEvaluator CreateTemplateEvaluator(IExecutionContext executionContext)
{
var templateTrace = executionContext.ToTemplateTraceWriter();
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
return new PipelineTemplateEvaluator(templateTrace, schema);
}
}
}

@@ -2,7 +2,6 @@
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using GitHub.Build.WebApi;
using GitHub.DistributedTask.WebApi;
using GitHub.DistributedTask.Logging;
using GitHub.DistributedTask.Pipelines.ContextData;
@@ -63,7 +62,7 @@ namespace GitHub.Runner.Worker

// DO NOT add file path variable to here.
// All file path variables needs to be retrive and set through ExecutionContext, so it can handle container file path translation.
public string Build_Number => Get(BuildVariables.BuildNumber);
public string Build_Number => Get(SdkConstants.Variables.Build.BuildNumber);

#if OS_WINDOWS
public bool Retain_Default_Encoding => false;

@@ -90,10 +90,8 @@
"github",
"strategy",
"matrix",
"steps",
"job",
"runner",
"env"
"runner"
],
"string": {}
},

@@ -1,14 +0,0 @@
using System;
using GitHub.Services.Common;

namespace GitHub.Build.WebApi
{
public static class ArtifactResourceTypes
{
/// <summary>
/// Build container reference
/// E.g. #/2121/drop
/// </summary>
public const String Container = "Container";
}
}
@@ -1,61 +0,0 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using GitHub.Services.WebApi.Patch;
using GitHub.Services.WebApi.Patch.Json;

namespace GitHub.Build.WebApi
{
public class BuildHttpClient : BuildHttpClientBase
{
static BuildHttpClient()
{
}

public BuildHttpClient(
Uri baseUrl,
VssCredentials credentials)
: base(baseUrl, credentials)
{
}

public BuildHttpClient(
Uri baseUrl,
VssCredentials credentials,
VssHttpRequestSettings settings)
: base(baseUrl, credentials, settings)
{
}

public BuildHttpClient(
Uri baseUrl,
VssCredentials credentials,
params DelegatingHandler[] handlers)
: base(baseUrl, credentials, handlers)
{
}

public BuildHttpClient(
Uri baseUrl,
VssCredentials credentials,
VssHttpRequestSettings settings,
params DelegatingHandler[] handlers)
: base(baseUrl, credentials, settings, handlers)
{
}

public BuildHttpClient(
Uri baseUrl,
HttpMessageHandler pipeline,
Boolean disposeHandler)
: base(baseUrl, pipeline, disposeHandler)
{
}
}
}
@@ -1,10 +0,0 @@
using System;

namespace GitHub.Build.WebApi
{
public static class BuildResourceIds
{
public const String AreaId = "5D6898BB-45EC-463F-95F9-54D49C71752E";
public const String AreaName = "build";
}
}
@@ -1,13 +0,0 @@
using System;

namespace GitHub.Build.WebApi
{
public static class BuildVariables
{
public const String TeamProjectId = "system.teamProjectId";

public const String BuildId = "build.buildId";
public const String BuildNumber = "build.buildNumber";
public const String ContainerId = "build.containerId";
}
}
@@ -1,54 +0,0 @@
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using GitHub.Services.WebApi;

namespace GitHub.Build.WebApi
{
[DataContract]
public class ArtifactResource : BaseSecuredObject
{
public ArtifactResource()
{
}

public ArtifactResource(
ISecuredObject securedObject)
: base(securedObject)
{
}

/// <summary>
/// The type of the resource: File container, version control folder, UNC path, etc.
/// </summary>
[DataMember(EmitDefaultValue = false)]
public String Type
{
get;
set;
}

/// <summary>
/// Type-specific data about the artifact.
/// </summary>
/// <remarks>
/// For example, "#/10002/5/drop", "$/drops/5", "\\myshare\myfolder\mydrops\5"
/// </remarks>
[DataMember(EmitDefaultValue = false)]
public String Data
{
get;
set;
}

/// <summary>
/// Type-specific properties of the artifact.
/// </summary>
[DataMember(IsRequired = false, EmitDefaultValue = false)]
public Dictionary<String, String> Properties
{
get;
set;
}
}
}
@@ -1,63 +0,0 @@
using System;
using System.Runtime.Serialization;
using GitHub.Services.WebApi;

namespace GitHub.Build.WebApi
{
/// <summary>
/// Represents an artifact produced by a build.
/// </summary>
[DataContract]
public class BuildArtifact : BaseSecuredObject
{
public BuildArtifact()
{
}

internal BuildArtifact(
ISecuredObject securedObject)
: base(securedObject)
{
}

/// <summary>
/// The artifact ID.
/// </summary>
[DataMember]
public Int32 Id
{
get;
set;
}

/// <summary>
/// The name of the artifact.
/// </summary>
[DataMember]
public String Name
{
get;
set;
}

/// <summary>
/// The artifact source, which will be the ID of the job that produced this artifact.
/// </summary>
[DataMember]
public String Source
{
get;
set;
}

/// <summary>
/// The actual resource.
/// </summary>
[DataMember]
public ArtifactResource Resource
{
get;
set;
}
}
}
@@ -1,107 +0,0 @@
/*
* ---------------------------------------------------------
* Copyright(C) Microsoft Corporation. All rights reserved.
* ---------------------------------------------------------
*/

using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.Common;
using GitHub.Services.WebApi;

namespace GitHub.Build.WebApi
{
[ResourceArea(BuildResourceIds.AreaId)]
public abstract class BuildHttpClientBase : VssHttpClientBase
{
public BuildHttpClientBase(Uri baseUrl, VssCredentials credentials)
: base(baseUrl, credentials)
{
}

public BuildHttpClientBase(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings)
: base(baseUrl, credentials, settings)
{
}

public BuildHttpClientBase(Uri baseUrl, VssCredentials credentials, params DelegatingHandler[] handlers)
: base(baseUrl, credentials, handlers)
{
}

public BuildHttpClientBase(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings, params DelegatingHandler[] handlers)
: base(baseUrl, credentials, settings, handlers)
{
}

public BuildHttpClientBase(Uri baseUrl, HttpMessageHandler pipeline, bool disposeHandler)
: base(baseUrl, pipeline, disposeHandler)
{
}

/// <summary>
/// [Preview API] Associates an artifact with a build.
/// </summary>
/// <param name="artifact">The artifact.</param>
/// <param name="project">Project ID</param>
/// <param name="buildId">The ID of the build.</param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
public virtual Task<BuildArtifact> CreateArtifactAsync(
BuildArtifact artifact,
Guid project,
int buildId,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
Guid locationId = new Guid("1db06c96-014e-44e1-ac91-90b2d4b3e984");
object routeValues = new { project = project, buildId = buildId };
HttpContent content = new ObjectContent<BuildArtifact>(artifact, new VssJsonMediaTypeFormatter(true));

return SendAsync<BuildArtifact>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(5.2, 5),
userState: userState,
cancellationToken: cancellationToken,
content: content);
}

/// <summary>
/// [Preview API] Gets a specific artifact for a build.
/// </summary>
/// <param name="project">Project ID</param>
/// <param name="buildId">The ID of the build.</param>
/// <param name="artifactName">The name of the artifact.</param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
public virtual Task<BuildArtifact> GetArtifactAsync(
Guid project,
int buildId,
string artifactName,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("GET");
Guid locationId = new Guid("1db06c96-014e-44e1-ac91-90b2d4b3e984");
object routeValues = new { project = project, buildId = buildId };

List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
queryParams.Add("artifactName", artifactName);

return SendAsync<BuildArtifact>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(5.2, 5),
queryParameters: queryParams,
userState: userState,
cancellationToken: cancellationToken);
}
}
}
@@ -5,7 +5,7 @@ using GitHub.DistributedTask.Expressions2.Sdk.Functions;

namespace GitHub.DistributedTask.Expressions2
{
internal static class ExpressionConstants
public static class ExpressionConstants
{
static ExpressionConstants()
{
@@ -15,6 +15,7 @@ namespace GitHub.DistributedTask.Expressions2
AddFunction<Join>("join", 1, 2);
AddFunction<StartsWith>("startsWith", 2, 2);
AddFunction<ToJson>("toJson", 1, 1);
AddFunction<FromJson>("fromJson", 1, 1);
AddFunction<HashFiles>("hashFiles", 1, 1);
}

@@ -24,6 +25,12 @@ namespace GitHub.DistributedTask.Expressions2
WellKnownFunctions.Add(name, new FunctionInfo<T>(name, minParameters, maxParameters));
}

public static void UpdateFunction<T>(String name, Int32 minParameters, Int32 maxParameters)
where T : Function, new()
{
WellKnownFunctions[name] = new FunctionInfo<T>(name, minParameters, maxParameters);
}

internal static readonly String False = "false";
internal static readonly String Infinity = "Infinity";
internal static readonly Int32 MaxDepth = 50;

@@ -0,0 +1,24 @@
using System;
using System.IO;
using GitHub.DistributedTask.Pipelines.ContextData;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

namespace GitHub.DistributedTask.Expressions2.Sdk.Functions
{
internal sealed class FromJson : Function
{
protected sealed override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
var json = Parameters[0].Evaluate(context).ConvertToString();
using (var stringReader = new StringReader(json))
using (var jsonReader = new JsonTextReader(stringReader) { DateParseHandling = DateParseHandling.None, FloatParseHandling = FloatParseHandling.Double })
{
var token = JToken.ReadFrom(jsonReader);
return token.ToPipelineContextData();
}
}
}}
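// --- Editor's note: illustrative sketch, not part of this diff. ---
// The new fromJson function parses with DateParseHandling.None and
// FloatParseHandling.Double so date-like strings stay strings and numbers
// come back as doubles. The same Newtonsoft.Json settings in isolation:
using System.IO;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

internal static class FromJsonSample
{
    internal static JToken ParseLikeFromJson(string json)
    {
        using (var stringReader = new StringReader(json))
        using (var jsonReader = new JsonTextReader(stringReader)
        {
            DateParseHandling = DateParseHandling.None,
            FloatParseHandling = FloatParseHandling.Double
        })
        {
            // E.g. "2020-01-01" is kept as a string token rather than becoming a DateTime.
            return JToken.ReadFrom(jsonReader);
        }
    }
}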
@@ -779,5 +779,65 @@ namespace GitHub.DistributedTask.WebApi
userState: userState,
cancellationToken: cancellationToken);
}

/// <summary>
/// [Preview API]
/// </summary>
/// <param name="poolId"></param>
/// <param name="agentId"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
public Task<String> GetAgentAuthUrlAsync(
int poolId,
int agentId,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("GET");
Guid locationId = new Guid("a82a119c-1e46-44b6-8d75-c82a79cf975b");
object routeValues = new { poolId = poolId, agentId = agentId };

return SendAsync<String>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken);
}

/// <summary>
/// [Preview API]
/// </summary>
/// <param name="poolId"></param>
/// <param name="agentId"></param>
/// <param name="error"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
[EditorBrowsable(EditorBrowsableState.Never)]
public virtual async Task ReportAgentAuthUrlMigrationErrorAsync(
int poolId,
int agentId,
string error,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
Guid locationId = new Guid("a82a119c-1e46-44b6-8d75-c82a79cf975b");
object routeValues = new { poolId = poolId, agentId = agentId };
HttpContent content = new ObjectContent<string>(error, new VssJsonMediaTypeFormatter(true));

using (HttpResponseMessage response = await SendAsync(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken,
content: content).ConfigureAwait(false))
{
return;
}
}
}
}

@@ -30,8 +30,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
foreach (var propertiesPair in properties)
{
var propertyName = propertiesPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.Properties} key");
var propertyValue = propertiesPair.Value.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.Properties} value");
Properties.Add(propertyName.Value, new PropertyValue(propertyValue.Value));
Properties.Add(propertyName.Value, new PropertyValue(propertiesPair.Value));
}
break;

@@ -85,7 +84,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
}
else
{
throw new ArgumentException($"Property '{TemplateConstants.LooseKeyType}' is defined but '{TemplateConstants.LooseValueType}' is not defined");
throw new ArgumentException($"Property '{TemplateConstants.LooseKeyType}' is defined but '{TemplateConstants.LooseValueType}' is not defined on '{name}'");
}
}
// Otherwise validate loose value type not be defined
@@ -95,9 +94,14 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
}

// Lookup each property
foreach (var property in Properties.Values)
foreach (var property in Properties)
{
schema.GetDefinition(property.Type);
if (String.IsNullOrEmpty(property.Value.Type))
{
throw new ArgumentException($"Type not specified for the '{property.Key}' property on the '{name}' type");
}

schema.GetDefinition(property.Value.Type);
}

if (!String.IsNullOrEmpty(Inherits))

@@ -1,18 +1,40 @@
using System;
using GitHub.DistributedTask.ObjectTemplating.Tokens;

namespace GitHub.DistributedTask.ObjectTemplating.Schema
{
internal sealed class PropertyValue
{
internal PropertyValue()
internal PropertyValue(TemplateToken token)
{
}

internal PropertyValue(String type)
{
Type = type;
if (token is StringToken stringToken)
{
Type = stringToken.Value;
}
else
{
var mapping = token.AssertMapping($"{TemplateConstants.MappingPropertyValue}");
foreach (var mappingPair in mapping)
{
var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.MappingPropertyValue} key");
switch (mappingKey.Value)
{
case TemplateConstants.Type:
Type = mappingPair.Value.AssertString($"{TemplateConstants.MappingPropertyValue} {TemplateConstants.Type}").Value;
break;
case TemplateConstants.Required:
Required = mappingPair.Value.AssertBoolean($"{TemplateConstants.MappingPropertyValue} {TemplateConstants.Required}").Value;
break;
default:
mappingKey.AssertUnexpectedValue($"{TemplateConstants.MappingPropertyValue} key");
break;
}
}
}
}

internal String Type { get; set; }

internal Boolean Required { get; set; }
}
}

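// --- Editor's note: illustrative sketch, not part of this diff. ---
// After this change a schema property value can be either the shorthand
// string (just a type name) or a mapping with "type" and an optional
// "required" flag. The accepted shapes, modelled with Newtonsoft.Json.Linq
// instead of TemplateToken:
using Newtonsoft.Json.Linq;

internal static class PropertyValueSample
{
    internal static (string Type, bool Required) Read(JToken token)
    {
        if (token.Type == JTokenType.String)
        {
            return ((string)token, false);  // shorthand: "non-empty-string"
        }

        var mapping = (JObject)token;       // long form: { type: ..., required: ... }
        return ((string)mapping["type"], (bool?)mapping["required"] ?? false);
    }
}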
@@ -312,8 +312,8 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema

// template-schema
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Version, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Definitions, new PropertyValue(TemplateConstants.Definitions));
mappingDefinition.Properties.Add(TemplateConstants.Version, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Definitions, new PropertyValue(new StringToken(null, null, null, TemplateConstants.Definitions)));
schema.Definitions.Add(TemplateConstants.TemplateSchema, mappingDefinition);

// definitions
@@ -335,9 +335,9 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema

// null-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Null, new PropertyValue(TemplateConstants.NullDefinitionProperties));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Null, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NullDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.NullDefinition, mappingDefinition);

// null-definition-properties
@@ -346,9 +346,9 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema

// boolean-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Boolean, new PropertyValue(TemplateConstants.BooleanDefinitionProperties));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Boolean, new PropertyValue(new StringToken(null, null, null, TemplateConstants.BooleanDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.BooleanDefinition, mappingDefinition);

// boolean-definition-properties
@@ -357,9 +357,9 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema

// number-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Number, new PropertyValue(TemplateConstants.NumberDefinitionProperties));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Number, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NumberDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.NumberDefinition, mappingDefinition);

// number-definition-properties
@@ -368,55 +368,68 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema

// string-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.String, new PropertyValue(TemplateConstants.StringDefinitionProperties));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.String, new PropertyValue(new StringToken(null, null, null, TemplateConstants.StringDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.StringDefinition, mappingDefinition);

// string-definition-properties
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Constant, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.IgnoreCase, new PropertyValue(TemplateConstants.Boolean));
mappingDefinition.Properties.Add(TemplateConstants.RequireNonEmpty, new PropertyValue(TemplateConstants.Boolean));
mappingDefinition.Properties.Add(TemplateConstants.Constant, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.IgnoreCase, new PropertyValue(new StringToken(null, null, null,TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.RequireNonEmpty, new PropertyValue(new StringToken(null, null, null, TemplateConstants.Boolean)));
schema.Definitions.Add(TemplateConstants.StringDefinitionProperties, mappingDefinition);

// sequence-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Sequence, new PropertyValue(TemplateConstants.SequenceDefinitionProperties));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Sequence, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.SequenceDefinition, mappingDefinition);

// sequence-definition-properties
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.ItemType, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.ItemType, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
schema.Definitions.Add(TemplateConstants.SequenceDefinitionProperties, mappingDefinition);

// mapping-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Mapping, new PropertyValue(TemplateConstants.MappingDefinitionProperties));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Mapping, new PropertyValue(new StringToken(null, null, null, TemplateConstants.MappingDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.MappingDefinition, mappingDefinition);

// mapping-definition-properties
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Properties, new PropertyValue(TemplateConstants.Properties));
mappingDefinition.Properties.Add(TemplateConstants.LooseKeyType, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.LooseValueType, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Properties, new PropertyValue(new StringToken(null, null, null, TemplateConstants.Properties)));
mappingDefinition.Properties.Add(TemplateConstants.LooseKeyType, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.LooseValueType, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
schema.Definitions.Add(TemplateConstants.MappingDefinitionProperties, mappingDefinition);

// properties
mappingDefinition = new MappingDefinition();
mappingDefinition.LooseKeyType = TemplateConstants.NonEmptyString;
mappingDefinition.LooseValueType = TemplateConstants.NonEmptyString;
mappingDefinition.LooseValueType = TemplateConstants.PropertyValue;
schema.Definitions.Add(TemplateConstants.Properties, mappingDefinition);

// property-value
oneOfDefinition = new OneOfDefinition();
oneOfDefinition.OneOf.Add(TemplateConstants.NonEmptyString);
oneOfDefinition.OneOf.Add(TemplateConstants.MappingPropertyValue);
schema.Definitions.Add(TemplateConstants.PropertyValue, oneOfDefinition);

// mapping-property-value
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Type, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Required, new PropertyValue(new StringToken(null, null, null, TemplateConstants.Boolean)));
schema.Definitions.Add(TemplateConstants.MappingPropertyValue, mappingDefinition);


// one-of-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.OneOf, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.OneOf, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
schema.Definitions.Add(TemplateConstants.OneOfDefinition, mappingDefinition);

// non-empty-string
@@ -477,4 +490,4 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
private static readonly Regex s_definitionNameRegex = new Regex("^[a-zA-Z_][a-zA-Z0-9_-]*$", RegexOptions.Compiled);
private static TemplateSchema s_schema;
}
}
}
@@ -22,9 +22,11 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal const String ItemType = "item-type";
internal const String LooseKeyType = "loose-key-type";
internal const String LooseValueType = "loose-value-type";
internal const String MaxConstant = "MAX";
internal const String Mapping = "mapping";
internal const String MappingDefinition = "mapping-definition";
internal const String MappingDefinitionProperties = "mapping-definition-properties";
internal const String MappingPropertyValue = "mapping-property-value";
internal const String NonEmptyString = "non-empty-string";
internal const String Null = "null";
internal const String NullDefinition = "null-definition";
@@ -35,7 +37,9 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal const String OneOf = "one-of";
internal const String OneOfDefinition = "one-of-definition";
internal const String OpenExpression = "${{";
internal const String PropertyValue = "property-value";
internal const String Properties = "properties";
internal const String Required = "required";
internal const String RequireNonEmpty = "require-non-empty";
internal const String Scalar = "scalar";
internal const String ScalarDefinition = "scalar-definition";
@@ -43,6 +47,7 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal const String Sequence = "sequence";
internal const String SequenceDefinition = "sequence-definition";
internal const String SequenceDefinitionProperties = "sequence-definition-properties";
internal const String Type = "type";
internal const String SequenceOfNonEmptyString = "sequence-of-non-empty-string";
internal const String String = "string";
internal const String StringDefinition = "string-definition";

@@ -184,6 +184,7 @@ namespace GitHub.DistributedTask.ObjectTemplating
id = FileIds.Count + 1;
FileIds.Add(file, id);
FileNames.Add(file);
Memory.AddBytes(file);
}

return id;
@@ -191,7 +192,12 @@ namespace GitHub.DistributedTask.ObjectTemplating

internal String GetFileName(Int32 fileId)
{
return FileNames[fileId - 1];
return FileNames.Count >= fileId ? FileNames[fileId - 1] : null;
}

internal IReadOnlyList<String> GetFileTable()
{
return FileNames.AsReadOnly();
}

private String GetErrorPrefix(
@@ -199,9 +205,9 @@ namespace GitHub.DistributedTask.ObjectTemplating
Int32? line,
Int32? column)
{
if (fileId != null)
var fileName = fileId.HasValue ? GetFileName(fileId.Value) : null;
if (!String.IsNullOrEmpty(fileName))
{
var fileName = GetFileName(fileId.Value);
if (line != null && column != null)
{
return $"{fileName} {TemplateStrings.LineColumn(line, column)}:";

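// --- Editor's note: illustrative sketch, not part of this diff. ---
// GetFileName is now guarded against file ids that fall outside the table,
// and GetErrorPrefix only prepends a file name when one was resolved. The
// guard on its own, for a 1-based id:
using System.Collections.Generic;

internal static class FileTableSample
{
    internal static string TryGetFileName(IReadOnlyList<string> fileNames, int fileId)
    {
        return fileId >= 1 && fileNames.Count >= fileId ? fileNames[fileId - 1] : null;
    }
}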
@@ -47,7 +47,7 @@ namespace GitHub.DistributedTask.ObjectTemplating
var evaluator = new TemplateEvaluator(context, template, removeBytes);
try
{
var availableContext = new HashSet<String>(context.ExpressionValues.Keys);
var availableContext = new HashSet<String>(context.ExpressionValues.Keys.Concat(context.ExpressionFunctions.Select(x => $"{x.Name}({x.MinParameters},{x.MaxParameters})")));
var definitionInfo = new DefinitionInfo(context.Schema, type, availableContext);
result = evaluator.Evaluate(definitionInfo);

@@ -182,12 +182,14 @@ namespace GitHub.DistributedTask.ObjectTemplating
}

var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
var hasExpressionKey = false;

while (m_unraveler.AllowScalar(definition.Expand, out ScalarToken nextKeyScalar))
{
// Expression
if (nextKeyScalar is ExpressionToken)
{
hasExpressionKey = true;
var anyDefinition = new DefinitionInfo(definition, TemplateConstants.Any);
mapping.Add(nextKeyScalar, Evaluate(anyDefinition));
continue;
@@ -268,6 +270,19 @@ namespace GitHub.DistributedTask.ObjectTemplating
String listToDeDuplicate = String.Join(", ", nonDuplicates);
m_context.Error(mapping, TemplateStrings.UnableToDetermineOneOf(listToDeDuplicate));
}
else if (mappingDefinitions.Count == 1 && !hasExpressionKey)
{
foreach (var property in mappingDefinitions[0].Properties)
{
if (property.Value.Required)
{
if (!keys.Contains(property.Key))
{
m_context.Error(mapping, $"Required property is missing: {property.Key}");
}
}
}
}

m_unraveler.ReadMappingEnd();
}

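// --- Editor's note: illustrative sketch, not part of this diff. ---
// The new branch above reports a missing required property only when the
// mapping matched exactly one definition and no expression keys were seen
// (an expression could still supply the property at runtime). The check
// itself is a simple set difference:
using System.Collections.Generic;

internal static class RequiredPropertySample
{
    internal static IEnumerable<string> MissingRequiredKeys(
        IEnumerable<KeyValuePair<string, bool>> properties,  // property name -> required
        ISet<string> presentKeys)
    {
        foreach (var property in properties)
        {
            if (property.Value && !presentKeys.Contains(property.Key))
            {
                yield return property.Key;
            }
        }
    }
}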
@@ -178,14 +178,15 @@ namespace GitHub.DistributedTask.ObjectTemplating
}

var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
var hasExpressionKey = false;

while (m_objectReader.AllowLiteral(out LiteralToken rawLiteral))
{
var nextKeyScalar = ParseScalar(rawLiteral, definition.AllowedContext);

// Expression
if (nextKeyScalar is ExpressionToken)
{
hasExpressionKey = true;
// Legal
if (definition.AllowedContext.Length > 0)
{
@@ -280,7 +281,19 @@ namespace GitHub.DistributedTask.ObjectTemplating
String listToDeDuplicate = String.Join(", ", nonDuplicates);
m_context.Error(mapping, TemplateStrings.UnableToDetermineOneOf(listToDeDuplicate));
}

else if (mappingDefinitions.Count == 1 && !hasExpressionKey)
{
foreach (var property in mappingDefinitions[0].Properties)
{
if (property.Value.Required)
{
if (!keys.Contains(property.Key))
{
m_context.Error(mapping, $"Required property is missing: {property.Key}");
}
}
}
}
ExpectMappingEnd();
}

@@ -30,14 +30,14 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
Column = column;
}

[IgnoreDataMember]
internal Int32? FileId { get; set; }
[DataMember(Name = "file", EmitDefaultValue = false)]
internal Int32? FileId { get; private set; }

[DataMember(Name = "line", EmitDefaultValue = false)]
internal Int32? Line { get; }
internal Int32? Line { get; private set; }

[DataMember(Name = "col", EmitDefaultValue = false)]
internal Int32? Column { get; }
internal Int32? Column { get; private set; }

[DataMember(Name = "type", EmitDefaultValue = false)]
internal Int32 Type { get; }

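// --- Editor's note: illustrative sketch, not part of this diff. ---
// FileId now serializes as "file" alongside "line" and "col", and the
// converter below writes each origin property only when it is present
// (the old code gated the "col" write on token.Line instead of token.Column).
// The write pattern with a bare Newtonsoft JsonWriter:
using Newtonsoft.Json;

internal static class TokenOriginSample
{
    internal static void WriteOrigin(JsonWriter writer, int? file, int? line, int? col)
    {
        writer.WriteStartObject();
        if (file != null) { writer.WritePropertyName("file"); writer.WriteValue(file); }
        if (line != null) { writer.WritePropertyName("line"); writer.WriteValue(line); }
        if (col != null) { writer.WritePropertyName("col"); writer.WriteValue(col); }
        writer.WriteEndObject();
    }
}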
@@ -115,13 +115,12 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
Object value,
JsonSerializer serializer)
{
base.WriteJson(writer, value, serializer);
if (value is TemplateToken token)
{
switch (token.Type)
{
case TokenType.Null:
if (token.Line == null && token.Column == null)
if (token.FileId == null && token.Line == null && token.Column == null)
{
writer.WriteNull();
}
@@ -130,12 +129,17 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Line != null)
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -146,7 +150,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens

case TokenType.Boolean:
var booleanToken = token as BooleanToken;
if (token.Line == null && token.Column == null)
if (token.FileId == null && token.Line == null && token.Column == null)
{
writer.WriteValue(booleanToken.Value);
}
@@ -155,12 +159,17 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Line != null)
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -173,7 +182,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens

case TokenType.Number:
var numberToken = token as NumberToken;
if (token.Line == null && token.Column == null)
if (token.FileId == null && token.Line == null && token.Column == null)
{
writer.WriteValue(numberToken.Value);
}
@@ -182,12 +191,17 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Line != null)
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -200,7 +214,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens

case TokenType.String:
var stringToken = token as StringToken;
if (token.Line == null && token.Column == null)
if (token.FileId == null && token.Line == null && token.Column == null)
{
writer.WriteValue(stringToken.Value);
}
@@ -209,12 +223,17 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Line != null)
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -230,12 +249,17 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Line != null)
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -253,12 +277,17 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Line != null)
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -273,12 +302,17 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Line != null)
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -301,12 +335,17 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Line != null)
if (token.Column != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);

Some files were not shown because too many files have changed in this diff.