Mirror of https://github.com/actions/runner.git, synced 2025-12-10 12:36:23 +00:00

Compare commits: 177 commits, users/thbo ... v2.273.2
Commits (SHA1): b75246e0fe, a41a9ba8c7, c18643e529, 0face6e3af, 306be41266, 4e85b8f3b7, 476640fd51, d05b9111c6, 444332ca88, e6eb9e381d, 3a76a2e291, 9976cb92a0, d900654c42, 1d68b0448c, 65e3ec86b4, a7f205593a, 55f60a4ffc, ca13b25240, b0c2734380, 9e7b56f698, 8c29e33e88, 976217d6ec, 562eafab3a, 9015b95a72, 7d4bbf46de, 7b608e3e92, f028b4e2b0, 38f816c2ae, bc1fe2cfe0, 89a13db2c3, d59092d973, 855b90c3d4, 48ac96307c, 2e50dffb37, e7b0844772, d5a5550649, 3d0147d322, bd1f245aac, 005f1c15b1, da3cb5506f, 32d439070b, ec9f8f1682, 0921af735a, 1cc3c08cf2, f9dca15c63, 0877d9a533, d5e40c6a60, 391bc35bb9, e4267b8434, 2709cbc0ea, 5e0cde8649, cb2b323781, 6c3958f365, 9d7bd4706b, 5822a38c39, d42c9da2d7, 121deedeb5, a0942ed345, 7cef9a27ca, df7e16954e, 4e7d27a53c, 89d1418e48, e728b8594d, de4490d06d, 2e800f857e, 312c7668a8, eaf39bb058, 5815819f24, 1aea046932, eda463601c, f994ae0542, 3c5aef791c, c4626d0c3a, 416a7ac4b8, 11435857e4, 6f260012a3, 4fc87ddfc6, b45c1b9440, 73307c0a30, cd8e4ddba1, abf59bdcb6, 09cf59c1e0, 7a65236022, 462b5117c8, 6922f3cb86, 911135e66c, 01c9a8a8af, 33d2d2c328, a246b3b29d, c7768d4a7b, 70729fb3c4, 1470a3b6e2, 2fadf430e4, f798f5606b, 3f7a01af93, d5c54f9819, 9f78ad3b34, 97883c8cd5, c5fa9fb062, b2dcdc21dc, c126b52fe5, 117ec1fff9, d5c7097d2c, f9baec4b32, a20ad4e121, 2bd0b1af0e, baa6ded3bc, 7817e1a976, d90273a068, 2cdde6cb16, 1f52dfa636, 83b5742278, ba69b5bc93, 0e8777ebda, a5f06b3ec2, be325f26a6, dec260920f, b0a1294ef5, 3d70ef2da1, e23d68f6e2, dff1024cd3, 9fc0686dc2, ab001a7004, 178a618e01, dfaf6e06ee, b0a71481f0, 88875ca1b0, a5eb8cb5c4, 41f4ca3414, aa9f5bf070, 2d6042421f, c8890d0f3f, 53fb6297cb, f9b5d626c5, d34afb54b1, e291ebc58a, 6bec1e3bb8, 0cba42590f, 94e7560ccd, d80ab095a5, 2efd6f70e2, a6f144b014, 5294a3ee06, 745b90a8b2, 0db908da8d, 68de3a94be, a0a590fb48, 87a232c477, a3c2479a29, c45aebc9ab, b676ab3d33, 0a6bac355d, eb78d19b17, 17970ad1f9, 2e0e8eb822, 2a506cc556, 43dd34820b, 746c9d9ec0, fa2ecfcc4c, c59c0e2ded, 7a382facb3, e9ae42693f, 9cafe8c028, 1484c3fb03, 53d632706d, d6179242ca, 0da38a6924, b19e5d7924, 80ac4a8964, 02639a2092, a727194742, a9c58d7398, e15414eb5e, 4ab1e645c3, 584f6b6ca3, abc65839f3, 06292aa118
.github/workflows/build.yml (vendored, 3 changes)

@@ -1,9 +1,10 @@
 name: Runner CI

 on:
+  workflow_dispatch:
   push:
     branches:
-    - master
+    - main
     - releases/*
     paths-ignore:
     - '**.md'
.github/workflows/codeql.yml (vendored, new file, 35 lines)

@@ -0,0 +1,35 @@
name: "Code Scanning - Action"

on:
  push:
  schedule:
    - cron: '0 0 * * 0'

jobs:
  CodeQL-Build:

    strategy:
      fail-fast: false

    # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
    runs-on: ubuntu-latest

    steps:
    - name: Checkout repository
      uses: actions/checkout@v2

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v1
      # Override language selection by uncommenting this and choosing your languages
      # with:
      #   languages: go, javascript, csharp, python, cpp, java

    - name: Manual build
      run: |
        ./dev.sh layout Release linux-x64
      working-directory: src

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v1
.github/workflows/release.yml (vendored, 3 changes)

@@ -1,13 +1,14 @@
 name: Runner CD

 on:
+  workflow_dispatch:
   push:
     paths:
     - releaseVersion

 jobs:
   check:
-    if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/master'
+    if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main'
     runs-on: ubuntu-latest
     steps:
     - uses: actions/checkout@v2
.gitignore (vendored, 1 change)

@@ -2,6 +2,7 @@
 **/bin
 **/obj
 **/libs
+**/lib

 # editors
 **/*.xproj
@@ -6,7 +6,7 @@
 [](https://github.com/actions/runner/actions)

-The runner is the application that runs a job from a GitHub Actions workflow. The runner can run on the [hosted machine pools](https://github.com/actions/virtual-environments) or run on [self-hosted environments](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners).
+The runner is the application that runs a job from a GitHub Actions workflow. It is used by GitHub Actions in the [hosted virtual environments](https://github.com/actions/virtual-environments), or you can [self-host the runner](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners) in your own environment.

 ## Get Started
docs/adrs/0263-proxy-support.md (new file, 61 lines)

@@ -0,0 +1,61 @@
# ADR 263: Self Hosted Runner Proxies

**Date**: 2019-11-13

**Status**: Accepted

## Context

- Proxy support is required for some enterprises and organizations to start using their own self-hosted runners
- While there is no standard convention, many applications (such as curl, wget, perl, python, docker, git, R, etc.) support setting proxies via the environment variables `http_proxy`, `https_proxy`, and `no_proxy`
- Some of these applications use `HTTPS_PROXY` rather than `https_proxy`, but most understand or primarily support the lowercase variant

## Decision

We will update the runner to use the conventional environment variables for proxies, `http_proxy`, `https_proxy`, and `no_proxy`, if they are set.
These are described in detail below:
- `https_proxy`: a proxy URL for all https traffic. It may contain basic authentication credentials. For example:
  - http://proxy.com
  - http://127.0.0.1:8080
  - http://user:password@proxy.com
- `http_proxy`: a proxy URL for all http traffic. It may contain basic authentication credentials. For example:
  - http://proxy.com
  - http://127.0.0.1:8080
  - http://user:password@proxy.com
- `no_proxy`: a comma-separated list of hosts that should not use the proxy. An optional port may be specified:
  - `google.com`
  - `yahoo.com:443`
  - `google.com,bing.com`

We won't use `http_proxy` for https traffic when `https_proxy` is not set; this behavior lines up with libcurl-based tools (curl, git) and wget.
Otherwise action authors and workflow users would need to adjust to differences between the runner proxy convention and the tools used by their actions and scripts.

Example:
A customer sets `http_proxy=http://127.0.0.1:8888` and configures the runner against `https://github.com/owner/repo`. With an `https_proxy` -> `http_proxy` fallback, the runner would connect to the server without any problem. However, if the user runs `git push` to `https://github.com/owner/repo`, `git` won't use the proxy, since it requires `https_proxy` to be set for any https traffic.

> `golang`, `node.js` and other dev tools from the Linux community use `http_proxy` for both http and https traffic, based on my research.

A majority of our users are on Linux, where these variables are commonly required by various programs. By reading these values, we simplify the process of setting up a proxy for self-hosted runners and expose it in a way users are already familiar with.

A password provided for a proxy will be masked in the logs.

We will support the lowercase and uppercase variants, with lowercase taking priority if both are set.

### No Proxy Format

While the exact handling of the `no_proxy` env differs per application, most applications accept a comma-separated list of hosts. Some accept wildcard characters (*). We are going to do exact case-insensitive matches, and not support wildcards at this time.
For example:
- example.com will match example.com, foo.example.com, foo.bar.example.com
- foo.example.com will match bar.foo.example.com and foo.example.com

We will not support IP addresses for `no_proxy`, only hostnames.
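For illustration, a minimal sketch of the `no_proxy` matching described above (Python pseudocode, not the runner's actual C# implementation; the function name and the port-handling details are assumptions):

```python
# Sketch only: exact, case-insensitive host matching for no_proxy, no wildcards, no IPs.
from urllib.parse import urlparse


def bypass_proxy(request_url: str, no_proxy: str) -> bool:
    """Return True if the request host matches an entry in no_proxy."""
    parsed = urlparse(request_url)
    host = (parsed.hostname or "").lower()
    port = parsed.port or (443 if parsed.scheme == "https" else 80)

    for entry in no_proxy.split(","):
        entry = entry.strip().lower()
        if not entry:
            continue
        entry_host, _, entry_port = entry.partition(":")
        if entry_port and int(entry_port) != port:
            continue
        # "example.com" matches example.com and any subdomain of it.
        if host == entry_host or host.endswith("." + entry_host):
            return True
    return False


print(bypass_proxy("https://foo.example.com/path", "google.com,example.com"))  # True
print(bypass_proxy("https://notexample.com", "example.com"))                   # False
```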
## Consequences

1. Enterprises and organizations needing proxy support will be able to embrace self-hosted runners.
2. Users will need to set these environment variables before configuring the runner in order to use a proxy during configuration.
3. The runner will read the environment variables during config and at runtime and use the provided proxy if it exists.
4. Users may need to pass these environment variables into other applications if they do not natively read them.
5. Action authors may need to update their workflows to react to these environment variables.
6. We will document how to set these environment variables for runners and how the runner uses them.
7. Like all other secrets, users will be able to figure out the proxy password relatively easily if they can modify a workflow file running on a self-hosted machine.
docs/adrs/0274-step-outcome-and-conclusion.md (new file, 62 lines)

@@ -0,0 +1,62 @@
# ADR 0274: Step outcome and conclusion

**Date**: 2020-01-13

**Status**: Accepted

## Context

This ADR proposes adding `steps.<id>.outcome` and `steps.<id>.conclusion` to the steps context.

This allows a downstream step to run based on whether a previous step succeeded or failed.

Reminder: currently the steps context contains `steps.<id>.outputs`.

## Decision

For steps that have completed, populate `steps.<id>.outcome` and `steps.<id>.conclusion` with one of the following values:

- `success`
- `failure`
- `cancelled`
- `skipped`

When a continue-on-error step fails, the outcome will be `failure` even though the final conclusion is `success`.

### Example

```yaml
steps:

- id: experimental
  continue-on-error: true
  run: ./build.sh experimental

- if: ${{ steps.experimental.outcome == 'success' }}
  run: ./publish.sh experimental
```
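For intuition, a tiny sketch of how `outcome` and `conclusion` relate for a completed step (Python pseudocode; the mapping function is an assumption for illustration, not runner code):

```python
# Sketch: continue-on-error changes the conclusion, but the outcome keeps the raw result.
def step_results(raw_result: str, continue_on_error: bool) -> tuple[str, str]:
    """Return (outcome, conclusion) for a completed step."""
    outcome = raw_result  # 'success', 'failure', 'cancelled', or 'skipped'
    conclusion = raw_result
    if raw_result == "failure" and continue_on_error:
        conclusion = "success"  # the step still counts as successful for the job
    return outcome, conclusion


print(step_results("failure", continue_on_error=True))   # ('failure', 'success')
print(step_results("failure", continue_on_error=False))  # ('failure', 'failure')
```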
### Terminology

The runs API uses the term `conclusion`.

Therefore we use a different term, `outcome`, for the value prior to continue-on-error.

The following is a snippet from the runs API response payload:

```json
"steps": [
  {
    "name": "Set up job",
    "status": "completed",
    "conclusion": "success",
    "number": 1,
    "started_at": "2020-01-09T11:06:16.000-05:00",
    "completed_at": "2020-01-09T11:06:18.000-05:00"
  },
```

## Consequences

- Update runner
- Update [docs](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#steps-context)
docs/adrs/0276-problem-matchers.md (new file, 263 lines)

@@ -0,0 +1,263 @@
# ADR 0276: Problem Matchers

**Date**: 2019-06-05

**Status**: Accepted

## Context

Compilation failures during a CI build should surface good error messages.

For example, the actual compile errors from the typescript compiler should bubble up as issues in the UI, and not simply "tsc exited with exit code 1".

VSCode has an extensible model for solving this type of problem. VSCode allows users to configure which problem matchers to use when scanning output. For example, a user can apply the `tsc` problem matcher to receive a rich error output experience in VSCode when compiling their typescript project.

The problem-matcher concept fits well with "setup" actions. For example, the `setup-nodejs` action will download node.js, add it to the PATH, and register the `tsc` problem matcher. For the duration of the job, the `tsc` problem matcher will be applied against the output.

## Decision

### Registration

#### Using `##` command

`##[add-matcher]path-to-problem-matcher-config.json`

Using a `##` command allows for flexibility:
- Ad hoc scripts can register problem matchers
- Allows problem matchers to be conditionally registered

Note, if a matcher with the same name is registered a second time, it will clobber the first instance.

#### Unregister using `##` command

A way out for rare cases where scoping is a problem.

`##[remove-matcher]owner`

For this to be usable, the `owner` needs to be discoverable. Therefore, debug-print the owner on registration.

### Single line matcher

Consider the output:

```
[...]

Build FAILED.

"C:\temp\problemmatcher\myproject\ConsoleApp1\ConsoleApp1.sln" (default target) (1) ->
"C:\temp\problemmatcher\myproject\ConsoleApp1\ConsoleApp1\ConsoleApp1.csproj" (default target) (2) ->
"C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj" (default target) (3) ->
(CoreCompile target) ->
Class1.cs(16,24): warning CS0612: 'ClassLibrary1.Helpers.MyHelper.Name' is obsolete [C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]

"C:\temp\problemmatcher\myproject\ConsoleApp1\ConsoleApp1.sln" (default target) (1) ->
"C:\temp\problemmatcher\myproject\ConsoleApp1\ConsoleApp1\ConsoleApp1.csproj" (default target) (2) ->
"C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj" (default target) (3) ->
(CoreCompile target) ->
Helpers\MyHelper.cs(16,30): error CS1002: ; expected [C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]

1 Warning(s)
1 Error(s)
```

The below match configuration uses a regular expression to discover problem lines, and the match groups are mapped into issue properties.

```json
"owner": "msbuild",
"pattern": [
  {
    "regexp": "^\\s*([^:]+)\\((\\d+),(\\d+)\\): (error|warning) ([^:]+): (.*) \\[(.+)\\]$",
    "file": 1,
    "line": 2,
    "column": 3,
    "severity": 4,
    "code": 5,
    "message": 6,
    "fromPath": 7
  }
]
```

The above output and match configuration produces the following matches:

```
line: Class1.cs(16,24): warning CS0612: 'ClassLibrary1.Helpers.MyHelper.Name' is obsolete [C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]
file: Class1.cs
line: 16
column: 24
severity: warning
code: CS0612
message: 'ClassLibrary1.Helpers.MyHelper.Name' is obsolete
fromPath: C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj
```

```
line: Helpers\MyHelper.cs(16,30): error CS1002: ; expected [C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]
file: Helpers\MyHelper.cs
line: 16
column: 30
severity: error
code: CS1002
message: ; expected
fromPath: C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj
```

Additionally the line will appear red in the web UI (prefixed with `##[error]`).

Note, an error does not imply task failure. Exit codes communicate failure.

Note, strip color codes when evaluating regular expressions.
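As a rough illustration of how a single-line pattern is applied, here is a sketch in Python (not the runner's C# implementation) that runs the `msbuild` regular expression above over one output line and maps the match groups to issue properties per the configuration:

```python
# Sketch: apply the msbuild single-line pattern and map groups to issue properties.
import re

PATTERN = re.compile(r"^\s*([^:]+)\((\d+),(\d+)\): (error|warning) ([^:]+): (.*) \[(.+)\]$")
GROUP_MAP = {"file": 1, "line": 2, "column": 3, "severity": 4, "code": 5, "message": 6, "fromPath": 7}


def match_line(line: str):
    match = PATTERN.match(line)
    if match is None:
        return None
    return {prop: match.group(idx) for prop, idx in GROUP_MAP.items()}


issue = match_line(
    r"Class1.cs(16,24): warning CS0612: 'ClassLibrary1.Helpers.MyHelper.Name' "
    r"is obsolete [C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]"
)
print(issue["severity"], issue["code"], issue["file"], issue["line"], issue["column"])
# warning CS0612 Class1.cs 16 24
```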
### Multi-line matcher

Consider the below output from ESLint in stylish mode. The file name is printed once, yet multiple error lines are printed.

```
test.js
  1:0   error  Missing "use strict" statement      strict
  5:10  error  'addOne' is defined but never used  no-unused-vars

✖ 2 problems (2 errors, 0 warnings)
```

The below match configuration uses multiple regular expressions, one per line.

The last pattern of a multi-line matcher can specify the `loop` property. This allows multiple errors to be discovered.

```json
"owner": "eslint-stylish",
"pattern": [
  {
    "regexp": "^([^\\s].*)$",
    "file": 1
  },
  {
    "regexp": "^\\s+(\\d+):(\\d+)\\s+(error|warning|info)\\s+(.*)\\s\\s+(.*)$",
    "line": 1,
    "column": 2,
    "severity": 3,
    "message": 4,
    "code": 5,
    "loop": true
  }
]
```

The above output and match configuration produces two matches:

```
line: 1:0 error Missing "use strict" statement strict
file: test.js
line: 1
column: 0
severity: error
message: Missing "use strict" statement
code: strict
```

```
line: 5:10 error 'addOne' is defined but never used no-unused-vars
file: test.js
line: 5
column: 10
severity: error
message: 'addOne' is defined but never used
code: no-unused-vars
```

Note, in the above example only the error line will appear red in the web UI. The "file" line will not appear red.

### Other details

#### Configuration `owner`

Can be used to stomp over or remove a registered matcher.

#### Rooting the file

The goal of the file information is to provide a hyperlink in the UI.

Solving this problem means:
- Rooting the file when unrooted:
  - Use the `fromPath` if specified (assume file path)
  - Use the `github.workspace` (where the repo is cloned on disk)
- Match against a repository to determine the relative path within the repo

This is a place where we diverge from VSCode. VSCode task configurations are specific to the local workspace (the workspace root is known or can be specified). We're solving a more generic problem, so we need more information (specifically the `fromPath` property) in order to accurately root the path.

In order to avoid creating inaccurate hyperlinks on the error issues, the agent will verify that the file exists and is in the main repository. Otherwise, omit the file property from the error issue and debug-trace what happened.

#### Supported severity levels

Ordinal ignore case:

- `warning`
- `error`

Coalesce empty with "error". For any other values, omit logging an issue and debug-trace what happened.

#### Default severity level

Problem matchers are unable to interpret severity strings other than `warning` and `error`. The `severity` match group expects `warning` or `error` (case insensitive).

However some tools indicate error/warning in different ways. For example `flake8` uses codes like `E100`, `W200`, and `F300` (error, warning, fatal, respectively).

Therefore, allow a property `severity`, sibling to `owner`, which identifies the default severity for the problem matcher. This allows two problem matchers to be registered: one for warnings and one for errors.

For example, given the following `flake8` output:

```
./bootcamp/settings.py:156:80: E501 line too long (94 > 79 characters)
./bootcamp/settings.py:165:5: F403 'from local_settings import *' used; unable to detect undefined names
```

Two problem matchers can be used:

```json
{
  "problemMatcher": [
    {
      "owner": "flake8",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+): ([EF]\\d+) (.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "code": 4,
          "message": 5
        }
      ]
    },
    {
      "owner": "flake8-warnings",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+): (W\\d+) (.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "code": 4,
          "message": 5
        }
      ]
    }
  ]
}
```

#### Mitigate regular expression denial of service (ReDoS)

If a matcher exceeds a 1 second timeout when processing a line, retry, up to three times total.

After three unsuccessful attempts, warn and eject the matcher. The matcher will not run again for the duration of the job.

### Where we diverge from VSCode

- We added the `fromPath` concept for rooting paths. This is done differently in VSCode, since a task is the scope (the root path is well known). For us, the job is the scope.
- VSCode allows additional activation info for background tasks that are always running (recompile on files changed). They allow regular expressions to define when the matcher scope begins and ends. This is an interesting concept that we could leverage to help solve our scoping problem.

## Consequences

- Setup actions should register problem matchers
docs/adrs/0277-run-action-shell-options.md (new file, 93 lines)

@@ -0,0 +1,93 @@
# ADR 0277: Run action shell option

**Date**: 2019-07-09

**Status**: Accepted

## Context

Run actions execute scripts using a platform-specific shell:
`bash -eo pipefail` on non-Windows, and `cmd.exe /c /d /s` on Windows.

The `shell` option overrides this to allow different flags or completely different shells/interpreters.

A small example is:

```yml
jobs:
  bash-job:
    actions:
    - run: echo "Hello"
      shell: bash
  python-job:
    actions:
    - run: print("Hello")
      shell: python {0}
```

## Decision

___

### Shell option

The keyword being used is `shell`.

`shell` can be either:

1. Builtins / explicitly supported keywords. It is useful to support at least `cmd` and `powershell` on Windows, because `cmd my_cmd_script` and `powershell my_ps1_script` are not valid the same way many Linux/cross-platform interpreters are, e.g. `bash myscript` or `python myscript`. Those tools (and potentially others) also require the correct file extension to run, or must be run in a particular way to get the exit codes consistently, so we must have first class knowledge about them. We provide default templates for these keywords as follows:
   - `cmd`: Default is `%ComSpec% /D /E:ON /V:OFF /S /C "CALL "{0}""`, where the script name is automatically appended with `.cmd` and substituted for `{0}`
     - Note this is equivalent to the default Windows behavior if no shell option is given
   - `pwsh`: Default is `pwsh -command "& '{0}'"`, where the script name is automatically appended with `.ps1`
   - `powershell`: Default is `powershell -command "& '{0}'"`, where the script name is automatically appended with `.ps1`
   - `bash`: Uses `bash --noprofile --norc -eo pipefail {0}`
     - The default behavior on non-Windows if no shell is given is to attempt this first
   - `sh`: Uses `sh -e {0}`
     - This is the default behavior on non-Windows if no shell is given AND `bash` (see above) was not located on the PATH
   - `python`: `python {0}`
   - **NOTE**: The exact command run may vary by machine. We only provide default arguments and a command format for the listed shells. While the above behavior is expected on hosted machines, private runners may vary. For example, `sh` (or other commands) may actually be a link to `/bin/dash`, `/bin/bash`, or other shells.

2. A template string: `command [...options] {0} [...more_options]`
   - As above, the file name of the temporary script will be templated in. This gives users more control to place options at any location relative to the script path
   - The first whitespace-delimited word of the string will be interpreted as the command
   - e.g. `python {0} arg1 arg2` or similar can be used if passing args is needed. Some shells will require other options after the filename for various reasons

Note that (1) simply provides defaults that are executed with the same mechanism as (2); a sketch of that mechanism follows the list below. That is:

- A temporary script file is generated, and the path to that file is templated into the string at `{0}`
- The first word of the formatted string is assumed to be a command, and we attempt to locate its full path
- The fully qualified path to the command, plus the remaining arguments, is executed
  - e.g. `shell: bash` expands to `/bin/bash --noprofile --norc -eo pipefail /runner/_layout/_work/_temp/f8d4fb2b-19d9-47e6-a786-4cc538d52761.sh` on my private runner
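A rough sketch of that mechanism (Python pseudocode; only a few of the built-in templates above are mirrored, the temp script path is a made-up example, and this is not the runner's source):

```python
# Sketch: expand a shell template into the command line that runs the generated temp script.
import shlex
import shutil

BUILTIN_TEMPLATES = {
    "bash": "bash --noprofile --norc -eo pipefail {0}",
    "sh": "sh -e {0}",
    "python": "python {0}",
    "pwsh": "pwsh -command \"& '{0}'\"",
}


def build_command(shell_option: str, temp_script_path: str) -> list:
    # A builtin keyword maps to its default template; anything else is treated as a raw template.
    template = BUILTIN_TEMPLATES.get(shell_option, shell_option)
    formatted = template.format(temp_script_path)
    args = shlex.split(formatted)
    # The first whitespace-delimited word is the command; resolve its full path on the PATH.
    resolved = shutil.which(args[0]) or args[0]
    return [resolved] + args[1:]


print(build_command("bash", "/runner/_work/_temp/abc.sh"))
# e.g. ['/bin/bash', '--noprofile', '--norc', '-eo', 'pipefail', '/runner/_work/_temp/abc.sh']
```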
At this time, **THE LIST OF WELL-KNOWN SHELL OPTIONS IS**:

- `cmd`: Windows (hosted vs2017, vs2019) only
- `powershell`: Windows (hosted vs2017, vs2019) only
- `sh`: All hosted platforms
- `pwsh`: All hosted platforms
- `bash`: All hosted platforms
- `python`: All hosted platforms. Can use setup-python to configure which python will be used

___

### Containers

For container jobs, `shell` should just work the same as above, transparently. We will simply `exec` the command in the job container, passing the same arguments in.

___

### Exit codes / Error action preference

For builtin shells, we provide defaults that make the most sense for CI, running within Actions, and being executed by our runner.

bash/sh:
- Fail-fast behavior using `set -eo pipefail` is the default for the `bash` and `sh` builtins, and by default when no option is given on non-Windows platforms
- Users can opt out of fail-fast and take full control easily by providing a template string for the shell option, e.g. `bash {0}`
- sh-like shells exit with the exit code of the last command executed in a script, and that is our default behavior. Thus the runner reports the status of the step as fail/succeed based on this exit code

powershell/pwsh:
- Fail-fast behavior when possible. For the `pwsh` and `powershell` builtins, we will prepend `$ErrorActionPreference = 'stop'` to the script contents
- We append `if ((Test-Path -LiteralPath variable:\LASTEXITCODE)) { exit $LASTEXITCODE }` to powershell scripts to get Action statuses to reflect the script's last exit code
- Users can always opt out by not using the builtins and providing a shell option like `pwsh -File {0}` or `powershell -Command "& '{0}'"`, depending on need

cmd:
- There doesn't seem to be a way to fully opt in to fail-fast behavior other than writing your script to check each error code and respond accordingly, so we can't actually provide that behavior by default; it is completely up to the user to write this behavior into their script
- cmd.exe will exit (return the error code to the runner) with the errorlevel of the last program it executed. This is internally consistent with the previous default behavior (sh, pwsh) and is the cmd.exe default, so we keep that behavior

## Consequences

Valid `shell` options will depend on the hosted images. We will need to maintain tight image compat.

First class support for a shell will require a major version schema change to modify. We cannot remove or modify the behavior of a well-known supported option. However, adding first class support for new shells is backwards compatible. For instance, we can add a well-known `python` option, because non-well-known options would have always needed to include `{0}`, e.g. `python {0}`.
docs/adrs/0278-env-context.md (new file, 60 lines)

@@ -0,0 +1,60 @@
# ADR 0278: Env Context

**Date**: 2019-09-30

**Status**: Accepted

## Context

Users want to reference workflow variables defined in the workflow YAML file for an action's input, display name, and condition.

## Decision

### Add `env` context in the runner

The runner will create and populate the `env` context for every job execution using the following logic:

1. On job start, create the `env` context with any environment variables in the job message; these are the env values defined in the workflow-level and job-level `env` sections of the customer's YAML file.
2. Update the `env` context when the customer uses `::set-env::` to set env at the runner level.
3. Update the `env` context with the step's `env` block before each step runs.

The `env` context is only available on the runner; customers can't use the `env` context in any server-side evaluation, just like the `runner` context.

Example yaml:

```yaml

env:
  env1: 10
  env2: 20
  env3: 30
jobs:
  build:
    env:
      env1: 100
      env2: 200
    runs-on: ubuntu-latest
    steps:
    - run: |
        echo ${{ env.env1 }} // 1000
        echo $env1 // 1000
        echo $env2 // 200
        echo $env3 // 30
      if: env.env2 == 200 // true
      name: ${{ env.env1 }}_${{ env.env2 }} //1000_200
      env:
        env1: 1000
```
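A tiny sketch of how those layers combine for the run step in the example above (Python pseudocode; an illustration of the described precedence, not the runner's implementation):

```python
# Sketch: later layers win: workflow env < job env < ::set-env:: values < the step's own env block.
workflow_env = {"env1": "10", "env2": "20", "env3": "30"}
job_env = {"env1": "100", "env2": "200"}
set_env_updates = {}            # anything written via ::set-env:: during the job
step_env = {"env1": "1000"}

env_context = {**workflow_env, **job_env, **set_env_updates, **step_env}
print(env_context)  # {'env1': '1000', 'env2': '200', 'env3': '30'}
```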
### Don't populate the `env` context with environment variables from the runner machine

With job containers and container actions, the `env` context may not have the value the customer wants, which would cause confusion.

Ex:

```yaml
build:
  runs-on: ubuntu-latest <- $USER=runner on the hosted machine
  container: ubuntu:16.04 <- $USER=root in the container
  steps:
  - run: echo ${{env.USER}} <- what should the customer expect this to output? runner/root
  - uses: docker://ubuntu:18.04
    with:
      args: echo ${{env.USER}} <- what should the customer expect this to output? runner/root
```
docs/adrs/0279-hashFiles-expression-function.md (new file, 71 lines)

@@ -0,0 +1,71 @@
# ADR 0279: HashFiles Expression Function

**Date**: 2019-09-30

**Status**: Accepted

## Context

The first-party action `actions/cache` needs an input, an explicit `key`, used for restoring and saving the cache. For package caching, the most common `key` might be the hash of the contents of all `package-lock.json` files under the `node_modules` folder.

There are several different ways to get the hash `key` input for the `actions/cache` action.

1. The customer calculates the `key` themselves in a different step. Customers won't like this since it needs an extra step just to use the cache feature.
```yaml
steps:
- run: |
    hash=some_linux_hash_method(file1, file2, file3)
    echo ::set-output name=hash::$hash
  id: createHash
- uses: actions/cache@v1
  with:
    key: ${{ steps.createHash.outputs.hash }}
```

2. Make the `key` input of `actions/cache` follow a certain convention to calculate the hash. This limits the `key` input to a certain format the customer may not want.
```yaml
steps:
- uses: actions/cache@v1
  with:
    key: ${{ runner.os }}|${{ github.workspace }}|**/package-lock.json
```

## Decision

### Add a hashFiles() function to the expression engine for calculating files' hash

`hashFiles()` will only be allowed on the runner side since it needs to read files on disk; using `hashFiles()` in any server-side evaluated expression will cause runtime errors.

`hashFiles()` will only support hashing files under `$GITHUB_WORKSPACE`, since the expression is evaluated on the runner; if the customer uses a job container or container action, the runner won't have access to the file system inside the container.

`hashFiles()` will only take 1 parameter:
- `hashFiles('**/package-lock.json')` // Search files under $GITHUB_WORKSPACE and calculate a hash for them

**Question: Do we need to support more than one match pattern?**
Ex: `hashFiles('**/package-lock.json', '!toolkit/core/package-lock.json', '!toolkit/io/package-lock.json')`
Answer: Only support a single match pattern for GA; we can always add more later.

This will give customers a better experience with the `actions/cache` action's input.
```yaml
steps:
- uses: actions/cache@v1
  with:
    key: ${{hashFiles('**/package-lock.json')}}-${{github.ref}}-${{runner.os}}
```

For the search pattern, we will use basic globbing (`*`, `?` and `[]`) and globstar (`**`).

Additional pattern details:
- Root relative paths with `github.workspace` (the main repo)
- Make `*` match files that start with `.`
- Case insensitive on Windows
- Accept `\` or `/` path separators on Windows

Hashing logic (a sketch follows the list):
1. Get all files under `$GITHUB_WORKSPACE`.
2. Use the search pattern to filter all files down to those that match. (The search pattern only applies to file paths, not folder paths.)
3. Sort all matched files by full file path in alphabetical order.
4. Use the SHA256 algorithm to hash each matched file and store the hash result.
5. Use SHA256 to hash all stored files' hash results to get the final 64-character hash result.
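A minimal sketch of the hashing logic above (Python, not the runner's C# implementation; it does not implement every pattern detail listed, e.g. dot-file matching or Windows separators):

```python
# Sketch: glob under GITHUB_WORKSPACE, sort, SHA256 each file, then SHA256 the per-file digests.
import glob
import hashlib
import os


def hash_files(pattern: str) -> str:
    workspace = os.environ["GITHUB_WORKSPACE"]
    matches = sorted(
        path
        for path in glob.glob(os.path.join(workspace, pattern), recursive=True)
        if os.path.isfile(path)  # the pattern applies to file paths, not folders
    )
    combined = hashlib.sha256()
    for path in matches:
        with open(path, "rb") as handle:
            combined.update(hashlib.sha256(handle.read()).digest())
    return combined.hexdigest()  # 64 hex characters


# Example usage: hash_files("**/package-lock.json")
```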
**Question: Should we include the folder structure info in the hash?**

Answer: No
docs/adrs/0280-command-input-echoing.md (new file, 30 lines)

@@ -0,0 +1,30 @@
# ADR 0280: Echoing of Command Input

**Date**: 2019-11-04

**Status**: Accepted

## Context

Command echoing as a default behavior tends to clutter the user logs, so we want to switch to a system where users have to opt in to see this information.

Commands will still be echoed in case there are any errors processing them. This is so the end user has more context on why the command failed, which helps with troubleshooting.

Echo output in the user logs can be explicitly controlled by the new commands `::echo::on` and `::echo::off`. By default, echoing is enabled if the `ACTIONS_STEP_DEBUG` secret is enabled; otherwise echoing is disabled.

## Decision

- The only commands that currently echo output are:
  - `remove-matcher`
  - `add-matcher`
  - `add-path`
- These will no longer echo the command if processed successfully
- All commands echo the input when any of these conditions is fulfilled (see the sketch after this list):
  1. When such commands fail with an error
  2. When `::echo::on` is set
  3. When `ACTIONS_STEP_DEBUG` is set and echoing hasn't been explicitly disabled with `::echo::off`
- There are a few commands that won't be echoed, even when echo is enabled. These are (as of 2019/11/04):
  - `add-mask`
  - `debug`
  - `warning`
  - `error`
- These commands will not echo, either because echoing the command would leak secrets (e.g. `add-mask`), or because it would not add any additional troubleshooting information to the logs (e.g. `debug`). It's expected that future commands would follow these "echo-suppressing" guidelines as well. Echo-suppressed commands are still free to output other information to the logs, as deemed fit.
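The decision above amounts to a small boolean rule. A sketch, assuming a simple three-state echo flag (Python pseudocode, not runner code):

```python
# Sketch: decide whether a command's input should be echoed to the log.
def should_echo_command(command_failed, echo_explicit, step_debug):
    """echo_explicit is 'on', 'off', or None when ::echo:: was never used."""
    if command_failed:
        return True                   # failures always echo for troubleshooting
    if echo_explicit is not None:
        return echo_explicit == "on"  # ::echo::on / ::echo::off wins
    return step_debug                 # otherwise follow ACTIONS_STEP_DEBUG


print(should_echo_command(False, None, step_debug=True))   # True
print(should_echo_command(False, "off", step_debug=True))  # False
```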
docs/adrs/0297-base64-masking-trailing-characters.md (new file, 48 lines)

@@ -0,0 +1,48 @@
# ADR 0297: Base64 Masking Trailing Characters

**Date**: 2020-01-21

**Status**: Proposed

## Context

The runner registers a number of value encoders, which mask various encodings of a provided secret. Currently, we register 3 base64 encoders:
- The base64 encoded secret
- The secret with the first character removed, then base64 encoded
- The secret with the first two characters removed, then base64 encoded

This gives us good coverage across the board for secrets and secrets with a prefix (i.e. `base64($user:$pass)`).

However, we don't have great coverage for cases where the secret has a string appended to it before it is base64 encoded (i.e. `base64($pass\n)`).

Most notably we've seen this as a result of user error, where a user accidentally appends a newline or space character before encoding their secret in base64.

## Decision

### Trim end characters

We are going to modify all existing base64 encoders to trim information before registering it as a secret.
We will trim:
- `=` from the end of all base64 strings. This is a padding character that contains no information.
  - Based on the number of `=`'s at the end of a base64 string, a malicious user could predict the length of the original secret modulo 3.
  - If a user saw `***==`, they would know the secret could be 1, 4, 7, 10... characters.
- If a string contains `=`, we will also trim the last non-padding character from the base64 secret.
  - This character can change if a string is appended to the secret before the encoding.

### Register a fourth encoder

We will also add back in the original base64 encoded secret encoder, for four total encoders:
- The base64 encoded secret
- The base64 encoded secret, trimmed
- The secret with the first character removed, then base64 encoded and trimmed
- The secret with the first two characters removed, then base64 encoded and trimmed

This allows us to fully cover the most common scenario, where a user base64 encodes their secret and expects the entire thing to be masked.
This will result in us only revealing length or bit information when a prefix or suffix is added to a secret before encoding.
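A sketch of the resulting encoder set (Python pseudocode; the helper names are made up for illustration and this is not the runner's value-encoder implementation):

```python
# Sketch: produce the four base64 mask values described above, including the trimming rules.
import base64


def b64(value: str) -> str:
    return base64.b64encode(value.encode()).decode()


def trim_base64(encoded: str) -> str:
    trimmed = encoded.rstrip("=")
    if trimmed != encoded:
        trimmed = trimmed[:-1]  # a padded value: also drop the last non-padding character
    return trimmed


def base64_mask_values(secret: str) -> list:
    return [
        b64(secret),                   # the base64 encoded secret
        trim_base64(b64(secret)),      # ... trimmed
        trim_base64(b64(secret[1:])),  # first character removed, encoded, trimmed
        trim_base64(b64(secret[2:])),  # first two characters removed, encoded, trimmed
    ]


print(base64_mask_values("hunter2"))
```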
## Consequences

- In the case where a secret has a prefix or suffix added before base64 encoding, we may now reveal up to 20 bits of information and the length of the original string modulo 3, rather than the original 16 bits and no length information
- Secrets with a suffix appended before encoding will now be masked across the board. Previously they were only masked if the secret was a multiple of 3 characters long
- Performance will suffer in a negligible way
docs/adrs/0354-runner-machine-info.md (new file, 35 lines)

@@ -0,0 +1,35 @@
# ADR 354: Expose runner machine info

**Date**: 2020-03-02

**Status**: Pending

## Context

- Provide a mechanism in the runner to include extra information in the `Set up job` step's log.
  Ex: Include OS/software info from the hosted image.

## Decision

The runner will look for a file `.setup_info` under the runner's root directory. The file can be JSON with a simple schema.

```json
[
  {
    "group": "OS Detail",
    "detail": "........"
  },
  {
    "group": "Software Detail",
    "detail": "........"
  }
]
```

The runner will use `##[group]` and `##[endgroup]` to fold all detail info into an expandable group.

Both [virtual-environments](https://github.com/actions/virtual-environments) and self-hosted runners can use this mechanism to add extra logging info to the `Set up job` step's log.
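A minimal sketch of the mechanism (Python pseudocode, not runner code; assumes the JSON schema shown above):

```python
# Sketch: read .setup_info from the runner root and fold each entry into an expandable log group.
import json
import os


def print_setup_info(runner_root: str) -> None:
    path = os.path.join(runner_root, ".setup_info")
    if not os.path.exists(path):
        return  # best effort: a missing file is simply skipped
    with open(path, encoding="utf-8") as handle:
        entries = json.load(handle)
    for entry in entries:
        print(f"##[group]{entry['group']}")
        print(entry["detail"])
        print("##[endgroup]")
```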
## Consequences

1. Change the runner to best-effort read/parse the `.setup_info` file under the runner root directory.
2. [virtual-environments](https://github.com/actions/virtual-environments) generates the file during image generation.
3. Change the MMS provisioner to properly copy the file to the runner root directory at runtime.
docs/adrs/0361-wrapper-action.md (new file, 75 lines)

@@ -0,0 +1,75 @@
# ADR 361: Wrapper Action

**Date**: 2020-03-06

**Status**: Pending

## Context

In addition to an action's regular execution, an action author may want their action to have a chance to participate in:
- Job initialization
  My action will collect machine resource usage (CPU/RAM/Disk) during a workflow job execution; we need to start the perf recorder at the beginning of the job.
- Job cleanup
  My action will dirty the local workspace or machine environment during execution; we need to clean up these changes at the end of the job.
  Ex: `actions/checkout@v2` will write `github.token` into the local `.git/config` during execution; it has post-job cleanup defined to undo the changes.

## Decision

### Add `pre` and `post` execution to actions

Node Action Example:

```yaml
name: 'My action with pre'
description: 'My action with pre'
runs:
  using: 'node12'
  pre: 'setup.js'
  pre-if: 'success()' // Optional
  main: 'index.js'
  post: 'cleanup.js'
  post-if: 'success()' // Optional
```

Container Action Example:

```yaml
name: 'My action with pre'
description: 'My action with pre'
runs:
  using: 'docker'
  image: 'mycontainer:latest'
  pre-entrypoint: 'setup.sh'
  pre-if: 'success()' // Optional
  entrypoint: 'entrypoint.sh'
  post-entrypoint: 'cleanup.sh'
  post-if: 'success()' // Optional
```

Both `pre` and `post` will have their default `pre-if`/`post-if` set to `always()`.
Setting `pre-if` to `always()` makes sure that, no matter what condition result `main` gets at runtime, the `pre` has always run already.
`pre` executes in the order in which the steps are defined.
`pre` will always be added to the job steps list during job setup.
> An action referenced from the local repository (`./my-action`) won't get `pre` set up correctly, since the repository hasn't been checked out during job initialization.
> We can't use the GitHub API to download the repository, since there is about a 3 minute delay between `git push` and the new commit being available to download using the GitHub API.

`post` will be pushed onto a `poststeps` stack lazily when the action's `pre` or `main` execution passes its `if` condition check and is about to run. You can't have an action that only contains a `post`. We will pop and run each `post` after all `pre` and `main` steps have finished.
> Currently `post` works for both repository actions (`org/repo@v1`) and local actions (`./my-action`)
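A simplified sketch of the resulting step ordering (Python pseudocode; it ignores `if` conditions and the lazy `post` registration described above, and is not runner code):

```python
# Sketch: pre steps run first in step order, then main steps, then post steps are
# popped from a stack, so they run in reverse order of registration.
def expand_job(actions: list) -> list:
    pre_steps, main_steps, post_stack = [], [], []
    for action in actions:
        if action.get("pre"):
            pre_steps.append(f"{action['name']}.pre")
        main_steps.append(f"{action['name']}.main")
        if action.get("post"):
            post_stack.append(f"{action['name']}.post")
    return pre_steps + main_steps + [post_stack.pop() for _ in range(len(post_stack))]


print(expand_job([
    {"name": "A", "pre": True, "post": True},
    {"name": "B", "post": True},
    {"name": "C"},
]))
# ['A.pre', 'A.main', 'B.main', 'C.main', 'B.post', 'A.post']
```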
Valid action:
- only has `main`
- has `pre` and `main`
- has `main` and `post`
- has `pre`, `main` and `post`

Invalid action:
- only has `pre`
- only has `post`
- has `pre` and `post`

Potential downsides of introducing `pre`:

- Extra magic with respect to step order. Users should control the step order, especially when we introduce templates.
- Eliminates the possibility of lazily downloading the action tarball; since `pre` always runs by default, we have to download the tarball to check whether the action defines a `pre`.
- `pre` doesn't work with local actions; we suggest customers use local actions for testing their action changes (e.g. CI for their action) to avoid the delay between `git push` and the GitHub repo tarball download API.
- The condition on `pre` can't be controlled using dynamic step outputs. `pre` executes too early.
docs/adrs/0397-runner-registration-labels.md (new file, 56 lines)

@@ -0,0 +1,56 @@
# ADR 0397: Support adding custom labels during runner config

**Date**: 2020-03-30

**Status**: Approved

## Context

Since configuring self-hosted runners is commonly automated via scripts, labels need to be creatable during configuration. The runner currently registers the built-in labels (os, arch) during registration, but does not accept labels via command line args to extend the set registered.

See Issue: https://github.com/actions/runner/issues/262

This is another version of [ADR275](https://github.com/actions/runner/pull/275)

## Decision

This ADR proposes that we add a `--labels` option to `config`, which could be used to add custom additional labels to the configured runner.

For example, to add a single extra label the operator could run:
```bash
./config.sh --labels mylabel
```
> Note: the current runner command line parsing and envvar override algorithm only supports a single argument (key).

This would add the label `mylabel` to the runner, and enable users to select the runner in their workflow using this label:
```yaml
runs-on: [self-hosted, mylabel]
```

To add multiple labels the operator could run:
```bash
./config.sh --labels mylabel,anotherlabel
```
> Note: the current runner command line parsing and envvar override algorithm only supports a single argument (key).

This would add the labels `mylabel` and `anotherlabel` to the runner, and enable users to select the runner in their workflow using these labels:
```yaml
runs-on: [self-hosted, mylabel, anotherlabel]
```

It would not be possible to remove labels from an existing runner using `config.sh`; instead, labels would have to be removed using the GitHub UI.

The labels argument will be split on commas, trimmed, and empty strings discarded. That effectively means: don't use commas in unattended config label names. Alternatively we could choose to escape commas, but that is a nice-to-have.
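A minimal sketch of the described label parsing (Python, an illustration rather than the runner's C# argument handling):

```python
# Sketch: split on commas, trim whitespace, discard empty strings.
def parse_labels(raw: str) -> list:
    return [label.strip() for label in raw.split(",") if label.strip()]


print(parse_labels("mylabel, anotherlabel,,  "))  # ['mylabel', 'anotherlabel']
```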
## Replace

If a runner already exists and the option to replace it is chosen (interactively or via unattended config, as in this scenario), then the labels will be replaced / overwritten (not merged).

## Overriding built-in labels

Note that it is possible to register "built-in" hosted labels like `ubuntu-latest`, and this is not considered an error. This is an effective way for the org / runner admin to dictate by policy, through registration, that this set of runners will be used without having to edit all the workflow files now and in the future.

We will also not add other restrictions, such as limiting or validating explicitly added os / arch labels. We will assume that explicit labels were added for a reason; not restricting them offers the most flexibility and future proofing / compat.

## Consequences

The ability to add custom labels to a self-hosted runner would enable most scenarios where job runner selection based on runner capabilities or characteristics is required.
378
docs/adrs/0549-composite-run-steps.md
Normal file
378
docs/adrs/0549-composite-run-steps.md
Normal file
@@ -0,0 +1,378 @@
|
|||||||
|
# ADR 0549: Composite Run Steps
|
||||||
|
|
||||||
|
**Date**: 2020-06-17
|
||||||
|
|
||||||
|
**Status**: Accepted
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
Customers want to be able to compose actions from actions (ex: https://github.com/actions/runner/issues/438)
|
||||||
|
|
||||||
|
An important step towards meeting this goal is to build in functionality for actions where users can simply execute any number of steps.
|
||||||
|
|
||||||
|
### Guiding Principles
|
||||||
|
|
||||||
|
We don't want the workflow author to need to know how the internal workings of the action work. Users shouldn't know the internal workings of the composite action (for example, `default.shell` and `default.workingDir` should not be inherited from the workflow file to the action file). When deciding how to design certain parts of composite run steps, we want to think one logical step from the consumer.
|
||||||
|
|
||||||
|
A composite action is treated as **one** individual job step (this is known as encapsulation).
|
||||||
|
|
||||||
|
## Decision
|
||||||
|
|
||||||
|
**In this ADR, we only support running multiple run steps in an Action.** In doing so, we build in support for mapping and flowing the inputs, outputs, and env variables (ex: All nested steps should have access to its parents' input variables and nested steps can overwrite the input variables).
|
||||||
|
|
||||||
|
### Composite Run Steps Features
|
||||||
|
This feature supports at the top action level:
|
||||||
|
- name
|
||||||
|
- description
|
||||||
|
- inputs
|
||||||
|
- runs
|
||||||
|
- outputs
|
||||||
|
|
||||||
|
This feature supports at the run step level:
|
||||||
|
- name
|
||||||
|
- id
|
||||||
|
- run
|
||||||
|
- env
|
||||||
|
- shell
|
||||||
|
- working-directory
|
||||||
|
|
||||||
|
This feature **does not support** at the run step level:
|
||||||
|
- timeout-minutes
|
||||||
|
- secrets
|
||||||
|
- conditionals (needs, if, etc.)
|
||||||
|
- continue-on-error
|
||||||
|
|
||||||
|
### Steps
|
||||||
|
|
||||||
|
Example `workflow.yml`
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: self-hosted
|
||||||
|
steps:
|
||||||
|
- id: step1
|
||||||
|
uses: actions/setup-python@v1
|
||||||
|
- id: step2
|
||||||
|
uses: actions/setup-node@v2
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: user/composite@v1
|
||||||
|
- name: workflow step 1
|
||||||
|
run: echo hello world 3
|
||||||
|
- name: workflow step 2
|
||||||
|
run: echo hello world 4
|
||||||
|
```
|
||||||
|
|
||||||
|
Example `user/composite/action.yml`
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
runs:
|
||||||
|
using: "composite"
|
||||||
|
steps:
|
||||||
|
- run: pip install -r requirements.txt
|
||||||
|
shell: bash
|
||||||
|
- run: npm install
|
||||||
|
shell: bash
|
||||||
|
```
|
||||||
|
|
||||||
|
Example Output
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
[npm installation output]
|
||||||
|
[pip requirements output]
|
||||||
|
echo hello world 3
|
||||||
|
echo hello world 4
|
||||||
|
```
|
||||||
|
|
||||||
|
We add a token called "composite" which allows our Runner code to process composite actions. By invoking "using: composite", our Runner code then processes the "steps" attribute, converts this template code to a list of steps, and finally runs each run step sequentially. If any step fails and there are no `if` conditions defined, the whole composite action job fails.
|
||||||
|
|
||||||
|
### Defaults
|
||||||
|
|
||||||
|
We will not support "defaults" in a composite action.
|
||||||
|
|
||||||
|
### Shell and Working-directory
|
||||||
|
|
||||||
|
For each run step in a composite action, the action author can set the `shell` and `working-directory` attributes for that step. The `shell` attribute is **required** for each run step: the action author does not know which operating system the workflow author is running on, so we explicitly prevent unknown behavior by making sure that each run step has a shell **set by the action author.** On the other hand, `working-directory` is optional. Moreover, the composite action author can map values from `inputs` into a step's `shell` and `working-directory` attributes.
|
||||||
|
|
||||||
|
For example,
|
||||||
|
|
||||||
|
`action.yml`
|
||||||
|
|
||||||
|
|
||||||
|
```yaml
inputs:
  shell_1:
    description: 'The shell to use'
    default: 'pwsh'
steps:
  - run: echo 1
    shell: ${{ inputs.shell_1 }}
```
|
||||||
|
|
||||||
|
Note that the workflow file and action file are treated as separate entities, **so the workflow `defaults` will never change the `shell` and `working-directory` values of the run steps in a composite action.** In a workflow, `defaults` only apply to run steps, not to "uses" steps (steps that use an action).
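
As a hedged illustration of that boundary (reusing the hypothetical `user/composite` action from the examples above), a workflow-level `defaults.run.shell` applies to the plain run step in the workflow but never to the run steps inside the composite action, which keep the shell their author set:

```yaml
defaults:
  run:
    shell: pwsh               # applies to plain run steps in this workflow file
jobs:
  build:
    runs-on: self-hosted
    steps:
      - run: Get-Location     # picks up pwsh from the workflow defaults
      - uses: user/composite@v1   # its internal run steps keep the shell set in action.yml
```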
|
||||||
|
|
||||||
|
### Running Local Scripts
|
||||||
|
|
||||||
|
Example 'workflow.yml':
|
||||||
|
```yaml
jobs:
  build:
    runs-on: self-hosted
    steps:
      - uses: user/composite@v1
```
|
||||||
|
|
||||||
|
Example `user/composite/action.yml`:
|
||||||
|
|
||||||
|
```yaml
runs:
  using: "composite"
  steps:
    - run: chmod +x ${{ github.action_path }}/test/script2.sh
      shell: bash
    - run: chmod +x $GITHUB_ACTION_PATH/script.sh
      shell: bash
    - run: ${{ github.action_path }}/test/script2.sh
      shell: bash
    - run: $GITHUB_ACTION_PATH/script.sh
      shell: bash
```
|
||||||
|
Where `user/composite` has the file structure:
|
||||||
|
```
|
||||||
|
.
|
||||||
|
+-- action.yml
|
||||||
|
+-- script.sh
|
||||||
|
+-- test
|
||||||
|
| +-- script2.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
Users will be able to run scripts located in their action folder by prepending the relative path and script name with `$GITHUB_ACTION_PATH` or `${{ github.action_path }}`, which contain the path the composite action is downloaded to and where those files live. Note, you'll have to run `chmod +x` on each script before executing it unless you check your script files into your GitHub repo with the executable bit already set (for example, via `git update-index --chmod=+x <script>`).
|
||||||
|
|
||||||
|
### Inputs
|
||||||
|
|
||||||
|
Example `workflow.yml`:
|
||||||
|
|
||||||
|
```yaml
steps:
  - id: foo
    uses: user/composite@v1
    with:
      your_name: "Octocat"
```
|
||||||
|
|
||||||
|
Example `user/composite/action.yml`:
|
||||||
|
|
||||||
|
```yaml
inputs:
  your_name:
    description: 'Your name'
    default: 'Ethan'
runs:
  using: "composite"
  steps:
    - run: echo hello ${{ inputs.your_name }}
      shell: bash
```
|
||||||
|
|
||||||
|
Example Output:
|
||||||
|
|
||||||
|
```
|
||||||
|
hello Octocat
|
||||||
|
```
|
||||||
|
|
||||||
|
Each input variable in the composite action is only viewable in its own scope.
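
As a rough sketch of that scoping (reusing the example above), `inputs.your_name` resolves inside `user/composite/action.yml`, but the calling workflow sits outside that scope:

```yaml
# workflow.yml
steps:
  - id: foo
    uses: user/composite@v1
    with:
      your_name: "Octocat"
  # `inputs.your_name` is not in scope out here; only the composite action's own file can read it
```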
|
||||||
|
|
||||||
|
### Outputs
|
||||||
|
|
||||||
|
Example `workflow.yml`:
|
||||||
|
|
||||||
|
```yaml
...
steps:
  - id: foo
    uses: user/composite@v1
  - run: echo random-number ${{ steps.foo.outputs.random-number }}
    shell: bash
```
|
||||||
|
|
||||||
|
Example `user/composite/action.yml`:
|
||||||
|
|
||||||
|
```yaml
outputs:
  random-number:
    description: "Random number"
    value: ${{ steps.random-number-generator.outputs.random-id }}
runs:
  using: "composite"
  steps:
    - id: random-number-generator
      run: echo "::set-output name=random-id::$(echo $RANDOM)"
      shell: bash
```
|
||||||
|
|
||||||
|
Example Output:
|
||||||
|
|
||||||
|
```
|
||||||
|
::set-output name=random-id::43243
|
||||||
|
random-number 43243
|
||||||
|
```
|
||||||
|
|
||||||
|
Each of the output variables from the composite action is viewable from the workflow file that uses the composite action. In other words, a child action's outputs are viewable only by its parent, using dot notation (ex: `steps.foo.outputs.random-number`).
|
||||||
|
|
||||||
|
Moreover, output ids are only accessible within the scope where they were defined. Note that in the example above, the `workflow.yml` file should not have access to the internal output id (i.e. `random-id`), only to the action-level output `random-number`. The reason for this is that we don't want to require the workflow author to know the internal workings of the composite action.
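
A short sketch of that scoping, using the step ids from the example above:

```yaml
# workflow.yml
steps:
  - id: foo
    uses: user/composite@v1
  - run: echo ${{ steps.foo.outputs.random-number }}   # visible: declared as an action-level output
    shell: bash
  # steps.random-number-generator.outputs.random-id is internal to the composite action
  # and is not accessible from this workflow file
```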
|
||||||
|
|
||||||
|
### Context
|
||||||
|
|
||||||
|
Similar to the workflow file, the composite action has access to the [same context objects](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#contexts) (ex: `github`, `env`, `strategy`).
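
For example, a minimal sketch of a composite run step reading the `github` context (the echoed text is illustrative):

```yaml
runs:
  using: "composite"
  steps:
    - run: echo "running for ${{ github.repository }} at ${{ github.sha }}"
      shell: bash
```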
|
||||||
|
|
||||||
|
### Environment
|
||||||
|
|
||||||
|
In a composite action, you'll only be able to use the `::set-env::` workflow command to set environment variables, just like you could with other actions.
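
A minimal sketch, assuming the `::set-env::` workflow command that was current when this ADR was written (the variable name and value are illustrative):

```yaml
runs:
  using: "composite"
  steps:
    - run: echo "::set-env name=FAVORITE_FRUIT::apple"
      shell: bash
    - run: echo "my favorite fruit is $FAVORITE_FRUIT"   # set-env makes the variable visible to subsequent steps
      shell: bash
```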
|
||||||
|
|
||||||
|
### Secrets
|
||||||
|
|
||||||
|
**We will not support "Secrets" in a composite action for now. This functionality will be focused on in a future ADR.**
|
||||||
|
|
||||||
|
We'll pass the secrets from the composite action's parents (ex: the workflow file) to the composite action. Secrets can be referenced in the composite action with the `secrets` context. In the action's YAML, we'll automatically mask the secret.
|
||||||
|
|
||||||
|
|
||||||
|
### If Condition
|
||||||
|
|
||||||
|
**If and needs conditions will not be supported in the composite run steps feature. They will be supported later on in a new feature.**
|
||||||
|
|
||||||
|
Old reasoning:
|
||||||
|
|
||||||
|
Example `workflow.yml`:
|
||||||
|
|
||||||
|
```yaml
steps:
  - run: exit 1
  - uses: user/composite@v1 # <--- this will run, as it's marked as always running
    if: always()
```
|
||||||
|
|
||||||
|
Example `user/composite/action.yml`:
|
||||||
|
|
||||||
|
```yaml
runs:
  using: "composite"
  steps:
    - run: echo "just succeeding"
      shell: bash
    - run: echo "I will run, as my current scope is succeeding"
      shell: bash
      if: success()
    - run: exit 1
      shell: bash
    - run: echo "I will not run, as my current scope is now failing"
      shell: bash
```
|
||||||
|
|
||||||
|
**We will not support "if Condition" in a composite action for now. This functionality will be focused on in a future ADR.**
|
||||||
|
|
||||||
|
See the paragraph below for a rudimentary approach (thank you to @cybojenix for the idea, example, and explanation for this approach):
|
||||||
|
|
||||||
|
The `if` condition in the parent (in the example above, the `workflow.yml`) determines whether or not the composite action runs at all. So, our composite action will run since the `if` condition on the composite action step is `always()`.
|
||||||
|
|
||||||
|
**Note that the if condition on the parent does not propagate to the rest of its children though.**
|
||||||
|
|
||||||
|
In the child action (in this example, the `action.yml`), execution starts with a clean slate (in other words, no imposed if conditions). Similar to the logic in the paragraph above, `echo "I will run, as my current scope is succeeding"` will run since its `if` condition checks that the previous steps **within this composite action** have not failed. `run: echo "I will not run, as my current scope is now failing"` will not run since the previous step resulted in an error and, by default, the if expression is set to `success()` when no if condition is set for a step.
|
||||||
|
|
||||||
|
|
||||||
|
What if a step uses `cancelled()`? We do the opposite of our approach above when `cancelled()` is used for any of our composite run steps: a step with this condition runs only if the workflow is cancelled.
|
||||||
|
|
||||||
|
### Timeout-minutes
|
||||||
|
|
||||||
|
Example `workflow.yml`:
|
||||||
|
|
||||||
|
```yaml
steps:
  - id: bar
    uses: user/test@v1
    timeout-minutes: 50
```
|
||||||
|
|
||||||
|
Example `user/composite/action.yml`:
|
||||||
|
|
||||||
|
```yaml
runs:
  using: "composite"
  steps:
    - id: foo1
      run: echo test 1
      timeout-minutes: 10
      shell: bash
    - id: foo2
      run: echo test 2
      shell: bash
    - id: foo3
      run: echo test 3
      timeout-minutes: 10
      shell: bash
```
|
||||||
|
|
||||||
|
**We will not support "timeout-minutes" in a composite action for now. This functionality will be focused on in a future ADR.**
|
||||||
|
|
||||||
|
A composite action in its entirety is a job. You can set `timeout-minutes` both for the whole composite action and for its steps, as long as the sum of the `timeout-minutes` for each composite action step that has the attribute is less than or equal to the `timeout-minutes` for the composite action. There is no default `timeout-minutes` for a composite action step.
|
||||||
|
|
||||||
|
If the time taken by the steps, individually or in combination, exceeds the whole composite action's `timeout-minutes` attribute, the whole job will fail (1). If an individual step exceeds its own `timeout-minutes` attribute but the total time used including this step is still below the overall composite action `timeout-minutes`, the individual step will fail but the rest of the steps will run based on their own `timeout-minutes` attribute (they will still abide by condition (1), though).
|
||||||
|
|
||||||
|
For reference, in the example above, if the composite step `foo1` takes 11 minutes to run, that step will fail but the rest of the steps, `foo2` and `foo3`, will proceed as long as their total runtime together with the failed `foo1` step stays below the composite action's `timeout-minutes` (50 minutes). If the composite step `foo2` takes 51 minutes to run, it will cause the whole composite action job to fail.
|
||||||
|
|
||||||
|
The rationale behind this is that users can configure their steps with the `if` condition to conditionally set how steps rely on each other. Due to the additional capabilities offered by combining `timeout-minutes` and/or `if`, we wanted the `timeout-minutes` condition to be as dumb as possible and not affect other steps.
|
||||||
|
|
||||||
|
[Usage limits still apply](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions?query=if%28%29#usage-limits)
|
||||||
|
|
||||||
|
|
||||||
|
### Continue-on-error
|
||||||
|
|
||||||
|
Example `workflow.yml`:
|
||||||
|
|
||||||
|
```yaml
steps:
  - run: exit 1
  - id: bar
    uses: user/test@v1
    continue-on-error: false
  - id: foo
    run: echo "Hello World" # <------- This step will not run
```
|
||||||
|
|
||||||
|
Example `user/composite/action.yml`:
|
||||||
|
|
||||||
|
```yaml
runs:
  using: "composite"
  steps:
    - run: exit 1
      continue-on-error: true
      shell: bash
    - run: echo "Hello World 2" # <----- This step will run
      shell: bash
```
|
||||||
|
|
||||||
|
**We will not support "continue-on-error" in a composite action for now. This functionality will be focused on in a future ADR.**
|
||||||
|
|
||||||
|
If any of the steps fail in the composite action and `continue-on-error` is set to `false` for the whole composite action step in the workflow file, then the steps below it will not run. On the flip side, if `continue-on-error` is set to `true` for the whole composite action step in the workflow file, the next job step will still run.
|
||||||
|
|
||||||
|
For the composite action steps, the same logic applies. In this example, `"Hello World 2"` will be output because the previous step has `continue-on-error` set to `true`, even though that previous step errored.
|
||||||
|
|
||||||
|
### Visualizing Composite Action in the GitHub Actions UI
|
||||||
|
We want all the composite action's steps to be condensed into the original composite action node.
|
||||||
|
|
||||||
|
Here is a visual representation of the [first example](#Steps)
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
| composite_action_node |
|
||||||
|
| echo hello world 1 |
|
||||||
|
| echo hello world 2 |
|
||||||
|
| echo hello world 3 |
|
||||||
|
| echo hello world 4 |
|
||||||
|
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Consequences
|
||||||
|
|
||||||
|
This ADR lays the framework for eventually supporting nested Composite Actions within Composite Actions. It allows users to run multiple run steps within a GitHub Composite Action, with support for inputs, outputs, environment, and context in any of those steps; the per-step `if`, `timeout-minutes`, and `continue-on-error` attributes are deferred to future ADRs, as noted above.
|
||||||
57
docs/automate.md
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
# Automate Configuring Self-Hosted Runners
|
||||||
|
|
||||||
|
|
||||||
|
## Export PAT
|
||||||
|
|
||||||
|
Before running any of these sample scripts, create a GitHub PAT and export it:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export RUNNER_CFG_PAT=yourPAT
|
||||||
|
```
|
||||||
|
|
||||||
|
## Create running as a service
|
||||||
|
|
||||||
|
**Scenario**: Run on a machine or VM (not container) which automates:
|
||||||
|
|
||||||
|
- Resolving latest released runner
|
||||||
|
- Download and extract latest
|
||||||
|
- Acquire a registration token
|
||||||
|
- Configure the runner
|
||||||
|
- Run as a systemd (linux) or Launchd (osx) service
|
||||||
|
|
||||||
|
:point_right: [Sample script here](../scripts/create-latest-svc.sh) :point_left:
|
||||||
|
|
||||||
|
Run as a one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level)
|
||||||
|
```bash
|
||||||
|
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/create-latest-svc.sh | bash -s yourorg/yourrepo
|
||||||
|
```
|
||||||
|
|
||||||
|
## Uninstall running as service
|
||||||
|
|
||||||
|
**Scenario**: Run on a machine or VM (not container) which automates:
|
||||||
|
|
||||||
|
- Stops and uninstalls the systemd (linux) or Launchd (osx) service
|
||||||
|
- Acquires a removal token
|
||||||
|
- Removes the runner
|
||||||
|
|
||||||
|
:point_right: [Sample script here](../scripts/remove-svc.sh) :point_left:
|
||||||
|
|
||||||
|
Repo level one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level)
|
||||||
|
```bash
|
||||||
|
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/remove-svc.sh | bash -s yourorg/yourrepo
|
||||||
|
```
|
||||||
|
|
||||||
|
### Delete an offline runner
|
||||||
|
|
||||||
|
**Scenario**: Deletes a registered runner that is offline:
|
||||||
|
|
||||||
|
- Ensures the runner is offline
|
||||||
|
- Resolves id from name
|
||||||
|
- Deletes the runner
|
||||||
|
|
||||||
|
:point_right: [Sample script here](../scripts/delete.sh) :point_left:
|
||||||
|
|
||||||
|
Repo level one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level) and replace runnername
|
||||||
|
```bash
|
||||||
|
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/delete.sh | bash -s yourorg/yourrepo runnername
|
||||||
|
```
|
||||||
@@ -23,7 +23,7 @@ An ADR is an Architectural Decision Record. This allows consensus on the direct
|
|||||||
|
|
||||||
### Required Dev Dependencies
|
### Required Dev Dependencies
|
||||||
|
|
||||||
 Git for Windows [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
|
  Git for Windows and Linux [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
|
||||||
|
|
||||||
### To Build, Test, Layout
|
### To Build, Test, Layout
|
||||||
|
|
||||||
@@ -43,17 +43,31 @@ Sample developer flow:
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
git clone https://github.com/actions/runner
|
git clone https://github.com/actions/runner
|
||||||
|
cd runner
|
||||||
cd ./src
|
cd ./src
|
||||||
./dev.(sh/cmd) layout # the runner that build from source is in {root}/_layout
|
./dev.(sh/cmd) layout # the runner that built from source is in {root}/_layout
|
||||||
<make code changes>
|
<make code changes>
|
||||||
./dev.(sh/cmd) build # {root}/_layout will get updated
|
./dev.(sh/cmd) build # {root}/_layout will get updated
|
||||||
./dev.(sh/cmd) test # run all unit tests before git commit/push
|
./dev.(sh/cmd) test # run all unit tests before git commit/push
|
||||||
```
|
```
|
||||||
|
|
||||||
|
View logs:
|
||||||
|
```bash
|
||||||
|
cd runner/_layout/_diag
|
||||||
|
ls
|
||||||
|
cat (Runner/Worker)_TIMESTAMP.log # view your log file
|
||||||
|
```
|
||||||
|
|
||||||
|
Run Runner:
|
||||||
|
```bash
|
||||||
|
cd runner/_layout
|
||||||
|
./run.sh # run your custom runner
|
||||||
|
```
|
||||||
|
|
||||||
### Editors
|
### Editors
|
||||||
|
|
||||||
[Using Visual Studio Code](https://code.visualstudio.com/)
|
[Using Visual Studio Code](https://code.visualstudio.com/)
|
||||||
[Using Visual Studio 2019](https://www.visualstudio.com/vs/)
|
[Using Visual Studio](https://code.visualstudio.com/docs)
|
||||||
|
|
||||||
### Styling
|
### Styling
|
||||||
|
|
||||||
|
|||||||
61
docs/design/auth.md
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
# Runner Authentication and Authorization
|
||||||
|
|
||||||
|
## Goals
|
||||||
|
- Support runner installs in untrusted domains.
|
||||||
|
- The account that configures or runs the runner process is not relevant for accessing GitHub resources.
|
||||||
|
- Accessing GitHub resources is done with a per-job token which expires when job completes.
|
||||||
|
- The token is granted to trusted parts of the system including the runner, actions and script steps specified by the workflow author as trusted.
|
||||||
|
- All OAuth tokens that come from the Token Service that the runner uses to access Actions Service resources are the same. It's just the scope and expiration of the token that may vary.
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
Configuring a self-hosted runner is [covered here in the documentation](https://help.github.com/en/actions/hosting-your-own-runners/adding-self-hosted-runners).
|
||||||
|
|
||||||
|
Configuration is done with the user being authenticated via a time-limited, GitHub runner registration token.
|
||||||
|
|
||||||
|
*Your credentials are never used for registering the runner with the service.*
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
During configuration, an RSA public/private key pair is created and the private key is stored in a file on disk. On Windows, the content is protected with DPAPI (machine-level encryption, so the runner is only valid on that machine); on Linux/OSX it is protected with `chmod` permissions.
|
||||||
|
|
||||||
|
Using your credentials, the runner is registered by sending the public key to the service, which adds that runner to the pool and stores the public key. The Token Service then generates a `clientId` associated with the public key.
|
||||||
|
|
||||||
|
## Start and Listen
|
||||||
|
|
||||||
|
After configuring the runner, the runner can be started interactively (`./run.cmd` or `./run.sh`) or as a service.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
On start, the runner listener process loads the RSA private key (on Windows, decrypting it with the machine-key DPAPI) and asks the Token Service for an OAuth token, authenticating that request with a JWT signed by the RSA private key.
|
||||||
|
The server then responds with an OAuth token that grants permission to access the message queue (HTTP long poll), allowing the runner to acquire the messages it will eventually run.
|
||||||
|
|
||||||
|
## Run a workflow
|
||||||
|
|
||||||
|
When a workflow is run, its labels are evaluated, it is matched to a runner, and a message is placed in that runner's message queue.
|
||||||
|
The runner then starts listening for jobs via the message queue HTTP long poll.
|
||||||
|
The message is encrypted with the runner's public key, stored during runner configuration.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
A workflow is queued as a result of a triggered [event](https://help.github.com/en/actions/reference/events-that-trigger-workflows). Workflows can be scheduled to [run at specific UTC times](https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) using POSIX `cron` syntax.
|
||||||
|
An [OAuth token](http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html) is generated, granting limited access to the host in Actions Service associated with the github.com repository/organization.
|
||||||
|
The lifetime of the OAuth token is the lifetime of the run or at most the [job timeout (default: 6 hours)](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idtimeout-minutes), plus 10 additional minutes.
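
For reference, the scheduled trigger mentioned above is declared in the workflow file like this (the cron expression is illustrative):

```yaml
on:
  schedule:
    - cron: '30 5 * * 1-5'   # 05:30 UTC, Monday through Friday
```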
|
||||||
|
|
||||||
|
## Accessing GitHub resources
|
||||||
|
|
||||||
|
The job message sent to the runner contains the OAuth token to talk back to the Actions Service.
|
||||||
|
The runner listener parent process will spawn a runner worker process for that job and send it the job message over IPC.
|
||||||
|
The token is never persisted.
|
||||||
|
|
||||||
|
Each action is run as a unique subprocess.
|
||||||
|
The encrypted access token will be provided as an environment variable in each action subprocess.
|
||||||
|
The token is registered with the runner as a secret and scrubbed from the logs as they are written.
|
||||||
|
|
||||||
|
Authentication in a workflow run to github.com can be accomplished by using the [`GITHUB_TOKEN`](https://help.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#about-the-github_token-secret) secret. This token expires after 60 minutes. Please note that this token is different from the OAuth token that the runner uses to talk to the Actions Service.
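
A minimal sketch of using that secret from a workflow step (the API call is illustrative):

```yaml
steps:
  - run: |
      curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
        https://api.github.com/repos/${{ github.repository }}/issues
```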
|
||||||
|
|
||||||
|
## Hosted runner authentication
|
||||||
|
|
||||||
|
Hosted runner authentication differs from self-hosted authentication in that runners do not undergo a registration process, but instead, the hosted runners get the OAuth token directly by reading the `.credentials` file. The scope of this particular token is limited for a given workflow job execution, and the token is revoked as soon as the job is finished.
|
||||||
|
|
||||||
|

|
||||||
BIN
docs/res/hosted-config-start.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 31 KiB |
52
docs/res/runner-auth-diags.txt
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
# Markup used to generate the runner auth diagrams: https://websequencediagrams.com
|
||||||
|
|
||||||
|
title Runner Configuration (self-hosted only)
|
||||||
|
|
||||||
|
note left of Runner: GitHub repo URL as input
|
||||||
|
Runner->github.com: Retrieve Actions Service access using runner registration token
|
||||||
|
github.com->Runner: Access token for Actions Service
|
||||||
|
note left of Runner: Generate RSA key pair
|
||||||
|
note left of Runner: Store encrypted RSA private key on disk
|
||||||
|
Runner->Actions Service: Register runner using Actions Service access token
|
||||||
|
note right of Runner: Runner name, RSA public key sent
|
||||||
|
note right of Actions Service: Public key stored
|
||||||
|
Actions Service->Token Service: Register runner as an app along with the RSA public key
|
||||||
|
note right of Token Service: Public key stored
|
||||||
|
Token Service->Actions Service: Client Id for the runner application
|
||||||
|
Actions Service->Runner: Client Id and Token Endpoint URL
|
||||||
|
note left of Runner: Store runner configuration info into .runner file
|
||||||
|
note left of Runner: Store Token registration info into .credentials file
|
||||||
|
|
||||||
|
title Runner Start and Running (self-hosted only)
|
||||||
|
|
||||||
|
Runner.Listener->Runner.Listener: Start
|
||||||
|
note left of Runner.Listener: Load config info from .runner
|
||||||
|
note left of Runner.Listener: Load token registration from .credentials
|
||||||
|
Runner.Listener->Token Service: Exchange OAuth token (happens every 50 mins)
|
||||||
|
note right of Runner.Listener: Construct JWT token, use Client Id signed by RSA private key
|
||||||
|
note left of Actions Service: Find corresponding RSA public key, use Client Id\nVerify JWT token's signature
|
||||||
|
Token Service->Runner.Listener: OAuth token with limited permission and valid for 50 mins
|
||||||
|
Runner.Listener->Actions Service: Connect to Actions Service with OAuth token
|
||||||
|
Actions Service->Runner.Listener: Workflow job
|
||||||
|
|
||||||
|
title Running workflow
|
||||||
|
|
||||||
|
Runner.Listener->Service (Message Queue): Get message
|
||||||
|
note right of Runner.Listener: Authenticate with exchanged OAuth token
|
||||||
|
Event->Actions Service: Queue workflow
|
||||||
|
Actions Service->Actions Service: Generate OAuth token per job
|
||||||
|
Actions Service->Actions Service: Build job message with the OAuth token
|
||||||
|
Actions Service->Actions Service: Encrypt job message with the target runner's public key
|
||||||
|
Actions Service->Service (Message Queue): Send encrypted job message to runner
|
||||||
|
Service (Message Queue)->Runner.Listener: Send job
|
||||||
|
note right of Runner.Listener: Decrypt message with runner's private key
|
||||||
|
Runner.Listener->Runner.Worker: Create worker process per job and run the job
|
||||||
|
|
||||||
|
title Runner Configuration, Start and Running (hosted only)
|
||||||
|
|
||||||
|
Machine Management Service->Runner.Listener: Construct .runner configuration file, store token in .credentials
|
||||||
|
Runner.Listener->Runner.Listener: Start
|
||||||
|
note left of Runner.Listener: Load config info from .runner
|
||||||
|
note left of Runner.Listener: Load OAuth token from .credentials
|
||||||
|
Runner.Listener->Actions Service: Connect to Actions Service with OAuth token in .credentials
|
||||||
|
Actions Service->Runner.Listener: Workflow job
|
||||||
BIN
docs/res/self-hosted-config.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 98 KiB |
BIN
docs/res/self-hosted-start.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 43 KiB |
BIN
docs/res/workflow-run.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 46 KiB |
@@ -40,7 +40,7 @@ Debian based OS (Debian, Ubuntu, Linux Mint)
|
|||||||
- libssl1.1, libssl1.0.2 or libssl1.0.0
|
- libssl1.1, libssl1.0.2 or libssl1.0.0
|
||||||
- libicu63, libicu60, libicu57 or libicu55
|
- libicu63, libicu60, libicu57 or libicu55
|
||||||
|
|
||||||
Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7)
|
Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7)
|
||||||
|
|
||||||
- lttng-ust
|
- lttng-ust
|
||||||
- openssl-libs
|
- openssl-libs
|
||||||
|
|||||||
@@ -1,68 +1,67 @@
|
|||||||
## Features
|
## Features
|
||||||
- Remove runner flow: Change from PAT to "deletion token" in prompt (#225)
|
- N/A
|
||||||
- Expose github.run_id and github.run_number to action runtime env. (#224)
|
|
||||||
|
|
||||||
## Bugs
|
## Bugs
|
||||||
- Clean up error messages for container scenarios (#221)
|
- Fixed an issue where actions/checkout@v1 was not able to correctly set the working directory (#704)
|
||||||
- Pick shell from prependpath (#231)
|
|
||||||
|
|
||||||
## Misc
|
## Misc
|
||||||
- Runner code cleanup (#218 #227, #228, #229, #230)
|
- N/A
|
||||||
- Consume dotnet core 3.1 in runner. (#213)
|
|
||||||
|
|
||||||
## Windows x64
|
## Windows x64
|
||||||
We recommend configuring the runner under "<DRIVE>:\actions-runner". This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows
|
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
|
||||||
```
|
|
||||||
// Create a folder under the drive root
|
The following snippet needs to be run in `powershell`:
|
||||||
|
``` powershell
|
||||||
|
# Create a folder under the drive root
|
||||||
mkdir \actions-runner ; cd \actions-runner
|
mkdir \actions-runner ; cd \actions-runner
|
||||||
// Download the latest runner package
|
# Download the latest runner package
|
||||||
Invoke-WebRequest -Uri https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-win-x64-<RUNNER_VERSION>.zip -OutFile actions-runner-win-x64-<RUNNER_VERSION>.zip
|
Invoke-WebRequest -Uri https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-win-x64-<RUNNER_VERSION>.zip -OutFile actions-runner-win-x64-<RUNNER_VERSION>.zip
|
||||||
// Extract the installer
|
# Extract the installer
|
||||||
Add-Type -AssemblyName System.IO.Compression.FileSystem ;
|
Add-Type -AssemblyName System.IO.Compression.FileSystem ;
|
||||||
[System.IO.Compression.ZipFile]::ExtractToDirectory("$HOME\Downloads\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
|
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
|
||||||
```
|
```
|
||||||
|
|
||||||
## OSX
|
## OSX
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
// Create a folder
|
# Create a folder
|
||||||
mkdir actions-runner && cd actions-runner
|
mkdir actions-runner && cd actions-runner
|
||||||
// Download the latest runner package
|
# Download the latest runner package
|
||||||
curl -O https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
||||||
// Extract the installer
|
# Extract the installer
|
||||||
tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
||||||
```
|
```
|
||||||
|
|
||||||
## Linux x64
|
## Linux x64
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
// Create a folder
|
# Create a folder
|
||||||
mkdir actions-runner && cd actions-runner
|
mkdir actions-runner && cd actions-runner
|
||||||
// Download the latest runner package
|
# Download the latest runner package
|
||||||
curl -O https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
||||||
// Extract the installer
|
# Extract the installer
|
||||||
tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
||||||
```
|
```
|
||||||
|
|
||||||
## Linux arm64 (Pre-release)
|
## Linux arm64 (Pre-release)
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
// Create a folder
|
# Create a folder
|
||||||
mkdir actions-runner && cd actions-runner
|
mkdir actions-runner && cd actions-runner
|
||||||
// Download the latest runner package
|
# Download the latest runner package
|
||||||
curl -O https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
||||||
// Extract the installer
|
# Extract the installer
|
||||||
tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
||||||
```
|
```
|
||||||
|
|
||||||
## Linux arm (Pre-release)
|
## Linux arm (Pre-release)
|
||||||
|
|
||||||
``` bash
|
``` bash
|
||||||
// Create a folder
|
# Create a folder
|
||||||
mkdir actions-runner && cd actions-runner
|
mkdir actions-runner && cd actions-runner
|
||||||
// Download the latest runner package
|
# Download the latest runner package
|
||||||
curl -O https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
||||||
// Extract the installer
|
# Extract the installer
|
||||||
tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
2.164.0
|
2.273.2
|
||||||
|
|||||||
4
scripts/README.md
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# Sample scripts for self-hosted runners
|
||||||
|
|
||||||
|
Here are some examples to work from if you'd like to automate your use of self-hosted runners.
|
||||||
|
See the docs [here](../docs/automate.md).
|
||||||
147
scripts/create-latest-svc.sh
Executable file
@@ -0,0 +1,147 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
#
|
||||||
|
# Downloads latest releases (not pre-release) runner
|
||||||
|
# Configures as a service
|
||||||
|
#
|
||||||
|
# Examples:
|
||||||
|
# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myuser/myrepo my.ghe.deployment.net
|
||||||
|
# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myorg my.ghe.deployment.net
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# export RUNNER_CFG_PAT=<yourPAT>
|
||||||
|
# ./create-latest-svc scope [ghe_domain] [name] [user]
|
||||||
|
#
|
||||||
|
# scope required repo (:owner/:repo) or org (:organization)
|
||||||
|
# ghe_domain optional the fully qualified domain name of your GitHub Enterprise Server deployment
|
||||||
|
# name optional defaults to hostname
|
||||||
|
# user optional user svc will run as. defaults to current
|
||||||
|
#
|
||||||
|
# Notes:
|
||||||
|
# PATS over envvars are more secure
|
||||||
|
# Should be used on VMs and not containers
|
||||||
|
# Works on OSX and Linux
|
||||||
|
# Assumes x64 arch
|
||||||
|
#
|
||||||
|
|
||||||
|
runner_scope=${1}
|
||||||
|
ghe_hostname=${2}
|
||||||
|
runner_name=${3:-$(hostname)}
|
||||||
|
svc_user=${4:-$USER}
|
||||||
|
|
||||||
|
echo "Configuring runner @ ${runner_scope}"
|
||||||
|
sudo echo
|
||||||
|
|
||||||
|
#---------------------------------------
|
||||||
|
# Validate Environment
|
||||||
|
#---------------------------------------
|
||||||
|
runner_plat=linux
|
||||||
|
[ ! -z "$(which sw_vers)" ] && runner_plat=osx;
|
||||||
|
|
||||||
|
function fatal()
|
||||||
|
{
|
||||||
|
echo "error: $1" >&2
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
|
||||||
|
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
|
||||||
|
|
||||||
|
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
|
||||||
|
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
|
||||||
|
|
||||||
|
# bail early if there's already a runner there. also sudo early
|
||||||
|
if [ -d ./runner ]; then
|
||||||
|
fatal "Runner already exists. Use a different directory or delete ./runner"
|
||||||
|
fi
|
||||||
|
|
||||||
|
sudo -u ${svc_user} mkdir runner
|
||||||
|
|
||||||
|
# TODO: validate not in a container
|
||||||
|
# TODO: validate systemd or osx svc installer
|
||||||
|
|
||||||
|
#--------------------------------------
|
||||||
|
# Get a config token
|
||||||
|
#--------------------------------------
|
||||||
|
echo
|
||||||
|
echo "Generating a registration token..."
|
||||||
|
|
||||||
|
base_api_url="https://api.github.com"
|
||||||
|
if [ -n "${ghe_hostname}" ]; then
|
||||||
|
base_api_url="https://${ghe_hostname}/api/v3"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# if the scope has a slash, it's a repo runner
|
||||||
|
orgs_or_repos="orgs"
|
||||||
|
if [[ "$runner_scope" == *\/* ]]; then
|
||||||
|
orgs_or_repos="repos"
|
||||||
|
fi
|
||||||
|
|
||||||
|
export RUNNER_TOKEN=$(curl -s -X POST ${base_api_url}/${orgs_or_repos}/${runner_scope}/actions/runners/registration-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
|
||||||
|
|
||||||
|
if [ "null" == "$RUNNER_TOKEN" -o -z "$RUNNER_TOKEN" ]; then fatal "Failed to get a token"; fi
|
||||||
|
|
||||||
|
#---------------------------------------
|
||||||
|
# Download latest released and extract
|
||||||
|
#---------------------------------------
|
||||||
|
echo
|
||||||
|
echo "Downloading latest runner ..."
|
||||||
|
|
||||||
|
# For the GHES Alpha, download the runner from github.com
|
||||||
|
latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name')
|
||||||
|
latest_version=$(echo ${latest_version_label:1})
|
||||||
|
runner_file="actions-runner-${runner_plat}-x64-${latest_version}.tar.gz"
|
||||||
|
|
||||||
|
if [ -f "${runner_file}" ]; then
|
||||||
|
echo "${runner_file} exists. skipping download."
|
||||||
|
else
|
||||||
|
runner_url="https://github.com/actions/runner/releases/download/${latest_version_label}/${runner_file}"
|
||||||
|
|
||||||
|
echo "Downloading ${latest_version_label} for ${runner_plat} ..."
|
||||||
|
echo $runner_url
|
||||||
|
|
||||||
|
curl -O -L ${runner_url}
|
||||||
|
fi
|
||||||
|
|
||||||
|
ls -la *.tar.gz
|
||||||
|
|
||||||
|
#---------------------------------------------------
|
||||||
|
# extract to runner directory in this directory
|
||||||
|
#---------------------------------------------------
|
||||||
|
echo
|
||||||
|
echo "Extracting ${runner_file} to ./runner"
|
||||||
|
|
||||||
|
tar xzf "./${runner_file}" -C runner
|
||||||
|
|
||||||
|
# export of pass
|
||||||
|
sudo chown -R $svc_user ./runner
|
||||||
|
|
||||||
|
pushd ./runner
|
||||||
|
|
||||||
|
#---------------------------------------
|
||||||
|
# Unattend config
|
||||||
|
#---------------------------------------
|
||||||
|
runner_url="https://github.com/${runner_scope}"
|
||||||
|
if [ -n "${ghe_hostname}" ]; then
|
||||||
|
runner_url="https://${ghe_hostname}/${runner_scope}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo
|
||||||
|
echo "Configuring ${runner_name} @ $runner_url"
|
||||||
|
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name"
|
||||||
|
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name
|
||||||
|
|
||||||
|
#---------------------------------------
|
||||||
|
# Configuring as a service
|
||||||
|
#---------------------------------------
|
||||||
|
echo
|
||||||
|
echo "Configuring as a service ..."
|
||||||
|
prefix=""
|
||||||
|
if [ "${runner_plat}" == "linux" ]; then
|
||||||
|
prefix="sudo "
|
||||||
|
fi
|
||||||
|
|
||||||
|
${prefix}./svc.sh install ${svc_user}
|
||||||
|
${prefix}./svc.sh start
|
||||||
83
scripts/delete.sh
Executable file
@@ -0,0 +1,83 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
#
|
||||||
|
# Force deletes a runner from the service
|
||||||
|
# The caller should have already ensured the runner is gone and/or stopped
|
||||||
|
#
|
||||||
|
# Examples:
|
||||||
|
# RUNNER_CFG_PAT=<yourPAT> ./delete.sh myuser/myrepo myname
|
||||||
|
# RUNNER_CFG_PAT=<yourPAT> ./delete.sh myorg
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# export RUNNER_CFG_PAT=<yourPAT>
|
||||||
|
# ./delete.sh scope name
|
||||||
|
#
|
||||||
|
# scope required repo (:owner/:repo) or org (:organization)
|
||||||
|
# name optional defaults to hostname. name to delete
|
||||||
|
#
|
||||||
|
# Notes:
|
||||||
|
# PATS over envvars are more secure
|
||||||
|
# Works on OSX and Linux
|
||||||
|
# Assumes x64 arch
|
||||||
|
#
|
||||||
|
|
||||||
|
runner_scope=${1}
|
||||||
|
runner_name=${2}
|
||||||
|
|
||||||
|
echo "Deleting runner ${runner_name} @ ${runner_scope}"
|
||||||
|
|
||||||
|
function fatal()
|
||||||
|
{
|
||||||
|
echo "error: $1" >&2
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
|
||||||
|
if [ -z "${runner_name}" ]; then fatal "supply name as argument 2"; fi
|
||||||
|
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
|
||||||
|
|
||||||
|
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
|
||||||
|
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
|
||||||
|
|
||||||
|
base_api_url="https://api.github.com/orgs"
|
||||||
|
if [[ "$runner_scope" == *\/* ]]; then
|
||||||
|
base_api_url="https://api.github.com/repos"
|
||||||
|
fi
|
||||||
|
|
||||||
|
|
||||||
|
#--------------------------------------
|
||||||
|
# Ensure offline
|
||||||
|
#--------------------------------------
|
||||||
|
runner_status=$(curl -s -X GET ${base_api_url}/${runner_scope}/actions/runners?per_page=100 -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" \
|
||||||
|
| jq -M -j ".runners | .[] | [select(.name == \"${runner_name}\")] | .[0].status")
|
||||||
|
|
||||||
|
if [ -z "${runner_status}" ]; then
|
||||||
|
fatal "Could not find runner with name ${runner_name}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Status: ${runner_status}"
|
||||||
|
|
||||||
|
if [ "${runner_status}" != "offline" ]; then
|
||||||
|
fatal "Runner should be offline before removing"
|
||||||
|
fi
|
||||||
|
|
||||||
|
#--------------------------------------
|
||||||
|
# Get id of runner to remove
|
||||||
|
#--------------------------------------
|
||||||
|
runner_id=$(curl -s -X GET ${base_api_url}/${runner_scope}/actions/runners?per_page=100 -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" \
|
||||||
|
| jq -M -j ".runners | .[] | [select(.name == \"${runner_name}\")] | .[0].id")
|
||||||
|
|
||||||
|
if [ -z "${runner_id}" ]; then
|
||||||
|
fatal "Could not find runner with name ${runner_name}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Removing id ${runner_id}"
|
||||||
|
|
||||||
|
#--------------------------------------
|
||||||
|
# Remove the runner
|
||||||
|
#--------------------------------------
|
||||||
|
curl -s -X DELETE ${base_api_url}/${runner_scope}/actions/runners/${runner_id} -H "authorization: token ${RUNNER_CFG_PAT}"
|
||||||
|
|
||||||
|
echo "Done."
|
||||||
76
scripts/remove-svc.sh
Executable file
@@ -0,0 +1,76 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
#
|
||||||
|
# Removes a runner running as a service
|
||||||
|
# Must be run on the machine where the service is run
|
||||||
|
#
|
||||||
|
# Examples:
|
||||||
|
# RUNNER_CFG_PAT=<yourPAT> ./remove-svc.sh myuser/myrepo
|
||||||
|
# RUNNER_CFG_PAT=<yourPAT> ./remove-svc.sh myorg
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# export RUNNER_CFG_PAT=<yourPAT>
|
||||||
|
# ./remove-svc scope name
|
||||||
|
#
|
||||||
|
# scope required repo (:owner/:repo) or org (:organization)
|
||||||
|
# name optional defaults to hostname. name to uninstall and remove
|
||||||
|
#
|
||||||
|
# Notes:
|
||||||
|
# PATS over envvars are more secure
|
||||||
|
# Should be used on VMs and not containers
|
||||||
|
# Works on OSX and Linux
|
||||||
|
# Assumes x64 arch
|
||||||
|
#
|
||||||
|
|
||||||
|
runner_scope=${1}
|
||||||
|
runner_name=${2:-$(hostname)}
|
||||||
|
|
||||||
|
echo "Uninstalling runner ${runner_name} @ ${runner_scope}"
|
||||||
|
sudo echo
|
||||||
|
|
||||||
|
function fatal()
|
||||||
|
{
|
||||||
|
echo "error: $1" >&2
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
|
||||||
|
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
|
||||||
|
|
||||||
|
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
|
||||||
|
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
|
||||||
|
|
||||||
|
runner_plat=linux
|
||||||
|
[ ! -z "$(which sw_vers)" ] && runner_plat=osx;
|
||||||
|
|
||||||
|
#--------------------------------------
|
||||||
|
# Get a remove token
|
||||||
|
#--------------------------------------
|
||||||
|
echo
|
||||||
|
echo "Generating a removal token..."
|
||||||
|
|
||||||
|
# if the scope has a slash, it's a repo runner
|
||||||
|
base_api_url="https://api.github.com/orgs"
|
||||||
|
if [[ "$runner_scope" == *\/* ]]; then
|
||||||
|
base_api_url="https://api.github.com/repos"
|
||||||
|
fi
|
||||||
|
|
||||||
|
export REMOVE_TOKEN=$(curl -s -X POST ${base_api_url}/${runner_scope}/actions/runners/remove-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
|
||||||
|
|
||||||
|
if [ -z "$REMOVE_TOKEN" ]; then fatal "Failed to get a token"; fi
|
||||||
|
|
||||||
|
#---------------------------------------
|
||||||
|
# Stop and uninstall the service
|
||||||
|
#---------------------------------------
|
||||||
|
echo
|
||||||
|
echo "Uninstall the service ..."
|
||||||
|
pushd ./runner
|
||||||
|
prefix=""
|
||||||
|
if [ "${runner_plat}" == "linux" ]; then
|
||||||
|
prefix="sudo "
|
||||||
|
fi
|
||||||
|
${prefix}./svc.sh stop
|
||||||
|
${prefix}./svc.sh uninstall
|
||||||
|
${prefix}./config.sh remove --token $REMOVE_TOKEN
|
||||||
233
src/Misc/dotnet-install.ps1
vendored
@@ -69,6 +69,8 @@
|
|||||||
.PARAMETER ProxyUseDefaultCredentials
|
.PARAMETER ProxyUseDefaultCredentials
|
||||||
Default: false
|
Default: false
|
||||||
Use default credentials, when using proxy address.
|
Use default credentials, when using proxy address.
|
||||||
|
.PARAMETER ProxyBypassList
|
||||||
|
If set with ProxyAddress, will provide the list of comma separated urls that will bypass the proxy
|
||||||
.PARAMETER SkipNonVersionedFiles
|
.PARAMETER SkipNonVersionedFiles
|
||||||
Default: false
|
Default: false
|
||||||
Skips installing non-versioned files if they already exist, such as dotnet.exe.
|
Skips installing non-versioned files if they already exist, such as dotnet.exe.
|
||||||
@@ -96,6 +98,7 @@ param(
|
|||||||
[string]$FeedCredential,
|
[string]$FeedCredential,
|
||||||
[string]$ProxyAddress,
|
[string]$ProxyAddress,
|
||||||
[switch]$ProxyUseDefaultCredentials,
|
[switch]$ProxyUseDefaultCredentials,
|
||||||
|
[string[]]$ProxyBypassList=@(),
|
||||||
[switch]$SkipNonVersionedFiles,
|
[switch]$SkipNonVersionedFiles,
|
||||||
[switch]$NoCdn
|
[switch]$NoCdn
|
||||||
)
|
)
|
||||||
@@ -119,11 +122,27 @@ $VersionRegEx="/\d+\.\d+[^/]+/"
|
|||||||
$OverrideNonVersionedFiles = !$SkipNonVersionedFiles
|
$OverrideNonVersionedFiles = !$SkipNonVersionedFiles
|
||||||
|
|
||||||
function Say($str) {
|
function Say($str) {
|
||||||
Write-Host "dotnet-install: $str"
|
try
|
||||||
|
{
|
||||||
|
Write-Host "dotnet-install: $str"
|
||||||
|
}
|
||||||
|
catch
|
||||||
|
{
|
||||||
|
# Some platforms cannot utilize Write-Host (Azure Functions, for instance). Fall back to Write-Output
|
||||||
|
Write-Output "dotnet-install: $str"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function Say-Verbose($str) {
|
function Say-Verbose($str) {
|
||||||
Write-Verbose "dotnet-install: $str"
|
try
|
||||||
|
{
|
||||||
|
Write-Verbose "dotnet-install: $str"
|
||||||
|
}
|
||||||
|
catch
|
||||||
|
{
|
||||||
|
# Some platforms cannot utilize Write-Verbose (Azure Functions, for instance). Fall back to Write-Output
|
||||||
|
Write-Output "dotnet-install: $str"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function Say-Invocation($Invocation) {
|
function Say-Invocation($Invocation) {
|
||||||
@@ -154,7 +173,16 @@ function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [in
|
|||||||
function Get-Machine-Architecture() {
|
function Get-Machine-Architecture() {
|
||||||
Say-Invocation $MyInvocation
|
Say-Invocation $MyInvocation
|
||||||
|
|
||||||
# possible values: amd64, x64, x86, arm64, arm
|
# On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems.
|
||||||
|
# To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432.
|
||||||
|
# PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE.
|
||||||
|
# Possible values: amd64, x64, x86, arm64, arm
|
||||||
|
|
||||||
|
if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null )
|
||||||
|
{
|
||||||
|
return $ENV:PROCESSOR_ARCHITEW6432
|
||||||
|
}
|
||||||
|
|
||||||
return $ENV:PROCESSOR_ARCHITECTURE
|
return $ENV:PROCESSOR_ARCHITECTURE
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -228,7 +256,11 @@ function GetHTTPResponse([Uri] $Uri)
|
|||||||
|
|
||||||
if($ProxyAddress) {
|
if($ProxyAddress) {
|
||||||
$HttpClientHandler = New-Object System.Net.Http.HttpClientHandler
|
$HttpClientHandler = New-Object System.Net.Http.HttpClientHandler
|
||||||
$HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{Address=$ProxyAddress;UseDefaultCredentials=$ProxyUseDefaultCredentials}
|
$HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{
|
||||||
|
Address=$ProxyAddress;
|
||||||
|
UseDefaultCredentials=$ProxyUseDefaultCredentials;
|
||||||
|
BypassList = $ProxyBypassList;
|
||||||
|
}
|
||||||
$HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler
|
$HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
@@ -684,3 +716,196 @@ Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath
|
|||||||
|
|
||||||
Say "Installation finished"
|
Say "Installation finished"
|
||||||
exit 0
|
exit 0
|
||||||
|
|
||||||
|
# SIG # Begin signature block
|
||||||
|
# MIIjlgYJKoZIhvcNAQcCoIIjhzCCI4MCAQExDzANBglghkgBZQMEAgEFADB5Bgor
|
||||||
|
# BgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG
|
||||||
|
# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCCXdb9pJ+MI1iFd
|
||||||
|
# 2hUVOaNmZYt6e48+bQNJm9/Rbj3u3qCCDYUwggYDMIID66ADAgECAhMzAAABiK9S
|
||||||
|
# 1rmSbej5AAAAAAGIMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNVBAYTAlVTMRMwEQYD
|
||||||
|
# VQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNy
|
||||||
|
# b3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01pY3Jvc29mdCBDb2RlIFNpZ25p
|
||||||
|
# bmcgUENBIDIwMTEwHhcNMjAwMzA0MTgzOTQ4WhcNMjEwMzAzMTgzOTQ4WjB0MQsw
|
||||||
|
# CQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9u
|
||||||
|
# ZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMR4wHAYDVQQDExVNaWNy
|
||||||
|
# b3NvZnQgQ29ycG9yYXRpb24wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
|
||||||
|
# AQCSCNryE+Cewy2m4t/a74wZ7C9YTwv1PyC4BvM/kSWPNs8n0RTe+FvYfU+E9uf0
|
||||||
|
# t7nYlAzHjK+plif2BhD+NgdhIUQ8sVwWO39tjvQRHjP2//vSvIfmmkRoML1Ihnjs
|
||||||
|
# 9kQiZQzYRDYYRp9xSQYmRwQjk5hl8/U7RgOiQDitVHaU7BT1MI92lfZRuIIDDYBd
|
||||||
|
# vXtbclYJMVOwqZtv0O9zQCret6R+fRSGaDNfEEpcILL+D7RV3M4uaJE4Ta6KAOdv
|
||||||
|
# V+MVaJp1YXFTZPKtpjHO6d9pHQPZiG7NdC6QbnRGmsa48uNQrb6AfmLKDI1Lp31W
|
||||||
|
# MogTaX5tZf+CZT9PSuvjOCLNAgMBAAGjggGCMIIBfjAfBgNVHSUEGDAWBgorBgEE
|
||||||
|
# AYI3TAgBBggrBgEFBQcDAzAdBgNVHQ4EFgQUj9RJL9zNrPcL10RZdMQIXZN7MG8w
|
||||||
|
# VAYDVR0RBE0wS6RJMEcxLTArBgNVBAsTJE1pY3Jvc29mdCBJcmVsYW5kIE9wZXJh
|
||||||
|
# dGlvbnMgTGltaXRlZDEWMBQGA1UEBRMNMjMwMDEyKzQ1ODM4NjAfBgNVHSMEGDAW
|
||||||
|
# gBRIbmTlUAXTgqoXNzcitW2oynUClTBUBgNVHR8ETTBLMEmgR6BFhkNodHRwOi8v
|
||||||
|
# d3d3Lm1pY3Jvc29mdC5jb20vcGtpb3BzL2NybC9NaWNDb2RTaWdQQ0EyMDExXzIw
|
||||||
|
# MTEtMDctMDguY3JsMGEGCCsGAQUFBwEBBFUwUzBRBggrBgEFBQcwAoZFaHR0cDov
|
||||||
|
# L3d3dy5taWNyb3NvZnQuY29tL3BraW9wcy9jZXJ0cy9NaWNDb2RTaWdQQ0EyMDEx
|
||||||
|
# XzIwMTEtMDctMDguY3J0MAwGA1UdEwEB/wQCMAAwDQYJKoZIhvcNAQELBQADggIB
|
||||||
|
# ACnXo8hjp7FeT+H6iQlV3CcGnkSbFvIpKYafgzYCFo3UHY1VHYJVb5jHEO8oG26Q
|
||||||
|
# qBELmak6MTI+ra3WKMTGhE1sEIlowTcp4IAs8a5wpCh6Vf4Z/bAtIppP3p3gXk2X
|
||||||
|
# 8UXTc+WxjQYsDkFiSzo/OBa5hkdW1g4EpO43l9mjToBdqEPtIXsZ7Hi1/6y4gK0P
|
||||||
|
# ... (remaining base64-encoded Authenticode signature and timestamp data omitted) ...
# SIG # End signature block
13  src/Misc/dotnet-install.sh  (vendored)
@@ -172,7 +172,7 @@ get_current_os_name() {
 return 0
 elif [ "$uname" = "FreeBSD" ]; then
 echo "freebsd"
 return 0
 elif [ "$uname" = "Linux" ]; then
 local linux_platform_name
 linux_platform_name="$(get_linux_platform_name)" || { echo "linux" && return 0 ; }
@@ -728,11 +728,12 @@ downloadcurl() {
 # Append feed_credential as late as possible before calling curl to avoid logging feed_credential
 remote_path="${remote_path}${feed_credential}"
 
+local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
 local failed=false
 if [ -z "$out_path" ]; then
-curl --retry 10 -sSL -f --create-dirs "$remote_path" || failed=true
+curl $curl_options "$remote_path" || failed=true
 else
-curl --retry 10 -sSL -f --create-dirs -o "$out_path" "$remote_path" || failed=true
+curl $curl_options -o "$out_path" "$remote_path" || failed=true
 fi
 if [ "$failed" = true ]; then
 say_verbose "Curl download failed"
@@ -748,12 +749,12 @@ downloadwget() {
 
 # Append feed_credential as late as possible before calling wget to avoid logging feed_credential
 remote_path="${remote_path}${feed_credential}"
+local wget_options="--tries 20 --waitretry 2 --connect-timeout 15 "
 local failed=false
 if [ -z "$out_path" ]; then
-wget -q --tries 10 -O - "$remote_path" || failed=true
+wget -q $wget_options -O - "$remote_path" || failed=true
 else
-wget --tries 10 -O "$out_path" "$remote_path" || failed=true
+wget $wget_options -O "$out_path" "$remote_path" || failed=true
 fi
 if [ "$failed" = true ]; then
 say_verbose "Wget download failed"
3  src/Misc/expressionFunc/hashFiles/.eslintignore  (Normal file)
@@ -0,0 +1,3 @@
+dist/
+lib/
+node_modules/
59  src/Misc/expressionFunc/hashFiles/.eslintrc.json  (Normal file)
@@ -0,0 +1,59 @@
+{
+"plugins": ["jest", "@typescript-eslint"],
+"extends": ["plugin:github/es6"],
+"parser": "@typescript-eslint/parser",
+"parserOptions": {
+"ecmaVersion": 9,
+"sourceType": "module",
+"project": "./tsconfig.json"
+},
+"rules": {
+"eslint-comments/no-use": "off",
+"import/no-namespace": "off",
+"no-console": "off",
+"no-unused-vars": "off",
+"@typescript-eslint/no-unused-vars": "error",
+"@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
+"@typescript-eslint/no-require-imports": "error",
+"@typescript-eslint/array-type": "error",
+"@typescript-eslint/await-thenable": "error",
+"@typescript-eslint/ban-ts-ignore": "error",
+"camelcase": "off",
+"@typescript-eslint/camelcase": "error",
+"@typescript-eslint/class-name-casing": "error",
+"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
+"@typescript-eslint/func-call-spacing": ["error", "never"],
+"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
+"@typescript-eslint/no-array-constructor": "error",
+"@typescript-eslint/no-empty-interface": "error",
+"@typescript-eslint/no-explicit-any": "error",
+"@typescript-eslint/no-extraneous-class": "error",
+"@typescript-eslint/no-for-in-array": "error",
+"@typescript-eslint/no-inferrable-types": "error",
+"@typescript-eslint/no-misused-new": "error",
+"@typescript-eslint/no-namespace": "error",
+"@typescript-eslint/no-non-null-assertion": "warn",
+"@typescript-eslint/no-object-literal-type-assertion": "error",
+"@typescript-eslint/no-unnecessary-qualifier": "error",
+"@typescript-eslint/no-unnecessary-type-assertion": "error",
+"@typescript-eslint/no-useless-constructor": "error",
+"@typescript-eslint/no-var-requires": "error",
+"@typescript-eslint/prefer-for-of": "warn",
+"@typescript-eslint/prefer-function-type": "warn",
+"@typescript-eslint/prefer-includes": "error",
+"@typescript-eslint/prefer-interface": "error",
+"@typescript-eslint/prefer-string-starts-ends-with": "error",
+"@typescript-eslint/promise-function-async": "error",
+"@typescript-eslint/require-array-sort-compare": "error",
+"@typescript-eslint/restrict-plus-operands": "error",
+"semi": "off",
+"@typescript-eslint/semi": ["error", "never"],
+"@typescript-eslint/type-annotation-spacing": "error",
+"@typescript-eslint/unbound-method": "error"
+},
+"env": {
+"node": true,
+"es6": true,
+"jest/globals": true
+}
+}
3  src/Misc/expressionFunc/hashFiles/.prettierignore  (Normal file)
@@ -0,0 +1,3 @@
+dist/
+lib/
+node_modules/
11  src/Misc/expressionFunc/hashFiles/.prettierrc.json  (Normal file)
@@ -0,0 +1,11 @@
+{
+"printWidth": 80,
+"tabWidth": 2,
+"useTabs": false,
+"semi": false,
+"singleQuote": true,
+"trailingComma": "none",
+"bracketSpacing": false,
+"arrowParens": "avoid",
+"parser": "typescript"
+}
1  src/Misc/expressionFunc/hashFiles/README.md  (Normal file)
@@ -0,0 +1 @@
+To update hashFiles under `Misc/layoutbin` run `npm install && npm run all`
2616  src/Misc/expressionFunc/hashFiles/package-lock.json  (generated, Normal file)
File diff suppressed because it is too large
35  src/Misc/expressionFunc/hashFiles/package.json  (Normal file)
@@ -0,0 +1,35 @@
+{
+"name": "hashFiles",
+"version": "1.0.0",
+"description": "GitHub Actions HashFiles() expression function",
+"main": "lib/hashFiles.js",
+"scripts": {
+"build": "tsc",
+"format": "prettier --write **/*.ts",
+"format-check": "prettier --check **/*.ts",
+"lint": "eslint src/**/*.ts",
+"pack": "ncc build -o ../../layoutbin/hashFiles",
+"all": "npm run build && npm run format && npm run lint && npm run pack"
+},
+"repository": {
+"type": "git",
+"url": "git+https://github.com/actions/runner.git"
+},
+"keywords": [
+"actions"
+],
+"author": "GitHub Actions",
+"license": "MIT",
+"dependencies": {
+"@actions/glob": "^0.1.0"
+},
+"devDependencies": {
+"@types/node": "^12.7.12",
+"@typescript-eslint/parser": "^2.8.0",
+"@zeit/ncc": "^0.20.5",
+"eslint": "^6.8.0",
+"eslint-plugin-github": "^2.0.0",
+"prettier": "^1.19.1",
+"typescript": "^3.6.4"
+}
+}
55  src/Misc/expressionFunc/hashFiles/src/hashFiles.ts  (Normal file)
@@ -0,0 +1,55 @@
+import * as glob from '@actions/glob'
+import * as crypto from 'crypto'
+import * as fs from 'fs'
+import * as stream from 'stream'
+import * as util from 'util'
+import * as path from 'path'
+
+async function run(): Promise<void> {
+// arg0 -> node
+// arg1 -> hashFiles.js
+// env[followSymbolicLinks] = true/null
+// env[patterns] -> glob patterns
+let followSymbolicLinks = false
+const matchPatterns = process.env.patterns || ''
+if (process.env.followSymbolicLinks === 'true') {
+console.log('Follow symbolic links')
+followSymbolicLinks = true
+}
+
+console.log(`Match Pattern: ${matchPatterns}`)
+let hasMatch = false
+const githubWorkspace = process.cwd()
+const result = crypto.createHash('sha256')
+let count = 0
+const globber = await glob.create(matchPatterns, {followSymbolicLinks})
+for await (const file of globber.globGenerator()) {
+console.log(file)
+if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
+console.log(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`)
+continue
+}
+if (fs.statSync(file).isDirectory()) {
+console.log(`Skip directory '${file}'.`)
+continue
+}
+const hash = crypto.createHash('sha256')
+const pipeline = util.promisify(stream.pipeline)
+await pipeline(fs.createReadStream(file), hash)
+result.write(hash.digest())
+count++
+if (!hasMatch) {
+hasMatch = true
+}
+}
+result.end()
+
+if (hasMatch) {
+console.log(`Find ${count} files to hash.`)
+console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`)
+} else {
+console.error(`__OUTPUT____OUTPUT__`)
+}
+}
+
+run()
12  src/Misc/expressionFunc/hashFiles/tsconfig.json  (Normal file)
@@ -0,0 +1,12 @@
+{
+"compilerOptions": {
+"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
+"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
+"outDir": "./lib", /* Redirect output structure to the directory. */
+"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
+"strict": true, /* Enable all strict type-checking options. */
+"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
+"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
+},
+"exclude": ["node_modules", "**/*.test.ts"]
+}
@@ -23,5 +23,7 @@
 <key>ACTIONS_RUNNER_SVC</key>
 <string>1</string>
 </dict>
+<key>ProcessType</key>
+<string>Interactive</string>
 </dict>
 </plist>
@@ -1,6 +1,7 @@
 #!/bin/bash
 
 SVC_NAME="{{SvcNameVar}}"
+SVC_NAME=${SVC_NAME// /_}
 SVC_DESCRIPTION="{{SvcDescription}}"
 
 user_id=`id -u`
2623  src/Misc/layoutbin/hashFiles/index.js  (Normal file)
File diff suppressed because it is too large
@@ -9,7 +9,7 @@ fi
 
 # Determine OS type
 # Debian based OS (Debian, Ubuntu, Linux Mint) has /etc/debian_version
-# Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7) has /etc/redhat-release
+# Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7) has /etc/redhat-release
 # SUSE based OS (OpenSUSE, SUSE Enterprise) has ID_LIKE=suse in /etc/os-release
 
 function print_errormessage()
@@ -70,8 +70,8 @@ then
 exit 1
 fi
 
-# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
+# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
-apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
+apt install -y libicu66 || apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
 if [ $? -ne 0 ]
 then
 echo "'apt' failed with exit code '$?'"
@@ -99,8 +99,8 @@ then
 exit 1
 fi
 
-# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
+# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
-apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
+apt-get install -y libicu66 || apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
 if [ $? -ne 0 ]
 then
 echo "'apt-get' failed with exit code '$?'"
@@ -116,12 +116,12 @@ then
 elif [ -e /etc/redhat-release ]
 then
 echo "The current OS is Fedora based"
-echo "--------Redhat Version--------"
+echo "--Fedora/RHEL/CentOS Version--"
 cat /etc/redhat-release
 echo "------------------------------"
 
 # use dnf on fedora
-# use yum on centos and redhat
+# use yum on centos and rhel
 if [ -e /etc/fedora-release ]
 then
 command -v dnf
@@ -191,7 +191,7 @@ then
 redhatRelease=$(</etc/redhat-release)
 if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]
 then
-echo "The current OS is Red Hat Enterprise Linux 6 or Centos 6"
+echo "The current OS is Red Hat Enterprise Linux 6 or CentOS 6"
 
 # Install known dependencies, as a best effort.
 # The remaining dependencies are covered by the GitHub doc that will be shown by `print_rhel6message`
13  src/Misc/layoutbin/macos-run-invoker.js  (Normal file)
@@ -0,0 +1,13 @@
+const { spawn } = require('child_process');
+// argv[0] = node
+// argv[1] = macos-run-invoker.js
+var shell = process.argv[2];
+var args = process.argv.slice(3);
+console.log(`::debug::macos-run-invoker: ${shell}`);
+console.log(`::debug::macos-run-invoker: ${JSON.stringify(args)}`);
+var launch = spawn(shell, args, { stdio: 'inherit' });
+launch.on('exit', function (code) {
+if (code !== 0) {
+process.exit(code);
+}
+});
@@ -1,6 +1,7 @@
 #!/bin/bash
 
 SVC_NAME="{{SvcNameVar}}"
+SVC_NAME=${SVC_NAME// /_}
 SVC_DESCRIPTION="{{SvcDescription}}"
 
 SVC_CMD=$1
@@ -62,12 +63,25 @@ function install()
 
 sed "s/{{User}}/${run_as_user}/g; s/{{Description}}/$(echo ${SVC_DESCRIPTION} | sed -e 's/[\/&]/\\&/g')/g; s/{{RunnerRoot}}/$(echo ${RUNNER_ROOT} | sed -e 's/[\/&]/\\&/g')/g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
 mv "${TEMP_PATH}" "${UNIT_PATH}" || failed "failed to copy unit file"
 
+# Recent Fedora based Linux (CentOS/Redhat) has SELinux enabled by default
+# We need to restore security context on the unit file we added otherwise SystemD have no access to it.
+command -v getenforce > /dev/null
+if [ $? -eq 0 ]
+then
+selinuxEnabled=$(getenforce)
+if [[ $selinuxEnabled == "Enforcing" ]]
+then
+# SELinux is enabled, we will need to Restore SELinux Context for the service file
+restorecon -r -v "${UNIT_PATH}" || failed "failed to restore SELinux context on ${UNIT_PATH}"
+fi
+fi
+
 # unit file should not be executable and world writable
-chmod 664 ${UNIT_PATH} || failed "failed to set permissions on ${UNIT_PATH}"
+chmod 664 "${UNIT_PATH}" || failed "failed to set permissions on ${UNIT_PATH}"
 systemctl daemon-reload || failed "failed to reload daemons"
 
 # Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
 cp ./bin/runsvc.sh ./runsvc.sh || failed "failed to copy runsvc.sh"
 chown ${run_as_uid}:${run_as_gid} ./runsvc.sh || failed "failed to set owner for runsvc.sh"
 chmod 755 ./runsvc.sh || failed "failed to set permission for runsvc.sh"
@@ -67,7 +67,7 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
 [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
 done
 DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
-cd $DIR
+cd "$DIR"
 
 source ./env.sh
 
@@ -3,8 +3,6 @@
 <packageSources>
 <!--To inherit the global NuGet package sources remove the <clear/> line below -->
 <clear />
-<add key="dotnet-core" value="https://www.myget.org/F/dotnet-core/api/v3/index.json" />
-<add key="dotnet-buildtools" value="https://www.myget.org/F/dotnet-buildtools/api/v3/index.json" />
 <add key="api.nuget.org" value="https://api.nuget.org/v3/index.json" />
 </packageSources>
 </configuration>
@@ -9,26 +9,27 @@ namespace GitHub.Runner.Common
 {
 private static readonly EscapeMapping[] _escapeMappings = new[]
 {
-new EscapeMapping(token: "%", replacement: "%25"),
 new EscapeMapping(token: ";", replacement: "%3B"),
 new EscapeMapping(token: "\r", replacement: "%0D"),
 new EscapeMapping(token: "\n", replacement: "%0A"),
 new EscapeMapping(token: "]", replacement: "%5D"),
+new EscapeMapping(token: "%", replacement: "%25"),
 };
 
 private static readonly EscapeMapping[] _escapeDataMappings = new[]
 {
 new EscapeMapping(token: "\r", replacement: "%0D"),
 new EscapeMapping(token: "\n", replacement: "%0A"),
+new EscapeMapping(token: "%", replacement: "%25"),
 };
 
 private static readonly EscapeMapping[] _escapePropertyMappings = new[]
 {
-new EscapeMapping(token: "%", replacement: "%25"),
 new EscapeMapping(token: "\r", replacement: "%0D"),
 new EscapeMapping(token: "\n", replacement: "%0A"),
 new EscapeMapping(token: ":", replacement: "%3A"),
 new EscapeMapping(token: ",", replacement: "%2C"),
+new EscapeMapping(token: "%", replacement: "%25"),
 };
 
 private readonly Dictionary<string, string> _properties = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -15,6 +15,9 @@ namespace GitHub.Runner.Common
 [DataContract]
 public sealed class RunnerSettings
 {
+[DataMember(Name = "IsHostedServer", EmitDefaultValue = false)]
+private bool? _isHostedServer;
+
 [DataMember(EmitDefaultValue = false)]
 public int AgentId { get; set; }
 
@@ -42,6 +45,21 @@ namespace GitHub.Runner.Common
 [DataMember(EmitDefaultValue = false)]
 public string MonitorSocketAddress { get; set; }
 
+[IgnoreDataMember]
+public bool IsHostedServer
+{
+get
+{
+// Old runners do not have this property. Hosted runners likely don't have this property either.
+return _isHostedServer ?? true;
+}
+
+set
+{
+_isHostedServer = value;
+}
+}
+
 /// <summary>
 // Computed property for convenience. Can either return:
 // 1. If runner was configured at the repo level, returns something like: "myorg/myrepo"
@@ -69,6 +87,15 @@ namespace GitHub.Runner.Common
 return repoOrOrgName;
 }
 }
+
+[OnSerializing]
+private void OnSerializing(StreamingContext context)
+{
+if (_isHostedServer.HasValue && _isHostedServer.Value)
+{
+_isHostedServer = null;
+}
+}
 }
 
 [ServiceLocator(Default = typeof(ConfigurationStore))]
@@ -78,10 +105,12 @@ namespace GitHub.Runner.Common
 bool IsServiceConfigured();
 bool HasCredentials();
 CredentialData GetCredentials();
+CredentialData GetMigratedCredentials();
 RunnerSettings GetSettings();
 void SaveCredential(CredentialData credential);
 void SaveSettings(RunnerSettings settings);
 void DeleteCredential();
+void DeleteMigratedCredential();
 void DeleteSettings();
 }
 
@@ -90,9 +119,11 @@ namespace GitHub.Runner.Common
 private string _binPath;
 private string _configFilePath;
 private string _credFilePath;
+private string _migratedCredFilePath;
 private string _serviceConfigFilePath;
 
 private CredentialData _creds;
+private CredentialData _migratedCreds;
 private RunnerSettings _settings;
 
 public override void Initialize(IHostContext hostContext)
@@ -114,6 +145,9 @@ namespace GitHub.Runner.Common
 _credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials);
 Trace.Info("CredFilePath: {0}", _credFilePath);
 
+_migratedCredFilePath = hostContext.GetConfigFile(WellKnownConfigFile.MigratedCredentials);
+Trace.Info("MigratedCredFilePath: {0}", _migratedCredFilePath);
+
 _serviceConfigFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Service);
 Trace.Info("ServiceConfigFilePath: {0}", _serviceConfigFilePath);
 }
@@ -123,7 +157,7 @@ namespace GitHub.Runner.Common
 public bool HasCredentials()
 {
 Trace.Info("HasCredentials()");
-bool credsStored = (new FileInfo(_credFilePath)).Exists;
+bool credsStored = (new FileInfo(_credFilePath)).Exists || (new FileInfo(_migratedCredFilePath)).Exists;
 Trace.Info("stored {0}", credsStored);
 return credsStored;
 }
@@ -154,6 +188,16 @@ namespace GitHub.Runner.Common
 return _creds;
 }
 
+public CredentialData GetMigratedCredentials()
+{
+if (_migratedCreds == null && File.Exists(_migratedCredFilePath))
+{
+_migratedCreds = IOUtil.LoadObject<CredentialData>(_migratedCredFilePath);
+}
+
+return _migratedCreds;
+}
+
 public RunnerSettings GetSettings()
 {
 if (_settings == null)
@@ -206,6 +250,12 @@ namespace GitHub.Runner.Common
 public void DeleteCredential()
 {
 IOUtil.Delete(_credFilePath, default(CancellationToken));
+IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
+}
+
+public void DeleteMigratedCredential()
+{
+IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
 }
 
 public void DeleteSettings()
@@ -19,11 +19,13 @@ namespace GitHub.Runner.Common
 {
 Runner,
 Credentials,
+MigratedCredentials,
 RSACredentials,
 Service,
 CredentialStore,
 Certificates,
 Options,
+SetupInfo,
 }
 
 public static class Constants
@@ -85,9 +87,10 @@ namespace GitHub.Runner.Common
 public static class Args
 {
 public static readonly string Auth = "auth";
+public static readonly string Labels = "labels";
 public static readonly string MonitorSocketAddress = "monitorsocketaddress";
 public static readonly string Name = "name";
-public static readonly string Pool = "pool";
+public static readonly string RunnerGroup = "runnergroup";
 public static readonly string StartupType = "startuptype";
 public static readonly string Url = "url";
 public static readonly string UserName = "username";
@@ -134,6 +137,15 @@ namespace GitHub.Runner.Common
 public const int RunnerUpdating = 3;
 public const int RunOnceRunnerUpdating = 4;
 }
+
+public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
+public static readonly string WorkerCrash = "WORKER_CRASH";
+}
+
+public static class RunnerEvent
+{
+public static readonly string Register = "register";
+public static readonly string Remove = "remove";
 }
 
 public static class Pipeline
@@ -162,7 +174,8 @@ namespace GitHub.Runner.Common
 public static class Path
 {
 public static readonly string ActionsDirectory = "_actions";
-public static readonly string ActionManifestFile = "action.yml";
+public static readonly string ActionManifestYmlFile = "action.yml";
+public static readonly string ActionManifestYamlFile = "action.yaml";
 public static readonly string BinDirectory = "bin";
 public static readonly string DiagDirectory = "_diag";
 public static readonly string ExternalsDirectory = "externals";
@@ -56,6 +56,10 @@ namespace GitHub.Runner.Common
 Add<T>(extensions, "GitHub.Runner.Worker.EndGroupCommandExtension, Runner.Worker");
 Add<T>(extensions, "GitHub.Runner.Worker.EchoCommandExtension, Runner.Worker");
 break;
+case "GitHub.Runner.Worker.IFileCommandExtension":
+Add<T>(extensions, "GitHub.Runner.Worker.AddPathFileCommand, Runner.Worker");
+Add<T>(extensions, "GitHub.Runner.Worker.SetEnvFileCommand, Runner.Worker");
+break;
 default:
 // This should never happen.
 throw new NotSupportedException($"Unexpected extension type: '{typeof(T).FullName}'");
@@ -1,19 +1,18 @@
-using GitHub.Runner.Common.Util;
+using System;
-using System;
 using System.Collections.Concurrent;
 using System.Collections.Generic;
+using System.Diagnostics;
+using System.Diagnostics.Tracing;
 using System.Globalization;
 using System.IO;
 using System.Linq;
+using System.Net.Http;
+using System.Net.Http.Headers;
 using System.Reflection;
 using System.Runtime.Loader;
 using System.Threading;
 using System.Threading.Tasks;
-using System.Diagnostics;
-using System.Net.Http;
-using System.Diagnostics.Tracing;
 using GitHub.DistributedTask.Logging;
-using System.Net.Http.Headers;
 using GitHub.Runner.Sdk;
 
 namespace GitHub.Runner.Common
@@ -24,7 +23,7 @@ namespace GitHub.Runner.Common
 CancellationToken RunnerShutdownToken { get; }
 ShutdownReason RunnerShutdownReason { get; }
 ISecretMasker SecretMasker { get; }
-ProductInfoHeaderValue UserAgent { get; }
+List<ProductInfoHeaderValue> UserAgents { get; }
 RunnerWebProxy WebProxy { get; }
 string GetDirectory(WellKnownDirectory directory);
 string GetConfigFile(WellKnownConfigFile configFile);
@@ -54,7 +53,7 @@ namespace GitHub.Runner.Common
 private readonly ConcurrentDictionary<Type, object> _serviceInstances = new ConcurrentDictionary<Type, object>();
 private readonly ConcurrentDictionary<Type, Type> _serviceTypes = new ConcurrentDictionary<Type, Type>();
 private readonly ISecretMasker _secretMasker = new SecretMasker();
-private readonly ProductInfoHeaderValue _userAgent = new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version);
+private readonly List<ProductInfoHeaderValue> _userAgents = new List<ProductInfoHeaderValue>() { new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version) };
 private CancellationTokenSource _runnerShutdownTokenSource = new CancellationTokenSource();
 private object _perfLock = new object();
 private Tracing _trace;
@@ -72,7 +71,7 @@ namespace GitHub.Runner.Common
 public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token;
 public ShutdownReason RunnerShutdownReason { get; private set; }
 public ISecretMasker SecretMasker => _secretMasker;
-public ProductInfoHeaderValue UserAgent => _userAgent;
+public List<ProductInfoHeaderValue> UserAgents => _userAgents;
 public RunnerWebProxy WebProxy => _webProxy;
 public HostContext(string hostType, string logFile = null)
 {
@@ -89,6 +88,7 @@ namespace GitHub.Runner.Common
 this.SecretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape);
 this.SecretMasker.AddValueEncoder(ValueEncoders.UriDataEscape);
 this.SecretMasker.AddValueEncoder(ValueEncoders.XmlDataEscape);
+this.SecretMasker.AddValueEncoder(ValueEncoders.TrimDoubleQuotes);
 
 // Create the trace manager.
 if (string.IsNullOrEmpty(logFile))
@@ -189,6 +189,17 @@ namespace GitHub.Runner.Common
 {
 _trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
 }
+
+var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
+if (File.Exists(credFile))
+{
+var credData = IOUtil.LoadObject<CredentialData>(credFile);
+if (credData != null &&
+credData.Data.TryGetValue("clientId", out var clientId))
+{
+_userAgents.Add(new ProductInfoHeaderValue($"RunnerId", clientId));
+}
+}
 }
 
 public string GetDirectory(WellKnownDirectory directory)
@@ -281,6 +292,12 @@ namespace GitHub.Runner.Common
 ".credentials");
 break;
 
+case WellKnownConfigFile.MigratedCredentials:
+path = Path.Combine(
+GetDirectory(WellKnownDirectory.Root),
+".credentials_migrated");
+break;
+
 case WellKnownConfigFile.RSACredentials:
 path = Path.Combine(
 GetDirectory(WellKnownDirectory.Root),
@@ -316,6 +333,13 @@ namespace GitHub.Runner.Common
 GetDirectory(WellKnownDirectory.Root),
 ".options");
 break;
+
+case WellKnownConfigFile.SetupInfo:
+path = Path.Combine(
+GetDirectory(WellKnownDirectory.Root),
+".setup_info");
+break;
+
 default:
 throw new NotSupportedException($"Unexpected well known config file: '{configFile}'");
 }
@@ -590,9 +614,8 @@ namespace GitHub.Runner.Common
 {
 public static HttpClientHandler CreateHttpClientHandler(this IHostContext context)
 {
-HttpClientHandler clientHandler = new HttpClientHandler();
+var handlerFactory = context.GetService<IHttpClientHandlerFactory>();
-clientHandler.Proxy = context.WebProxy;
+return handlerFactory.CreateClientHandler(context.WebProxy);
-return clientHandler;
 }
 }
 
19  src/Runner.Common/HttpClientHandlerFactory.cs  (Normal file)
@@ -0,0 +1,19 @@
+using System.Net.Http;
+using GitHub.Runner.Sdk;
+
+namespace GitHub.Runner.Common
+{
+[ServiceLocator(Default = typeof(HttpClientHandlerFactory))]
+public interface IHttpClientHandlerFactory : IRunnerService
+{
+HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy);
+}
+
+public class HttpClientHandlerFactory : RunnerService, IHttpClientHandlerFactory
+{
+public HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy)
+{
+return new HttpClientHandler() { Proxy = webProxy };
+}
+}
+}
@@ -16,12 +16,14 @@ namespace GitHub.Runner.Common
|
|||||||
// logging and console
|
// logging and console
|
||||||
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
|
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
|
||||||
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken);
|
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken);
|
||||||
|
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken);
|
||||||
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
|
         Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
         Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
         Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
         Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
         Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent;
         Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
+        Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken);
     }

     public sealed class JobServer : RunnerService, IJobServer
@@ -78,6 +80,12 @@ namespace GitHub.Runner.Common
             return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
         }

+        public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken)
+        {
+            CheckConnection();
+            return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine, cancellationToken: cancellationToken);
+        }
+
         public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken)
         {
             CheckConnection();
@@ -113,5 +121,14 @@ namespace GitHub.Runner.Common
             CheckConnection();
             return _taskClient.GetTimelineAsync(scopeIdentifier, hubName, planId, timelineId, includeRecords: true, cancellationToken: cancellationToken);
         }
+
+        //-----------------------------------------------------------------
+        // Action download info
+        //-----------------------------------------------------------------
+        public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken)
+        {
+            CheckConnection();
+            return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, actions, cancellationToken: cancellationToken);
+        }
     }
 }
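The hunks above give JobServer a feed overload that carries the absolute starting line number of a console-line batch, alongside the new ResolveActionDownloadInfoAsync passthrough. A minimal sketch of a call site for the overload; the jobServer instance, GUIDs and line values below are illustrative and not taken from this diff:

    // Append two console lines whose position in the step's log starts at line 101.
    var lines = new List<string> { "line 101 text", "line 102 text" };
    await jobServer.AppendTimelineRecordFeedAsync(
        scopeIdentifier, hubName, planId,
        timelineId, timelineRecordId, stepId,
        lines,
        startLine: 101,
        cancellationToken: CancellationToken.None);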
@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common
         event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
         Task ShutdownAsync();
         void Start(Pipelines.AgentJobRequestMessage jobRequest);
-        void QueueWebConsoleLine(Guid stepRecordId, string line);
+        void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
         void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
         void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord);
     }
@@ -155,10 +155,10 @@ namespace GitHub.Runner.Common
             Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
         }

-        public void QueueWebConsoleLine(Guid stepRecordId, string line)
+        public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
         {
             Trace.Verbose("Enqueue web console line queue: {0}", line);
-            _webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line));
+            _webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line, lineNumber));
         }

         public void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource)
@@ -214,7 +214,7 @@ namespace GitHub.Runner.Common
             }

             // Group consolelines by timeline record of each step
-            Dictionary<Guid, List<string>> stepsConsoleLines = new Dictionary<Guid, List<string>>();
+            Dictionary<Guid, List<TimelineRecordLogLine>> stepsConsoleLines = new Dictionary<Guid, List<TimelineRecordLogLine>>();
             List<Guid> stepRecordIds = new List<Guid>(); // We need to keep lines in order
             int linesCounter = 0;
             ConsoleLineInfo lineInfo;
@@ -222,7 +222,7 @@ namespace GitHub.Runner.Common
             {
                 if (!stepsConsoleLines.ContainsKey(lineInfo.StepRecordId))
                 {
-                    stepsConsoleLines[lineInfo.StepRecordId] = new List<string>();
+                    stepsConsoleLines[lineInfo.StepRecordId] = new List<TimelineRecordLogLine>();
                     stepRecordIds.Add(lineInfo.StepRecordId);
                 }

@@ -232,7 +232,7 @@ namespace GitHub.Runner.Common
                     lineInfo.Line = $"{lineInfo.Line.Substring(0, 1024)}...";
                 }

-                stepsConsoleLines[lineInfo.StepRecordId].Add(lineInfo.Line);
+                stepsConsoleLines[lineInfo.StepRecordId].Add(new TimelineRecordLogLine(lineInfo.Line, lineInfo.LineNumber));
                 linesCounter++;

                 // process at most about 500 lines of web console line during regular timer dequeue task.
@@ -247,13 +247,13 @@ namespace GitHub.Runner.Common
                 {
                     // Split consolelines into batch, each batch will container at most 100 lines.
                     int batchCounter = 0;
-                    List<List<string>> batchedLines = new List<List<string>>();
+                    List<List<TimelineRecordLogLine>> batchedLines = new List<List<TimelineRecordLogLine>>();
                     foreach (var line in stepsConsoleLines[stepRecordId])
                     {
                         var currentBatch = batchedLines.ElementAtOrDefault(batchCounter);
                         if (currentBatch == null)
                         {
-                            batchedLines.Add(new List<string>());
+                            batchedLines.Add(new List<TimelineRecordLogLine>());
                             currentBatch = batchedLines.ElementAt(batchCounter);
                         }

@@ -275,7 +275,6 @@ namespace GitHub.Runner.Common
                     {
                         Trace.Info($"Skip {batchedLines.Count - 2} batches web console lines for last run");
                         batchedLines = batchedLines.TakeLast(2).ToList();
-                        batchedLines[0].Insert(0, "...");
                     }

                     int errorCount = 0;
@@ -284,7 +283,15 @@ namespace GitHub.Runner.Common
                         try
                         {
                             // we will not requeue failed batch, since the web console lines are time sensitive.
-                            await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch, default(CancellationToken));
+                            if (batch[0].LineNumber.HasValue)
+                            {
+                                await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber.Value, default(CancellationToken));
+                            }
+                            else
+                            {
+                                await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), default(CancellationToken));
+                            }
+
                             if (_firstConsoleOutputs)
                             {
                                 HostContext.WritePerfCounter($"WorkerJobServerQueueAppendFirstConsoleOutput_{_planId.ToString()}");
@@ -653,13 +660,15 @@ namespace GitHub.Runner.Common

     internal class ConsoleLineInfo
     {
-        public ConsoleLineInfo(Guid recordId, string line)
+        public ConsoleLineInfo(Guid recordId, string line, long? lineNumber)
         {
             this.StepRecordId = recordId;
             this.Line = line;
+            this.LineNumber = lineNumber;
         }

         public Guid StepRecordId { get; set; }
         public string Line { get; set; }
+        public long? LineNumber { get; set; }
     }
 }
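With these changes each queued console line can carry an optional absolute line number, and the queue keeps it through batching (via TimelineRecordLogLine), so a flushed batch is sent with batch[0].LineNumber as its starting offset. An illustrative producer-side sketch; the jobServerQueue instance and step record id are hypothetical, not part of this diff:

    // Lines queued with a number let the service place them at an exact offset;
    // lines queued without one keep the previous append-only behavior.
    jobServerQueue.QueueWebConsoleLine(stepRecordId, "##[group]Build", lineNumber: 1);
    jobServerQueue.QueueWebConsoleLine(stepRecordId, "compiling...", lineNumber: 2);
    jobServerQueue.QueueWebConsoleLine(stepRecordId, "done");   // lineNumber defaults to null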
@@ -24,7 +24,6 @@ namespace GitHub.Runner.Common

         private Guid _timelineId;
         private Guid _timelineRecordId;
-        private string _pageId;
         private FileStream _pageData;
         private StreamWriter _pageWriter;
         private int _byteCount;
@@ -40,7 +39,6 @@ namespace GitHub.Runner.Common
         {
             base.Initialize(hostContext);
             _totalLines = 0;
-            _pageId = Guid.NewGuid().ToString();
             _pagesFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), PagingFolder);
             _jobServerQueue = HostContext.GetService<IJobServerQueue>();
             Directory.CreateDirectory(_pagesFolder);
@@ -102,7 +100,7 @@ namespace GitHub.Runner.Common
         {
             EndPage();
             _byteCount = 0;
-            _dataFileName = Path.Combine(_pagesFolder, $"{_pageId}_{++_pageCount}.log");
+            _dataFileName = Path.Combine(_pagesFolder, $"{_timelineId}_{_timelineRecordId}_{++_pageCount}.log");
             _pageData = new FileStream(_dataFileName, FileMode.CreateNew);
             _pageWriter = new StreamWriter(_pageData, System.Text.Encoding.UTF8);
         }
@@ -41,7 +41,7 @@ namespace GitHub.Runner.Common

         // job request
         Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
-        Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken cancellationToken);
+        Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, CancellationToken cancellationToken);
         Task<TaskAgentJobRequest> FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken);

         // agent package
@@ -296,10 +296,10 @@ namespace GitHub.Runner.Common
         // JobRequest
         //-----------------------------------------------------------------

-        public Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken cancellationToken = default(CancellationToken))
+        public Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId = null, CancellationToken cancellationToken = default(CancellationToken))
         {
             CheckConnection(RunnerConnectionType.JobRequest);
-            return _requestTaskAgentClient.RenewAgentRequestAsync(poolId, requestId, lockToken, cancellationToken: cancellationToken);
+            return _requestTaskAgentClient.RenewAgentRequestAsync(poolId, requestId, lockToken, orchestrationId: orchestrationId, cancellationToken: cancellationToken);
         }

         public Task<TaskAgentJobRequest> FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken = default(CancellationToken))
@@ -334,5 +334,20 @@ namespace GitHub.Runner.Common
             CheckConnection(RunnerConnectionType.Generic);
             return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState);
         }
+
+        //-----------------------------------------------------------------
+        // Runner Auth Url
+        //-----------------------------------------------------------------
+        public Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId)
+        {
+            CheckConnection(RunnerConnectionType.MessageQueue);
+            return _messageTaskAgentClient.GetAgentAuthUrlAsync(runnerPoolId, runnerId);
+        }
+
+        public Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error)
+        {
+            CheckConnection(RunnerConnectionType.MessageQueue);
+            return _messageTaskAgentClient.ReportAgentAuthUrlMigrationErrorAsync(runnerPoolId, runnerId, error);
+        }
     }
 }
@@ -96,13 +96,14 @@ namespace GitHub.Runner.Common
             Trace.Info($"WRITE: {message}");
             if (!Silent)
             {
-                if(colorCode != null)
+                if (colorCode != null)
                 {
                     Console.ForegroundColor = colorCode.Value;
                     Console.Write(message);
                     Console.ResetColor();
                 }
-                else {
+                else
+                {
                     Console.Write(message);
                 }
             }
@@ -120,13 +121,14 @@ namespace GitHub.Runner.Common
             Trace.Info($"WRITE LINE: {line}");
             if (!Silent)
             {
-                if(colorCode != null)
+                if (colorCode != null)
                 {
                     Console.ForegroundColor = colorCode.Value;
                     Console.WriteLine(line);
                     Console.ResetColor();
                 }
-                else {
+                else
+                {
                     Console.WriteLine(line);
                 }
             }
src/Runner.Common/Util/EncodingUtil.cs (new file)
@@ -0,0 +1,51 @@
+using System;
+using System.Threading;
+using System.Threading.Tasks;
+using GitHub.Runner.Sdk;
+using GitHub.Runner.Common;
+
+namespace GitHub.Runner.Common.Util
+{
+    public static class EncodingUtil
+    {
+        public static async Task SetEncoding(IHostContext hostContext, Tracing trace, CancellationToken cancellationToken)
+        {
+#if OS_WINDOWS
+            try
+            {
+                if (Console.InputEncoding.CodePage != 65001)
+                {
+                    using (var p = hostContext.CreateService<IProcessInvoker>())
+                    {
+                        // Use UTF8 code page
+                        int exitCode = await p.ExecuteAsync(workingDirectory: hostContext.GetDirectory(WellKnownDirectory.Work),
+                                                fileName: WhichUtil.Which("chcp", true, trace),
+                                                arguments: "65001",
+                                                environment: null,
+                                                requireExitCodeZero: false,
+                                                outputEncoding: null,
+                                                killProcessOnCancel: false,
+                                                redirectStandardIn: null,
+                                                inheritConsoleHandler: true,
+                                                cancellationToken: cancellationToken);
+                        if (exitCode == 0)
+                        {
+                            trace.Info("Successfully returned to code page 65001 (UTF8)");
+                        }
+                        else
+                        {
+                            trace.Warning($"'chcp 65001' failed with exit code {exitCode}");
+                        }
+                    }
+                }
+            }
+            catch (Exception ex)
+            {
+                trace.Warning($"'chcp 65001' failed with exception {ex.Message}");
+            }
+#endif
+            // Dummy variable to prevent compiler error CS1998: "This async method lacks 'await' operators and will run synchronously..."
+            await Task.CompletedTask;
+        }
+    }
+}
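The new EncodingUtil helper shells out to chcp 65001 on Windows so console input and output use UTF-8, and compiles to a no-op elsewhere. A sketch of how a caller might invoke it from a runner service; the HostContext and Trace names are assumed to come from the surrounding service base class and are not part of this diff:

    // Best-effort switch to the UTF-8 code page before prompting the user;
    // failures are only traced as warnings inside SetEncoding.
    await EncodingUtil.SetEncoding(HostContext, Trace, CancellationToken.None);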
@@ -39,9 +39,10 @@ namespace GitHub.Runner.Listener
         private readonly string[] validArgs =
         {
             Constants.Runner.CommandLine.Args.Auth,
+            Constants.Runner.CommandLine.Args.Labels,
             Constants.Runner.CommandLine.Args.MonitorSocketAddress,
             Constants.Runner.CommandLine.Args.Name,
-            Constants.Runner.CommandLine.Args.Pool,
+            Constants.Runner.CommandLine.Args.RunnerGroup,
             Constants.Runner.CommandLine.Args.StartupType,
             Constants.Runner.CommandLine.Args.Token,
             Constants.Runner.CommandLine.Args.Url,
@@ -168,6 +169,15 @@ namespace GitHub.Runner.Listener
                 validator: Validators.NonEmptyValidator);
         }

+        public string GetRunnerGroupName(string defaultPoolName = null)
+        {
+            return GetArgOrPrompt(
+                name: Constants.Runner.CommandLine.Args.RunnerGroup,
+                description: "Enter the name of the runner group to add this runner to:",
+                defaultValue: defaultPoolName ?? "default",
+                validator: Validators.NonEmptyValidator);
+        }
+
         public string GetToken()
         {
             return GetArgOrPrompt(
@@ -190,7 +200,7 @@ namespace GitHub.Runner.Listener
         {
             return GetArgOrPrompt(
                 name: Constants.Runner.CommandLine.Args.Token,
-                description: "Enter runner deletion token:",
+                description: "Enter runner remove token:",
                 defaultValue: string.Empty,
                 validator: Validators.NonEmptyValidator);
         }
@@ -249,6 +259,24 @@ namespace GitHub.Runner.Listener
             return GetArg(Constants.Runner.CommandLine.Args.StartupType);
         }

+        public ISet<string> GetLabels()
+        {
+            var labelSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
+            string labels = GetArgOrPrompt(
+                name: Constants.Runner.CommandLine.Args.Labels,
+                description: $"This runner will have the following labels: 'self-hosted', '{VarUtil.OS}', '{VarUtil.OSArchitecture}' \nEnter any additional labels (ex. label-1,label-2):",
+                defaultValue: string.Empty,
+                validator: Validators.LabelsValidator,
+                isOptional: true);
+
+            if (!string.IsNullOrEmpty(labels))
+            {
+                labelSet = labels.Split(',').Where(x => !string.IsNullOrEmpty(x)).ToHashSet<string>(StringComparer.OrdinalIgnoreCase);
+            }
+
+            return labelSet;
+        }
+
         //
         // Private helpers.
         //
@@ -280,7 +308,8 @@ namespace GitHub.Runner.Listener
             string name,
             string description,
             string defaultValue,
-            Func<string, bool> validator)
+            Func<string, bool> validator,
+            bool isOptional = false)
         {
             // Check for the arg in the command line parser.
             ArgUtil.NotNull(validator, nameof(validator));
@@ -291,7 +320,7 @@ namespace GitHub.Runner.Listener
             if (!string.IsNullOrEmpty(result))
             {
                 // After read the arg from input commandline args, remove it from Arg dictionary,
                 // This will help if bad arg value passed through CommandLine arg, when ConfigurationManager ask CommandSetting the second time,
                 // It will prompt for input instead of continue use the bad input.
                 _trace.Info($"Remove {name} from Arg dictionary.");
                 RemoveArg(name);
@@ -311,7 +340,8 @@ namespace GitHub.Runner.Listener
                 secret: Constants.Runner.CommandLine.Args.Secrets.Any(x => string.Equals(x, name, StringComparison.OrdinalIgnoreCase)),
                 defaultValue: defaultValue,
                 validator: validator,
-                unattended: Unattended);
+                unattended: Unattended,
+                isOptional: isOptional);
         }

         private string GetEnvArg(string name)
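The new GetRunnerGroupName and GetLabels prompts reuse GetArgOrPrompt, so the values can come either from the command line or interactively, and GetLabels is registered as optional so an empty answer is accepted. A short sketch of the resulting behavior; the input string is an example, not from this diff:

    // If the user answers "gpu,linux-large" (or passes it via the new Labels argument),
    // the labels are split on commas into a case-insensitive set.
    ISet<string> labels = command.GetLabels();      // e.g. { "gpu", "linux-large" }
    // An empty answer leaves the set empty because the prompt uses isOptional: true.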
@@ -1,19 +1,18 @@
 using GitHub.DistributedTask.WebApi;
+using GitHub.Runner.Common;
 using GitHub.Runner.Common.Util;
+using GitHub.Runner.Sdk;
 using GitHub.Services.Common;
 using GitHub.Services.OAuth;
 using GitHub.Services.WebApi;
 using System;
 using System.Collections.Generic;
-using System.IO;
 using System.Linq;
-using System.Security.Cryptography;
-using System.Threading.Tasks;
-using System.Runtime.InteropServices;
-using GitHub.Runner.Common;
-using GitHub.Runner.Sdk;
 using System.Net.Http;
 using System.Net.Http.Headers;
+using System.Runtime.InteropServices;
+using System.Security.Cryptography;
+using System.Threading.Tasks;

 namespace GitHub.Runner.Listener.Configuration
 {
@@ -87,17 +86,17 @@ namespace GitHub.Runner.Listener.Configuration

             RunnerSettings runnerSettings = new RunnerSettings();

-            bool isHostedServer = false;
             // Loop getting url and creds until you can connect
             ICredentialProvider credProvider = null;
             VssCredentials creds = null;
             _term.WriteSection("Authentication");
             while (true)
             {
-                // Get the URL
+                // When testing against a dev deployment of Actions Service, set this environment variable
+                var useDevActionsServiceUrl = Environment.GetEnvironmentVariable("USE_DEV_ACTIONS_SERVICE_URL");
                 var inputUrl = command.GetUrl();
-                if (!inputUrl.Contains("github.com", StringComparison.OrdinalIgnoreCase) &&
-                    !inputUrl.Contains("github.localhost", StringComparison.OrdinalIgnoreCase))
+                if (inputUrl.Contains("codedev.ms", StringComparison.OrdinalIgnoreCase)
+                    || useDevActionsServiceUrl != null)
                 {
                     runnerSettings.ServerUrl = inputUrl;
                     // Get the credentials
@@ -109,7 +108,7 @@ namespace GitHub.Runner.Listener.Configuration
                 {
                     runnerSettings.GitHubUrl = inputUrl;
                     var githubToken = command.GetRunnerRegisterToken();
-                    GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken);
+                    GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken, Constants.RunnerEvent.Register);
                     runnerSettings.ServerUrl = authResult.TenantUrl;
                     creds = authResult.ToVssCredentials();
                     Trace.Info("cred retrieved via GitHub auth");
@@ -118,7 +117,20 @@ namespace GitHub.Runner.Listener.Configuration
                 try
                 {
                     // Determine the service deployment type based on connection data. (Hosted/OnPremises)
-                    isHostedServer = await IsHostedServer(runnerSettings.ServerUrl, creds);
+                    runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));

+                    // Warn if the Actions server url and GHES server url has different Host
+                    if (!runnerSettings.IsHostedServer)
+                    {
+                        // Example actionsServerUrl is https://my-ghes/_services/pipelines/[...]
+                        // Example githubServerUrl is https://my-ghes
+                        var actionsServerUrl = new Uri(runnerSettings.ServerUrl);
+                        var githubServerUrl = new Uri(runnerSettings.GitHubUrl);
+                        if (!string.Equals(actionsServerUrl.Authority, githubServerUrl.Authority, StringComparison.OrdinalIgnoreCase))
+                        {
+                            throw new InvalidOperationException($"GitHub Actions is not properly configured in GHES. GHES url: {runnerSettings.GitHubUrl}, Actions url: {runnerSettings.ServerUrl}.");
+                        }
+                    }
+
                     // Validate can connect.
                     await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), creds);
@@ -147,17 +159,34 @@ namespace GitHub.Runner.Listener.Configuration

             _term.WriteSection("Runner Registration");

-            //Get all the agent pools, and select the first private pool
+            // If we have more than one runner group available, allow the user to specify which one to be added into
+            string poolName = null;
+            TaskAgentPool agentPool = null;
             List<TaskAgentPool> agentPools = await _runnerServer.GetAgentPoolsAsync();
-            TaskAgentPool agentPool = agentPools?.Where(x => x.IsHosted == false).FirstOrDefault();
+            TaskAgentPool defaultPool = agentPools?.Where(x => x.IsInternal).FirstOrDefault();

-            if (agentPool == null)
+            if (agentPools?.Where(x => !x.IsHosted).Count() > 1)
             {
-                throw new TaskAgentPoolNotFoundException($"Could not find any private pool. Contact support.");
+                poolName = command.GetRunnerGroupName(defaultPool?.Name);
+                _term.WriteLine();
+                agentPool = agentPools.Where(x => string.Equals(poolName, x.Name, StringComparison.OrdinalIgnoreCase) && !x.IsHosted).FirstOrDefault();
             }
             else
             {
-                Trace.Info("Found a private pool with id {1} and name {2}", agentPool.Id, agentPool.Name);
+                agentPool = defaultPool;
+            }
+
+            if (agentPool == null && poolName == null)
+            {
+                throw new TaskAgentPoolNotFoundException($"Could not find any self-hosted runner groups. Contact support.");
+            }
+            else if (agentPool == null && poolName != null)
+            {
+                throw new TaskAgentPoolNotFoundException($"Could not find any self-hosted runner group named \"{poolName}\".");
+            }
+            else
+            {
+                Trace.Info("Found a self-hosted runner group with id {1} and name {2}", agentPool.Id, agentPool.Name);
                 runnerSettings.PoolId = agentPool.Id;
                 runnerSettings.PoolName = agentPool.Name;
             }
@@ -169,6 +198,9 @@ namespace GitHub.Runner.Listener.Configuration

             _term.WriteLine();

+            var userLabels = command.GetLabels();
+            _term.WriteLine();
+
             var agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
             Trace.Verbose("Returns {0} agents", agents.Count);
             agent = agents.FirstOrDefault();
@@ -178,7 +210,7 @@ namespace GitHub.Runner.Listener.Configuration
                 if (command.GetReplace())
                 {
                     // Update existing agent with new PublicKey, agent version.
-                    agent = UpdateExistingAgent(agent, publicKey);
+                    agent = UpdateExistingAgent(agent, publicKey, userLabels);

                     try
                     {
@@ -195,13 +227,13 @@ namespace GitHub.Runner.Listener.Configuration
                 else if (command.Unattended)
                 {
                     // if not replace and it is unattended config.
-                    throw new TaskAgentExistsException($"Pool {runnerSettings.PoolId} already contains a runner with name {runnerSettings.AgentName}.");
+                    throw new TaskAgentExistsException($"A runner exists with the same name {runnerSettings.AgentName}.");
                 }
             }
             else
             {
                 // Create a new agent.
-                agent = CreateNewAgent(runnerSettings.AgentName, publicKey);
+                agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels);

                 try
                 {
@@ -219,44 +251,11 @@ namespace GitHub.Runner.Listener.Configuration
             // Add Agent Id to settings
             runnerSettings.AgentId = agent.Id;

-            // respect the serverUrl resolve by server.
-            // in case of agent configured using collection url instead of account url.
-            string agentServerUrl;
-            if (agent.Properties.TryGetValidatedValue<string>("ServerUrl", out agentServerUrl) &&
-                !string.IsNullOrEmpty(agentServerUrl))
-            {
-                Trace.Info($"Agent server url resolve by server: '{agentServerUrl}'.");
-
-                // we need make sure the Schema/Host/Port component of the url remain the same.
-                UriBuilder inputServerUrl = new UriBuilder(runnerSettings.ServerUrl);
-                UriBuilder serverReturnedServerUrl = new UriBuilder(agentServerUrl);
-                if (Uri.Compare(inputServerUrl.Uri, serverReturnedServerUrl.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0)
-                {
-                    inputServerUrl.Path = serverReturnedServerUrl.Path;
-                    Trace.Info($"Replace server returned url's scheme://host:port component with user input server url's scheme://host:port: '{inputServerUrl.Uri.AbsoluteUri}'.");
-                    runnerSettings.ServerUrl = inputServerUrl.Uri.AbsoluteUri;
-                }
-                else
-                {
-                    runnerSettings.ServerUrl = agentServerUrl;
-                }
-            }
-
             // See if the server supports our OAuth key exchange for credentials
             if (agent.Authorization != null &&
                 agent.Authorization.ClientId != Guid.Empty &&
                 agent.Authorization.AuthorizationUrl != null)
             {
-                UriBuilder configServerUrl = new UriBuilder(runnerSettings.ServerUrl);
-                UriBuilder oauthEndpointUrlBuilder = new UriBuilder(agent.Authorization.AuthorizationUrl);
-                if (!isHostedServer && Uri.Compare(configServerUrl.Uri, oauthEndpointUrlBuilder.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0)
-                {
-                    oauthEndpointUrlBuilder.Scheme = configServerUrl.Scheme;
-                    oauthEndpointUrlBuilder.Host = configServerUrl.Host;
-                    oauthEndpointUrlBuilder.Port = configServerUrl.Port;
-                    Trace.Info($"Set oauth endpoint url's scheme://host:port component to match runner configure url's scheme://host:port: '{oauthEndpointUrlBuilder.Uri.AbsoluteUri}'.");
-                }
-
                 var credentialData = new CredentialData
                 {
                     Scheme = Constants.Configuration.OAuth,
@@ -264,7 +263,6 @@ namespace GitHub.Runner.Listener.Configuration
                     {
                         { "clientId", agent.Authorization.ClientId.ToString("D") },
                         { "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri },
-                        { "oauthEndpointUrl", oauthEndpointUrlBuilder.Uri.AbsoluteUri },
                     },
                 };

@@ -277,19 +275,22 @@ namespace GitHub.Runner.Listener.Configuration
                 throw new NotSupportedException("Message queue listen OAuth token.");
             }

-            // Testing agent connection, detect any protential connection issue, like local clock skew that cause OAuth token expired.
+            // Testing agent connection, detect any potential connection issue, like local clock skew that cause OAuth token expired.
             var credMgr = HostContext.GetService<ICredentialManager>();
             VssCredentials credential = credMgr.LoadCredentials();
             try
             {
                 await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
+                // ConnectAsync() hits _apis/connectionData which is an anonymous endpoint
+                // Need to hit an authenticate endpoint to trigger OAuth token exchange.
+                await _runnerServer.GetAgentPoolsAsync();
                 _term.WriteSuccessMessage("Runner connection is good");
             }
             catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is"))
             {
                 // there are two exception messages server send that indicate clock skew.
                 // 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}.
                 // 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
                 Trace.Error("Catch exception during test agent connection.");
                 Trace.Error(ex);
                 throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
@@ -373,13 +374,12 @@ namespace GitHub.Runner.Listener.Configuration
             else
             {
                 var githubToken = command.GetRunnerDeletionToken();
-                GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken);
+                GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken, Constants.RunnerEvent.Remove);
                 creds = authResult.ToVssCredentials();
                 Trace.Info("cred retrieved via GitHub auth");
             }

             // Determine the service deployment type based on connection data. (Hosted/OnPremises)
-            bool isHostedServer = await IsHostedServer(settings.ServerUrl, creds);
             await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds);

             var agents = await _runnerServer.GetAgentsAsync(settings.PoolId, settings.AgentName);
@@ -402,7 +402,7 @@ namespace GitHub.Runner.Listener.Configuration
                 _term.WriteLine("Cannot connect to server, because config files are missing. Skipping removing runner from the server.");
             }

             //delete credential config files
             currentAction = "Removing .credentials";
             if (hasCredentials)
             {
@@ -416,7 +416,7 @@ namespace GitHub.Runner.Listener.Configuration
                 _term.WriteLine("Does not exist. Skipping " + currentAction);
             }

             //delete settings config file
             currentAction = "Removing .runner";
             if (isConfigured)
             {
@@ -457,7 +457,7 @@ namespace GitHub.Runner.Listener.Configuration
         }


-        private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey)
+        private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels)
         {
             ArgUtil.NotNull(agent, nameof(agent));
             agent.Authorization = new TaskAgentAuthorization
@@ -465,18 +465,25 @@ namespace GitHub.Runner.Listener.Configuration
                 PublicKey = new TaskAgentPublicKey(publicKey.Exponent, publicKey.Modulus),
             };

-            // update - update instead of delete so we don't lose labels etc...
+            // update should replace the existing labels
             agent.Version = BuildConstants.RunnerPackage.Version;
             agent.OSDescription = RuntimeInformation.OSDescription;

-            agent.Labels.Add("self-hosted");
-            agent.Labels.Add(VarUtil.OS);
-            agent.Labels.Add(VarUtil.OSArchitecture);
+            agent.Labels.Clear();
+            agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
+            agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
+            agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
+
+            foreach (var userLabel in userLabels)
+            {
+                agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
+            }
+
             return agent;
         }

-        private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey)
+        private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels)
         {
             TaskAgent agent = new TaskAgent(agentName)
             {
@@ -489,45 +496,51 @@ namespace GitHub.Runner.Listener.Configuration
                 OSDescription = RuntimeInformation.OSDescription,
             };

-            agent.Labels.Add("self-hosted");
-            agent.Labels.Add(VarUtil.OS);
-            agent.Labels.Add(VarUtil.OSArchitecture);
+            agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
+            agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
+            agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
+
+            foreach (var userLabel in userLabels)
+            {
+                agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
+            }
+
             return agent;
         }

-        private async Task<bool> IsHostedServer(string serverUrl, VssCredentials credentials)
+        private bool IsHostedServer(UriBuilder gitHubUrl)
         {
-            // Determine the service deployment type based on connection data. (Hosted/OnPremises)
-            var locationServer = HostContext.GetService<ILocationServer>();
-            VssConnection connection = VssUtil.CreateConnection(new Uri(serverUrl), credentials);
-            await locationServer.ConnectAsync(connection);
-            try
-            {
-                var connectionData = await locationServer.GetConnectionDataAsync();
-                Trace.Info($"Server deployment type: {connectionData.DeploymentType}");
-                return connectionData.DeploymentType.HasFlag(DeploymentFlags.Hosted);
-            }
-            catch (Exception ex)
-            {
-                // Since the DeploymentType is Enum, deserialization exception means there is a new Enum member been added.
-                // It's more likely to be Hosted since OnPremises is always behind and customer can update their agent if are on-prem
-                Trace.Error(ex);
-                return true;
-            }
+            return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
+                string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
+                string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
         }

-        private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken)
+        private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
         {
-            var gitHubUrl = new UriBuilder(githubUrl);
-            var githubApiUrl = $"https://api.{gitHubUrl.Host}/repos/{gitHubUrl.Path.Trim('/')}/actions-runners/registration";
+            var githubApiUrl = "";
+            var gitHubUrlBuilder = new UriBuilder(githubUrl);
+            if (IsHostedServer(gitHubUrlBuilder))
+            {
+                githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runner-registration";
+            }
+            else
+            {
+                githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runner-registration";
+            }
+
             using (var httpClientHandler = HostContext.CreateHttpClientHandler())
             using (var httpClient = new HttpClient(httpClientHandler))
             {
                 httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
-                httpClient.DefaultRequestHeaders.UserAgent.Add(HostContext.UserAgent);
-                httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.github.shuri-preview+json"));
-                var response = await httpClient.PostAsync(githubApiUrl, new StringContent("", null, "application/json"));
+                httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
+                var bodyObject = new Dictionary<string, string>()
+                {
+                    {"url", githubUrl},
+                    {"runner_event", runnerEvent}
+                };
+
+                var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
+
                 if (response.IsSuccessStatusCode)
                 {
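IsHostedServer is now decided purely from the GitHub URL's host, and the registration endpoint is derived from it instead of being probed via connection data. A sketch of the two paths; the hosts below are examples, not values from this diff:

    // github.com, www.github.com and github.localhost count as hosted:
    //   https://api.github.com/actions/runner-registration
    // any other host is treated as GHES:
    //   https://my-ghes.example.com/api/v3/actions/runner-registration
    var hosted = IsHostedServer(new UriBuilder("https://github.com/my-org/my-repo"));    // true
    var onPrem = IsHostedServer(new UriBuilder("https://my-ghes.example.com/my-org"));   // false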
@@ -50,6 +50,18 @@ namespace GitHub.Runner.Listener.Configuration
             }

             CredentialData credData = store.GetCredentials();
+            var migratedCred = store.GetMigratedCredentials();
+            if (migratedCred != null)
+            {
+                credData = migratedCred;
+
+                // Re-write .credentials with Token URL
+                store.SaveCredential(credData);
+
+                // Delete .credentials_migrated
+                store.DeleteMigratedCredential();
+            }
+
             ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);
             credProv.CredentialData = credData;

@@ -1,6 +1,5 @@
 using System;
 using GitHub.Runner.Common;
-using GitHub.Runner.Common.Util;
 using GitHub.Runner.Sdk;
 using GitHub.Services.Common;
 using GitHub.Services.OAuth;
@@ -29,7 +28,7 @@ namespace GitHub.Runner.Listener.Configuration
             var authorizationUrl = this.CredentialData.Data.GetValueOrDefault("authorizationUrl", null);

             // For back compat with .credential file that doesn't has 'oauthEndpointUrl' section
-            var oathEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
+            var oauthEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);

             ArgUtil.NotNullOrEmpty(clientId, nameof(clientId));
             ArgUtil.NotNullOrEmpty(authorizationUrl, nameof(authorizationUrl));
@@ -39,7 +38,7 @@ namespace GitHub.Runner.Listener.Configuration
             var keyManager = context.GetService<IRSAKeyManager>();
             var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
             var clientCredential = new VssOAuthJwtBearerClientCredential(clientId, authorizationUrl, signingCredentials);
-            var agentCredential = new VssOAuthCredential(new Uri(oathEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);
+            var agentCredential = new VssOAuthCredential(new Uri(oauthEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);

             // Construct a credentials cache with a single OAuth credential for communication. The windows credential
             // is explicitly set to null to ensure we never do that negotiation.
@@ -20,7 +20,8 @@ namespace GitHub.Runner.Listener.Configuration
             bool secret,
             string defaultValue,
             Func<String, bool> validator,
-            bool unattended);
+            bool unattended,
+            bool isOptional = false);
     }

     public sealed class PromptManager : RunnerService, IPromptManager
@@ -56,7 +57,8 @@ namespace GitHub.Runner.Listener.Configuration
             bool secret,
             string defaultValue,
             Func<string, bool> validator,
-            bool unattended)
+            bool unattended,
+            bool isOptional = false)
         {
             Trace.Info(nameof(ReadValue));
             ArgUtil.NotNull(validator, nameof(validator));
@@ -70,6 +72,10 @@ namespace GitHub.Runner.Listener.Configuration
                 {
                     return defaultValue;
                 }
+                else if (isOptional)
+                {
+                    return string.Empty;
+                }

                 // Otherwise throw.
                 throw new Exception($"Invalid configuration provided for {argName}. Terminating unattended configuration.");
@@ -85,18 +91,28 @@ namespace GitHub.Runner.Listener.Configuration
                 {
                     _terminal.Write($"[press Enter for {defaultValue}] ");
                 }
+                else if (isOptional){
+                    _terminal.Write($"[press Enter to skip] ");
+                }

                 // Read and trim the value.
                 value = secret ? _terminal.ReadSecret() : _terminal.ReadLine();
                 value = value?.Trim() ?? string.Empty;

                 // Return the default if not specified.
-                if (string.IsNullOrEmpty(value) && !string.IsNullOrEmpty(defaultValue))
+                if (string.IsNullOrEmpty(value))
                 {
-                    Trace.Info($"Falling back to the default: '{defaultValue}'");
-                    return defaultValue;
+                    if (!string.IsNullOrEmpty(defaultValue))
+                    {
+                        Trace.Info($"Falling back to the default: '{defaultValue}'");
+                        return defaultValue;
+                    }
+                    else if (isOptional)
+                    {
+                        return string.Empty;
+                    }
                 }

                 // Return the value if it is not empty and it is valid.
                 // Otherwise try the loop again.
                 if (!string.IsNullOrEmpty(value))
|
|||||||
using GitHub.Runner.Common.Util;
|
using GitHub.Runner.Common.Util;
|
||||||
using GitHub.Runner.Sdk;
|
using GitHub.Runner.Sdk;
|
||||||
using System;
|
using System;
|
||||||
|
using System.Linq;
|
||||||
using System.IO;
|
using System.IO;
|
||||||
using System.Security.Principal;
|
using System.Security.Principal;
|
||||||
|
|
||||||
@@ -46,6 +47,21 @@ namespace GitHub.Runner.Listener.Configuration
|
|||||||
string.Equals(value, "N", StringComparison.CurrentCultureIgnoreCase);
|
string.Equals(value, "N", StringComparison.CurrentCultureIgnoreCase);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static bool LabelsValidator(string labels)
|
||||||
|
{
|
||||||
|
if (!string.IsNullOrEmpty(labels))
|
||||||
|
{
|
||||||
|
var labelSet = labels.Split(',').Where(x => !string.IsNullOrEmpty(x)).ToHashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||||
|
|
||||||
|
if (labelSet.Any(x => x.Length > 256))
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
public static bool NonEmptyValidator(string value)
|
public static bool NonEmptyValidator(string value)
|
||||||
{
|
{
|
||||||
return !string.IsNullOrEmpty(value);
|
return !string.IsNullOrEmpty(value);
|
||||||
|
|||||||
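The new LabelsValidator accepts an empty string (the labels prompt is optional) and only rejects input when an individual label exceeds 256 characters. A few example evaluations based on the added code:

    Validators.LabelsValidator("");                     // true  - empty input is allowed
    Validators.LabelsValidator("gpu,linux-large");      // true
    Validators.LabelsValidator(new string('a', 300));   // false - a single label longer than 256 chars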
@@ -12,12 +12,14 @@ using System.Linq;
 using GitHub.Services.Common;
 using GitHub.Runner.Common;
 using GitHub.Runner.Sdk;
+using GitHub.Services.WebApi.Jwt;

 namespace GitHub.Runner.Listener
 {
     [ServiceLocator(Default = typeof(JobDispatcher))]
     public interface IJobDispatcher : IRunnerService
     {
+        bool Busy { get; }
         TaskCompletionSource<bool> RunOnceJobCompleted { get; }
         void Run(Pipelines.AgentJobRequestMessage message, bool runOnce = false);
         bool Cancel(JobCancelMessage message);
@@ -69,6 +71,8 @@ namespace GitHub.Runner.Listener

         public TaskCompletionSource<bool> RunOnceJobCompleted => _runOnceJobCompleted;

+        public bool Busy { get; private set; }
+
         public void Run(Pipelines.AgentJobRequestMessage jobRequestMessage, bool runOnce = false)
         {
             Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
@@ -83,15 +87,30 @@ namespace GitHub.Runner.Listener
                 }
             }

+            var orchestrationId = string.Empty;
+            var systemConnection = jobRequestMessage.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
+            if (systemConnection?.Authorization != null &&
+                systemConnection.Authorization.Parameters.TryGetValue("AccessToken", out var accessToken) &&
+                !string.IsNullOrEmpty(accessToken))
+            {
+                var jwt = JsonWebToken.Create(accessToken);
+                var claims = jwt.ExtractClaims();
+                orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
+                if (!string.IsNullOrEmpty(orchestrationId))
+                {
+                    Trace.Info($"Pull OrchestrationId {orchestrationId} from JWT claims");
+                }
+            }
+
             WorkerDispatcher newDispatch = new WorkerDispatcher(jobRequestMessage.JobId, jobRequestMessage.RequestId);
             if (runOnce)
             {
                 Trace.Info("Start dispatcher for one time used runner.");
-                newDispatch.WorkerDispatch = RunOnceAsync(jobRequestMessage, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
+                newDispatch.WorkerDispatch = RunOnceAsync(jobRequestMessage, orchestrationId, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
             }
             else
             {
-                newDispatch.WorkerDispatch = RunAsync(jobRequestMessage, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
+                newDispatch.WorkerDispatch = RunAsync(jobRequestMessage, orchestrationId, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
             }

             _jobInfos.TryAdd(newDispatch.JobId, newDispatch);
@@ -247,7 +266,7 @@ namespace GitHub.Runner.Listener
                     Task completedTask = await Task.WhenAny(jobDispatch.WorkerDispatch, Task.Delay(TimeSpan.FromSeconds(45)));
                     if (completedTask != jobDispatch.WorkerDispatch)
                     {
-                        // at this point, the job exectuion might encounter some dead lock and even not able to be canclled.
+                        // at this point, the job execution might encounter some dead lock and even not able to be cancelled.
                         // no need to localize the exception string should never happen.
                         throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be canceled within 45 seconds.");
                     }
@@ -281,11 +300,11 @@ namespace GitHub.Runner.Listener
             }
         }

-        private async Task RunOnceAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
+        private async Task RunOnceAsync(Pipelines.AgentJobRequestMessage message, string orchestrationId, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
         {
             try
             {
-                await RunAsync(message, previousJobDispatch, jobRequestCancellationToken, workerCancelTimeoutKillToken);
+                await RunAsync(message, orchestrationId, previousJobDispatch, jobRequestCancellationToken, workerCancelTimeoutKillToken);
             }
             finally
             {
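Note: the orchestration id is read from the "orchid" claim of the SystemVssConnection access token and then passed through to the renew-request calls. The runner uses its own GitHub.Services.WebApi.Jwt helpers for that; the sketch below shows the same idea with only base class library types, decoding the JWT payload by hand. It performs no signature validation and is illustration only.

    using System;
    using System.Text;
    using System.Text.Json;

    static class OrchestrationIdSketch
    {
        // Reads one top-level claim out of an unvalidated JWT payload.
        public static string GetClaim(string jwt, string claimType)
        {
            var parts = jwt.Split('.');
            if (parts.Length != 3) return null;

            // JWT payloads are base64url encoded; restore standard base64 padding.
            var payload = parts[1].Replace('-', '+').Replace('_', '/');
            payload = payload.PadRight(payload.Length + (4 - payload.Length % 4) % 4, '=');

            using var doc = JsonDocument.Parse(Encoding.UTF8.GetString(Convert.FromBase64String(payload)));
            return doc.RootElement.TryGetProperty(claimType, out var value) ? value.GetString() : null;
        }
    }

    // var orchestrationId = OrchestrationIdSketch.GetClaim(accessToken, "orchid");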
@@ -294,192 +313,292 @@ namespace GitHub.Runner.Listener
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private async Task RunAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
|
private async Task RunAsync(Pipelines.AgentJobRequestMessage message, string orchestrationId, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
|
||||||
{
|
{
|
||||||
if (previousJobDispatch != null)
|
Busy = true;
|
||||||
|
try
|
||||||
{
|
{
|
||||||
Trace.Verbose($"Make sure the previous job request {previousJobDispatch.JobId} has successfully finished on worker.");
|
if (previousJobDispatch != null)
|
||||||
await EnsureDispatchFinished(previousJobDispatch);
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
Trace.Verbose($"This is the first job request.");
|
|
||||||
}
|
|
||||||
|
|
||||||
var term = HostContext.GetService<ITerminal>();
|
|
||||||
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
|
|
||||||
|
|
||||||
// first job request renew succeed.
|
|
||||||
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
|
|
||||||
var notification = HostContext.GetService<IJobNotification>();
|
|
||||||
|
|
||||||
// lock renew cancellation token.
|
|
||||||
using (var lockRenewalTokenSource = new CancellationTokenSource())
|
|
||||||
using (var workerProcessCancelTokenSource = new CancellationTokenSource())
|
|
||||||
{
|
|
||||||
long requestId = message.RequestId;
|
|
||||||
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat
|
|
||||||
|
|
||||||
// start renew job request
|
|
||||||
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
|
|
||||||
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, firstJobRequestRenewed, lockRenewalTokenSource.Token);
|
|
||||||
|
|
||||||
// wait till first renew succeed or job request is canceled
|
|
||||||
// not even start worker if the first renew fail
|
|
||||||
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
|
|
||||||
|
|
||||||
if (renewJobRequest.IsCompleted)
|
|
||||||
{
|
{
|
||||||
// renew job request task complete means we run out of retry for the first job request renew.
|
Trace.Verbose($"Make sure the previous job request {previousJobDispatch.JobId} has successfully finished on worker.");
|
||||||
Trace.Info($"Unable to renew job request for job {message.JobId} for the first time, stop dispatching job to worker.");
|
await EnsureDispatchFinished(previousJobDispatch);
|
||||||
return;
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
Trace.Verbose($"This is the first job request.");
|
||||||
}
|
}
|
||||||
|
|
||||||
if (jobRequestCancellationToken.IsCancellationRequested)
|
var term = HostContext.GetService<ITerminal>();
|
||||||
|
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
|
||||||
|
|
||||||
|
// first job request renew succeed.
|
||||||
|
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
|
||||||
|
var notification = HostContext.GetService<IJobNotification>();
|
||||||
|
|
||||||
|
// lock renew cancellation token.
|
||||||
|
using (var lockRenewalTokenSource = new CancellationTokenSource())
|
||||||
|
using (var workerProcessCancelTokenSource = new CancellationTokenSource())
|
||||||
{
|
{
|
||||||
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
long requestId = message.RequestId;
|
||||||
// stop renew lock
|
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat
|
||||||
lockRenewalTokenSource.Cancel();
|
|
||||||
// renew job request should never blows up.
|
|
||||||
await renewJobRequest;
|
|
||||||
|
|
||||||
// complete job request with result Cancelled
|
// start renew job request
|
||||||
await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
|
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
|
||||||
return;
|
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
|
||||||
}
|
|
||||||
|
|
||||||
HostContext.WritePerfCounter($"JobRequestRenewed_{requestId.ToString()}");
|
// wait till first renew succeed or job request is canceled
|
||||||
|
// not even start worker if the first renew fail
|
||||||
|
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
|
||||||
|
|
||||||
Task<int> workerProcessTask = null;
|
if (renewJobRequest.IsCompleted)
|
||||||
object _outputLock = new object();
|
|
||||||
List<string> workerOutput = new List<string>();
|
|
||||||
using (var processChannel = HostContext.CreateService<IProcessChannel>())
|
|
||||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
|
||||||
{
|
|
||||||
// Start the process channel.
|
|
||||||
// It's OK if StartServer bubbles an execption after the worker process has already started.
|
|
||||||
// The worker will shutdown after 30 seconds if it hasn't received the job message.
|
|
||||||
processChannel.StartServer(
|
|
||||||
// Delegate to start the child process.
|
|
||||||
startProcess: (string pipeHandleOut, string pipeHandleIn) =>
|
|
||||||
{
|
|
||||||
// Validate args.
|
|
||||||
ArgUtil.NotNullOrEmpty(pipeHandleOut, nameof(pipeHandleOut));
|
|
||||||
ArgUtil.NotNullOrEmpty(pipeHandleIn, nameof(pipeHandleIn));
|
|
||||||
|
|
||||||
// Save STDOUT from worker, worker will use STDOUT report unhandle exception.
|
|
||||||
processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
|
|
||||||
{
|
|
||||||
if (!string.IsNullOrEmpty(stdout.Data))
|
|
||||||
{
|
|
||||||
lock (_outputLock)
|
|
||||||
{
|
|
||||||
workerOutput.Add(stdout.Data);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Save STDERR from worker, worker will use STDERR on crash.
|
|
||||||
processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
|
|
||||||
{
|
|
||||||
if (!string.IsNullOrEmpty(stderr.Data))
|
|
||||||
{
|
|
||||||
lock (_outputLock)
|
|
||||||
{
|
|
||||||
workerOutput.Add(stderr.Data);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Start the child process.
|
|
||||||
HostContext.WritePerfCounter("StartingWorkerProcess");
|
|
||||||
var assemblyDirectory = HostContext.GetDirectory(WellKnownDirectory.Bin);
|
|
||||||
string workerFileName = Path.Combine(assemblyDirectory, _workerProcessName);
|
|
||||||
workerProcessTask = processInvoker.ExecuteAsync(
|
|
||||||
workingDirectory: assemblyDirectory,
|
|
||||||
fileName: workerFileName,
|
|
||||||
arguments: "spawnclient " + pipeHandleOut + " " + pipeHandleIn,
|
|
||||||
environment: null,
|
|
||||||
requireExitCodeZero: false,
|
|
||||||
outputEncoding: null,
|
|
||||||
killProcessOnCancel: true,
|
|
||||||
redirectStandardIn: null,
|
|
||||||
inheritConsoleHandler: false,
|
|
||||||
keepStandardInOpen: false,
|
|
||||||
highPriorityProcess: true,
|
|
||||||
cancellationToken: workerProcessCancelTokenSource.Token);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Send the job request message.
|
|
||||||
// Kill the worker process if sending the job message times out. The worker
|
|
||||||
// process may have successfully received the job message.
|
|
||||||
try
|
|
||||||
{
|
{
|
||||||
Trace.Info($"Send job request message to worker for job {message.JobId}.");
|
// renew job request task complete means we run out of retry for the first job request renew.
|
||||||
HostContext.WritePerfCounter($"RunnerSendingJobToWorker_{message.JobId}");
|
Trace.Info($"Unable to renew job request for job {message.JobId} for the first time, stop dispatching job to worker.");
|
||||||
using (var csSendJobRequest = new CancellationTokenSource(_channelTimeout))
|
return;
|
||||||
{
|
|
||||||
await processChannel.SendAsync(
|
|
||||||
messageType: MessageType.NewJobRequest,
|
|
||||||
body: JsonUtility.ToString(message),
|
|
||||||
cancellationToken: csSendJobRequest.Token);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
catch (OperationCanceledException)
|
|
||||||
{
|
|
||||||
// message send been cancelled.
|
|
||||||
// timeout 30 sec. kill worker.
|
|
||||||
Trace.Info($"Job request message sending for job {message.JobId} been cancelled, kill running worker.");
|
|
||||||
workerProcessCancelTokenSource.Cancel();
|
|
||||||
try
|
|
||||||
{
|
|
||||||
await workerProcessTask;
|
|
||||||
}
|
|
||||||
catch (OperationCanceledException)
|
|
||||||
{
|
|
||||||
Trace.Info("worker process has been killed.");
|
|
||||||
}
|
|
||||||
|
|
||||||
|
if (jobRequestCancellationToken.IsCancellationRequested)
|
||||||
|
{
|
||||||
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
||||||
// stop renew lock
|
// stop renew lock
|
||||||
lockRenewalTokenSource.Cancel();
|
lockRenewalTokenSource.Cancel();
|
||||||
// renew job request should never blows up.
|
// renew job request should never blows up.
|
||||||
await renewJobRequest;
|
await renewJobRequest;
|
||||||
|
|
||||||
// not finish the job request since the job haven't run on worker at all, we will not going to set a result to server.
|
// complete job request with result Cancelled
|
||||||
|
await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// we get first jobrequest renew succeed and start the worker process with the job message.
|
HostContext.WritePerfCounter($"JobRequestRenewed_{requestId.ToString()}");
|
||||||
// send notification to machine provisioner.
|
|
||||||
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
|
||||||
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
|
|
||||||
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
|
|
||||||
|
|
||||||
HostContext.WritePerfCounter($"SentJobToWorker_{requestId.ToString()}");
|
Task<int> workerProcessTask = null;
|
||||||
|
object _outputLock = new object();
|
||||||
try
|
List<string> workerOutput = new List<string>();
|
||||||
|
using (var processChannel = HostContext.CreateService<IProcessChannel>())
|
||||||
|
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||||
{
|
{
|
||||||
TaskResult resultOnAbandonOrCancel = TaskResult.Succeeded;
|
// Start the process channel.
|
||||||
// wait for renewlock, worker process or cancellation token been fired.
|
// It's OK if StartServer bubbles an execption after the worker process has already started.
|
||||||
var completedTask = await Task.WhenAny(renewJobRequest, workerProcessTask, Task.Delay(-1, jobRequestCancellationToken));
|
// The worker will shutdown after 30 seconds if it hasn't received the job message.
|
||||||
if (completedTask == workerProcessTask)
|
processChannel.StartServer(
|
||||||
{
|
// Delegate to start the child process.
|
||||||
// worker finished successfully, complete job request with result, attach unhandled exception reported by worker, stop renew lock, job has finished.
|
startProcess: (string pipeHandleOut, string pipeHandleIn) =>
|
||||||
int returnCode = await workerProcessTask;
|
|
||||||
Trace.Info($"Worker finished for job {message.JobId}. Code: " + returnCode);
|
|
||||||
|
|
||||||
string detailInfo = null;
|
|
||||||
if (!TaskResultUtil.IsValidReturnCode(returnCode))
|
|
||||||
{
|
{
|
||||||
detailInfo = string.Join(Environment.NewLine, workerOutput);
|
// Validate args.
|
||||||
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
|
ArgUtil.NotNullOrEmpty(pipeHandleOut, nameof(pipeHandleOut));
|
||||||
await LogWorkerProcessUnhandledException(message, detailInfo);
|
ArgUtil.NotNullOrEmpty(pipeHandleIn, nameof(pipeHandleIn));
|
||||||
|
|
||||||
|
// Save STDOUT from worker, worker will use STDOUT report unhandle exception.
|
||||||
|
processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
|
||||||
|
{
|
||||||
|
if (!string.IsNullOrEmpty(stdout.Data))
|
||||||
|
{
|
||||||
|
lock (_outputLock)
|
||||||
|
{
|
||||||
|
workerOutput.Add(stdout.Data);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Save STDERR from worker, worker will use STDERR on crash.
|
||||||
|
processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
|
||||||
|
{
|
||||||
|
if (!string.IsNullOrEmpty(stderr.Data))
|
||||||
|
{
|
||||||
|
lock (_outputLock)
|
||||||
|
{
|
||||||
|
workerOutput.Add(stderr.Data);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Start the child process.
|
||||||
|
HostContext.WritePerfCounter("StartingWorkerProcess");
|
||||||
|
var assemblyDirectory = HostContext.GetDirectory(WellKnownDirectory.Bin);
|
||||||
|
string workerFileName = Path.Combine(assemblyDirectory, _workerProcessName);
|
||||||
|
workerProcessTask = processInvoker.ExecuteAsync(
|
||||||
|
workingDirectory: assemblyDirectory,
|
||||||
|
fileName: workerFileName,
|
||||||
|
arguments: "spawnclient " + pipeHandleOut + " " + pipeHandleIn,
|
||||||
|
environment: null,
|
||||||
|
requireExitCodeZero: false,
|
||||||
|
outputEncoding: null,
|
||||||
|
killProcessOnCancel: true,
|
||||||
|
redirectStandardIn: null,
|
||||||
|
inheritConsoleHandler: false,
|
||||||
|
keepStandardInOpen: false,
|
||||||
|
highPriorityProcess: true,
|
||||||
|
cancellationToken: workerProcessCancelTokenSource.Token);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Send the job request message.
|
||||||
|
// Kill the worker process if sending the job message times out. The worker
|
||||||
|
// process may have successfully received the job message.
|
||||||
|
try
|
||||||
|
{
|
||||||
|
Trace.Info($"Send job request message to worker for job {message.JobId}.");
|
||||||
|
HostContext.WritePerfCounter($"RunnerSendingJobToWorker_{message.JobId}");
|
||||||
|
using (var csSendJobRequest = new CancellationTokenSource(_channelTimeout))
|
||||||
|
{
|
||||||
|
await processChannel.SendAsync(
|
||||||
|
messageType: MessageType.NewJobRequest,
|
||||||
|
body: JsonUtility.ToString(message),
|
||||||
|
cancellationToken: csSendJobRequest.Token);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (OperationCanceledException)
|
||||||
|
{
|
||||||
|
// message send been cancelled.
|
||||||
|
// timeout 30 sec. kill worker.
|
||||||
|
Trace.Info($"Job request message sending for job {message.JobId} been cancelled, kill running worker.");
|
||||||
|
workerProcessCancelTokenSource.Cancel();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
await workerProcessTask;
|
||||||
|
}
|
||||||
|
catch (OperationCanceledException)
|
||||||
|
{
|
||||||
|
Trace.Info("worker process has been killed.");
|
||||||
}
|
}
|
||||||
|
|
||||||
TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode);
|
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
||||||
Trace.Info($"finish job request for job {message.JobId} with result: {result}");
|
// stop renew lock
|
||||||
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {result}");
|
lockRenewalTokenSource.Cancel();
|
||||||
|
// renew job request should never blows up.
|
||||||
|
await renewJobRequest;
|
||||||
|
|
||||||
|
// not finish the job request since the job haven't run on worker at all, we will not going to set a result to server.
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// we get first jobrequest renew succeed and start the worker process with the job message.
|
||||||
|
// send notification to machine provisioner.
|
||||||
|
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
||||||
|
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
|
||||||
|
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
|
||||||
|
|
||||||
|
HostContext.WritePerfCounter($"SentJobToWorker_{requestId.ToString()}");
|
||||||
|
|
||||||
|
try
|
||||||
|
{
|
||||||
|
TaskResult resultOnAbandonOrCancel = TaskResult.Succeeded;
|
||||||
|
// wait for renewlock, worker process or cancellation token been fired.
|
||||||
|
var completedTask = await Task.WhenAny(renewJobRequest, workerProcessTask, Task.Delay(-1, jobRequestCancellationToken));
|
||||||
|
if (completedTask == workerProcessTask)
|
||||||
|
{
|
||||||
|
// worker finished successfully, complete job request with result, attach unhandled exception reported by worker, stop renew lock, job has finished.
|
||||||
|
int returnCode = await workerProcessTask;
|
||||||
|
Trace.Info($"Worker finished for job {message.JobId}. Code: " + returnCode);
|
||||||
|
|
||||||
|
string detailInfo = null;
|
||||||
|
if (!TaskResultUtil.IsValidReturnCode(returnCode))
|
||||||
|
{
|
||||||
|
detailInfo = string.Join(Environment.NewLine, workerOutput);
|
||||||
|
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
|
||||||
|
await LogWorkerProcessUnhandledException(message, detailInfo);
|
||||||
|
}
|
||||||
|
|
||||||
|
TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode);
|
||||||
|
Trace.Info($"finish job request for job {message.JobId} with result: {result}");
|
||||||
|
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {result}");
|
||||||
|
|
||||||
|
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
||||||
|
// stop renew lock
|
||||||
|
lockRenewalTokenSource.Cancel();
|
||||||
|
// renew job request should never blows up.
|
||||||
|
await renewJobRequest;
|
||||||
|
|
||||||
|
// complete job request
|
||||||
|
await CompleteJobRequestAsync(_poolId, message, lockToken, result, detailInfo);
|
||||||
|
|
||||||
|
// print out unhandled exception happened in worker after we complete job request.
|
||||||
|
// when we run out of disk space, report back to server has higher priority.
|
||||||
|
if (!string.IsNullOrEmpty(detailInfo))
|
||||||
|
{
|
||||||
|
Trace.Error("Unhandled exception happened in worker:");
|
||||||
|
Trace.Error(detailInfo);
|
||||||
|
}
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
else if (completedTask == renewJobRequest)
|
||||||
|
{
|
||||||
|
resultOnAbandonOrCancel = TaskResult.Abandoned;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
resultOnAbandonOrCancel = TaskResult.Canceled;
|
||||||
|
}
|
||||||
|
|
||||||
|
// renew job request completed or job request cancellation token been fired for RunAsync(jobrequestmessage)
|
||||||
|
// cancel worker gracefully first, then kill it after worker cancel timeout
|
||||||
|
try
|
||||||
|
{
|
||||||
|
Trace.Info($"Send job cancellation message to worker for job {message.JobId}.");
|
||||||
|
using (var csSendCancel = new CancellationTokenSource(_channelTimeout))
|
||||||
|
{
|
||||||
|
var messageType = MessageType.CancelRequest;
|
||||||
|
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
|
||||||
|
{
|
||||||
|
switch (HostContext.RunnerShutdownReason)
|
||||||
|
{
|
||||||
|
case ShutdownReason.UserCancelled:
|
||||||
|
messageType = MessageType.RunnerShutdown;
|
||||||
|
break;
|
||||||
|
case ShutdownReason.OperatingSystemShutdown:
|
||||||
|
messageType = MessageType.OperatingSystemShutdown;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await processChannel.SendAsync(
|
||||||
|
messageType: messageType,
|
||||||
|
body: string.Empty,
|
||||||
|
cancellationToken: csSendCancel.Token);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (OperationCanceledException)
|
||||||
|
{
|
||||||
|
// message send been cancelled.
|
||||||
|
Trace.Info($"Job cancel message sending for job {message.JobId} been cancelled, kill running worker.");
|
||||||
|
workerProcessCancelTokenSource.Cancel();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
await workerProcessTask;
|
||||||
|
}
|
||||||
|
catch (OperationCanceledException)
|
||||||
|
{
|
||||||
|
Trace.Info("worker process has been killed.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// wait worker to exit
|
||||||
|
// if worker doesn't exit within timeout, then kill worker.
|
||||||
|
completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken));
|
||||||
|
|
||||||
|
// worker haven't exit within cancellation timeout.
|
||||||
|
if (completedTask != workerProcessTask)
|
||||||
|
{
|
||||||
|
Trace.Info($"worker process for job {message.JobId} haven't exit within cancellation timout, kill running worker.");
|
||||||
|
workerProcessCancelTokenSource.Cancel();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
await workerProcessTask;
|
||||||
|
}
|
||||||
|
catch (OperationCanceledException)
|
||||||
|
{
|
||||||
|
Trace.Info("worker process has been killed.");
|
||||||
|
}
|
||||||
|
|
||||||
|
// When worker doesn't exit within cancel timeout, the runner will kill the worker process and worker won't finish upload job logs.
|
||||||
|
// The runner will try to upload these logs at this time.
|
||||||
|
await TryUploadUnfinishedLogs(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}");
|
||||||
|
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {resultOnAbandonOrCancel}");
|
||||||
|
// complete job request with cancel result, stop renew lock, job has finished.
|
||||||
|
|
||||||
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
||||||
// stop renew lock
|
// stop renew lock
|
||||||
@@ -488,111 +607,23 @@ namespace GitHub.Runner.Listener
|
|||||||
await renewJobRequest;
|
await renewJobRequest;
|
||||||
|
|
||||||
// complete job request
|
// complete job request
|
||||||
await CompleteJobRequestAsync(_poolId, message, lockToken, result, detailInfo);
|
await CompleteJobRequestAsync(_poolId, message, lockToken, resultOnAbandonOrCancel);
|
||||||
|
|
||||||
// print out unhandled exception happened in worker after we complete job request.
|
|
||||||
// when we run out of disk space, report back to server has higher priority.
|
|
||||||
if (!string.IsNullOrEmpty(detailInfo))
|
|
||||||
{
|
|
||||||
Trace.Error("Unhandled exception happened in worker:");
|
|
||||||
Trace.Error(detailInfo);
|
|
||||||
}
|
|
||||||
|
|
||||||
return;
|
|
||||||
}
|
}
|
||||||
else if (completedTask == renewJobRequest)
|
finally
|
||||||
{
|
{
|
||||||
resultOnAbandonOrCancel = TaskResult.Abandoned;
|
// This should be the last thing to run so we don't notify external parties until actually finished
|
||||||
|
await notification.JobCompleted(message.JobId);
|
||||||
}
|
}
|
||||||
else
|
|
||||||
{
|
|
||||||
resultOnAbandonOrCancel = TaskResult.Canceled;
|
|
||||||
}
|
|
||||||
|
|
||||||
// renew job request completed or job request cancellation token been fired for RunAsync(jobrequestmessage)
|
|
||||||
// cancel worker gracefully first, then kill it after worker cancel timeout
|
|
||||||
try
|
|
||||||
{
|
|
||||||
Trace.Info($"Send job cancellation message to worker for job {message.JobId}.");
|
|
||||||
using (var csSendCancel = new CancellationTokenSource(_channelTimeout))
|
|
||||||
{
|
|
||||||
var messageType = MessageType.CancelRequest;
|
|
||||||
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
|
|
||||||
{
|
|
||||||
switch (HostContext.RunnerShutdownReason)
|
|
||||||
{
|
|
||||||
case ShutdownReason.UserCancelled:
|
|
||||||
messageType = MessageType.RunnerShutdown;
|
|
||||||
break;
|
|
||||||
case ShutdownReason.OperatingSystemShutdown:
|
|
||||||
messageType = MessageType.OperatingSystemShutdown;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
await processChannel.SendAsync(
|
|
||||||
messageType: messageType,
|
|
||||||
body: string.Empty,
|
|
||||||
cancellationToken: csSendCancel.Token);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (OperationCanceledException)
|
|
||||||
{
|
|
||||||
// message send been cancelled.
|
|
||||||
Trace.Info($"Job cancel message sending for job {message.JobId} been cancelled, kill running worker.");
|
|
||||||
workerProcessCancelTokenSource.Cancel();
|
|
||||||
try
|
|
||||||
{
|
|
||||||
await workerProcessTask;
|
|
||||||
}
|
|
||||||
catch (OperationCanceledException)
|
|
||||||
{
|
|
||||||
Trace.Info("worker process has been killed.");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// wait worker to exit
|
|
||||||
// if worker doesn't exit within timeout, then kill worker.
|
|
||||||
completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken));
|
|
||||||
|
|
||||||
// worker haven't exit within cancellation timeout.
|
|
||||||
if (completedTask != workerProcessTask)
|
|
||||||
{
|
|
||||||
Trace.Info($"worker process for job {message.JobId} haven't exit within cancellation timout, kill running worker.");
|
|
||||||
workerProcessCancelTokenSource.Cancel();
|
|
||||||
try
|
|
||||||
{
|
|
||||||
await workerProcessTask;
|
|
||||||
}
|
|
||||||
catch (OperationCanceledException)
|
|
||||||
{
|
|
||||||
Trace.Info("worker process has been killed.");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}");
|
|
||||||
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {resultOnAbandonOrCancel}");
|
|
||||||
// complete job request with cancel result, stop renew lock, job has finished.
|
|
||||||
|
|
||||||
Trace.Info($"Stop renew job request for job {message.JobId}.");
|
|
||||||
// stop renew lock
|
|
||||||
lockRenewalTokenSource.Cancel();
|
|
||||||
// renew job request should never blows up.
|
|
||||||
await renewJobRequest;
|
|
||||||
|
|
||||||
// complete job request
|
|
||||||
await CompleteJobRequestAsync(_poolId, message, lockToken, resultOnAbandonOrCancel);
|
|
||||||
}
|
|
||||||
finally
|
|
||||||
{
|
|
||||||
// This should be the last thing to run so we don't notify external parties until actually finished
|
|
||||||
await notification.JobCompleted(message.JobId);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
Busy = false;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
|
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
|
||||||
{
|
{
|
||||||
var runnerServer = HostContext.GetService<IRunnerServer>();
|
var runnerServer = HostContext.GetService<IRunnerServer>();
|
||||||
TaskAgentJobRequest request = null;
|
TaskAgentJobRequest request = null;
|
||||||
@@ -605,7 +636,7 @@ namespace GitHub.Runner.Listener
                 {
                     try
                     {
-                        request = await runnerServer.RenewAgentRequestAsync(poolId, requestId, lockToken, token);
+                        request = await runnerServer.RenewAgentRequestAsync(poolId, requestId, lockToken, orchestrationId, token);

                         Trace.Info($"Successfully renew job request {requestId}, job is valid till {request.LockedUntil.Value}");

@@ -712,7 +743,121 @@ namespace GitHub.Runner.Listener
             }
         }

-        // TODO: We need send detailInfo back to DT in order to add an issue for the job
+        // Best effort upload any logs for this job.
+        private async Task TryUploadUnfinishedLogs(Pipelines.AgentJobRequestMessage message)
+        {
+            Trace.Entering();
+
+            var logFolder = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), PagingLogger.PagingFolder);
+            if (!Directory.Exists(logFolder))
+            {
+                return;
+            }
+
+            var logs = Directory.GetFiles(logFolder);
+            if (logs.Length == 0)
+            {
+                return;
+            }
+
+            try
+            {
+                var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection));
+                ArgUtil.NotNull(systemConnection, nameof(systemConnection));
+
+                var jobServer = HostContext.GetService<IJobServer>();
+                VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
+                VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
+
+                await jobServer.ConnectAsync(jobConnection);
+
+                var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
+
+                var updatedRecords = new List<TimelineRecord>();
+                var logPages = new Dictionary<Guid, Dictionary<int, string>>();
+                var logRecords = new Dictionary<Guid, TimelineRecord>();
+                foreach (var log in logs)
+                {
+                    var logName = Path.GetFileNameWithoutExtension(log);
+                    var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
+                    if (logNameParts.Length != 3)
+                    {
+                        Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
+                        continue;
+                    }
+                    var logPageSeperator = logName.IndexOf('_');
+                    var logRecordId = Guid.Empty;
+                    var pageNumber = 0;
+
+                    if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
+                    {
+                        Trace.Warning($"log file '{log}' is not belongs to current job");
+                        continue;
+                    }
+
+                    if (!Guid.TryParse(logNameParts[1], out logRecordId))
+                    {
+                        Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
+                        continue;
+                    }
+
+                    if (!int.TryParse(logNameParts[2], out pageNumber))
+                    {
+                        Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
+                        continue;
+                    }
+
+                    var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
+                    if (record != null)
+                    {
+                        if (!logPages.ContainsKey(record.Id))
+                        {
+                            logPages[record.Id] = new Dictionary<int, string>();
+                            logRecords[record.Id] = record;
+                        }
+
+                        logPages[record.Id][pageNumber] = log;
+                    }
+                }
+
+                foreach (var pages in logPages)
+                {
+                    var record = logRecords[pages.Key];
+                    if (record.Log == null)
+                    {
+                        // Create the log
+                        record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
+
+                        // Need to post timeline record updates to reflect the log creation
+                        updatedRecords.Add(record.Clone());
+                    }
+
+                    for (var i = 1; i <= pages.Value.Count; i++)
+                    {
+                        var logFile = pages.Value[i];
+                        // Upload the contents
+                        using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
+                        {
+                            var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
+                        }
+
+                        Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
+                        IOUtil.DeleteFile(logFile);
+                    }
+                }
+
+                if (updatedRecords.Count > 0)
+                {
+                    await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
+                }
+            }
+            catch (Exception ex)
+            {
+                // Ignore any error during log upload since it's best effort
+                Trace.Error(ex);
+            }
+        }
+
         private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, Guid lockToken, TaskResult result, string detailInfo = null)
         {
             Trace.Entering();
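Note: TryUploadUnfinishedLogs works out which timeline record and page each leftover log file belongs to purely from its file name, which the paging logger writes as timelineId_recordId_pageNumber. A small sketch of just that parsing step, assuming the same naming convention (the helper name below is made up):

    using System;
    using System.IO;

    static class PagedLogNameSketch
    {
        // Parses "<timelineId>_<recordId>_<pageNumber>" the way the dispatcher does;
        // returns false for anything that does not follow the convention.
        public static bool TryParse(string path, out Guid timelineId, out Guid recordId, out int page)
        {
            timelineId = Guid.Empty;
            recordId = Guid.Empty;
            page = 0;

            var parts = Path.GetFileNameWithoutExtension(path).Split('_', StringSplitOptions.RemoveEmptyEntries);
            return parts.Length == 3
                && Guid.TryParse(parts[0], out timelineId)
                && Guid.TryParse(parts[1], out recordId)
                && int.TryParse(parts[2], out page);
        }
    }

    // Files that fail TryParse are skipped with a warning, exactly as in the method above.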
@@ -806,8 +951,10 @@ namespace GitHub.Runner.Listener
                 ArgUtil.NotNull(timeline, nameof(timeline));
                 TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
                 ArgUtil.NotNull(jobRecord, nameof(jobRecord));
+                var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
+                unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
                 jobRecord.ErrorCount++;
-                jobRecord.Issues.Add(new Issue() { Type = IssueType.Error, Message = errorMessage });
+                jobRecord.Issues.Add(unhandledExceptionIssue);
                 await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
             }
             catch (Exception ex)
@@ -83,7 +83,7 @@ namespace GitHub.Runner.Listener
                     Trace.Info("Connecting to the Runner Server...");
                     await _runnerServer.ConnectAsync(new Uri(serverUrl), creds);
                     Trace.Info("VssConnection created");

                     _term.WriteLine();
                     _term.WriteSuccessMessage("Connected to GitHub");
                     _term.WriteLine();
@@ -118,6 +118,20 @@ namespace GitHub.Runner.Listener
                     Trace.Error("Catch exception during create session.");
                     Trace.Error(ex);

+                    if (ex is VssOAuthTokenRequestException && creds.Federated is VssOAuthCredential vssOAuthCred)
+                    {
+                        // Check whether we get 401 because the runner registration already removed by the service.
+                        // If the runner registration get deleted, we can't exchange oauth token.
+                        Trace.Error("Test oauth app registration.");
+                        var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl));
+                        var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
+                        if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
+                        {
+                            _term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
+                            return false;
+                        }
+                    }
+
                     if (!IsSessionCreationExceptionRetriable(ex))
                     {
                         _term.WriteError($"Failed to create session. {ex.Message}");
@@ -102,7 +102,9 @@ namespace GitHub.Runner.Listener
                 IRunner runner = context.GetService<IRunner>();
                 try
                 {
-                    return await runner.ExecuteCommand(command);
+                    var returnCode = await runner.ExecuteCommand(command);
+                    trace.Info($"Runner execution has finished with return code {returnCode}");
+                    return returnCode;
                 }
                 catch (OperationCanceledException) when (context.RunnerShutdownToken.IsCancellationRequested)
                 {
@@ -37,7 +37,7 @@ namespace GitHub.Runner.Listener
         {
             try
             {
-                VssUtil.InitializeVssClientSettings(HostContext.UserAgent, HostContext.WebProxy);
+                VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy);

                 _inConfigStage = true;
                 _completedCommand.Reset();
@@ -451,16 +451,42 @@ namespace GitHub.Runner.Listener
             ext = "sh";
 #endif
             _term.WriteLine($@"
-Commands:,
+Commands:
 .{separator}config.{ext}         Configures the runner
 .{separator}config.{ext} remove  Unconfigures the runner
 .{separator}run.{ext}            Runs the runner interactively. Does not require any options.

 Options:
+ --help     Prints the help for each command
  --version  Prints the runner version
  --commit   Prints the runner commit
- --help     Prints the help for each command
-");
+
+Config Options:
+ --unattended           Disable interactive prompts for missing arguments. Defaults will be used for missing options
+ --url string           Repository to add the runner to. Required if unattended
+ --token string         Registration token. Required if unattended
+ --name string          Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
+ --runnergroup string   Name of the runner group to add this runner to (defaults to the default runner group)
+ --labels string        Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
+ --work string          Relative runner work directory (default {Constants.Path.WorkDirectory})
+ --replace              Replace any existing runner with the same name (default false)");
+#if OS_WINDOWS
+            _term.WriteLine($@" --runasservice   Run the runner as a service");
+            _term.WriteLine($@" --windowslogonaccount string    Account to run the service as. Requires runasservice");
+            _term.WriteLine($@" --windowslogonpassword string   Password for the service account. Requires runasservice");
+#endif
+            _term.WriteLine($@"
+Examples:
+ Configure a runner non-interactively:
+  .{separator}config.{ext} --unattended --url <url> --token <token>
+ Configure a runner non-interactively, replacing any existing runner with the same name:
+  .{separator}config.{ext} --unattended --url <url> --token <token> --replace [--name <name>]
+ Configure a runner non-interactively with three extra labels:
+  .{separator}config.{ext} --unattended --url <url> --token <token> --labels L1,L2,L3");
+#if OS_WINDOWS
+            _term.WriteLine($@" Configure a runner to run as a service:");
+            _term.WriteLine($@"  .{separator}config.{ext} --url <url> --token <token> --runasservice");
+#endif
         }
     }
 }
@@ -17,6 +17,7 @@ namespace GitHub.Runner.Listener
     [ServiceLocator(Default = typeof(SelfUpdater))]
     public interface ISelfUpdater : IRunnerService
     {
+        bool Busy { get; }
         Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token);
     }

@@ -31,6 +32,8 @@ namespace GitHub.Runner.Listener
         private int _poolId;
         private int _agentId;

+        public bool Busy { get; private set; }
+
         public override void Initialize(IHostContext hostContext)
         {
             base.Initialize(hostContext);
@@ -45,52 +48,60 @@ namespace GitHub.Runner.Listener
|
|||||||
|
|
||||||
public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
|
public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
|
||||||
{
|
{
|
||||||
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
|
Busy = true;
|
||||||
|
try
|
||||||
{
|
{
|
||||||
Trace.Info($"Can't find available update package.");
|
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
|
||||||
return false;
|
{
|
||||||
}
|
Trace.Info($"Can't find available update package.");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
Trace.Info($"An update is available.");
|
Trace.Info($"An update is available.");
|
||||||
|
|
||||||
// Print console line that warn user not shutdown runner.
|
// Print console line that warn user not shutdown runner.
|
||||||
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
|
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
|
||||||
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
|
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
|
||||||
|
|
||||||
await DownloadLatestRunner(token);
|
await DownloadLatestRunner(token);
|
||||||
Trace.Info($"Download latest runner and unzip into runner root.");
|
Trace.Info($"Download latest runner and unzip into runner root.");
|
||||||
|
|
||||||
// wait till all running job finish
|
// wait till all running job finish
|
||||||
await UpdateRunnerUpdateStateAsync("Waiting for current job finish running.");
|
await UpdateRunnerUpdateStateAsync("Waiting for current job finish running.");
|
||||||
|
|
||||||
await jobDispatcher.WaitAsync(token);
|
await jobDispatcher.WaitAsync(token);
|
||||||
Trace.Info($"All running job has exited.");
|
Trace.Info($"All running job has exited.");
|
||||||
|
|
||||||
// delete runner backup
|
// delete runner backup
|
||||||
DeletePreviousVersionRunnerBackup(token);
|
DeletePreviousVersionRunnerBackup(token);
|
||||||
Trace.Info($"Delete old version runner backup.");
|
Trace.Info($"Delete old version runner backup.");
|
||||||
|
|
||||||
// generate update script from template
|
// generate update script from template
|
||||||
await UpdateRunnerUpdateStateAsync("Generate and execute update script.");
|
await UpdateRunnerUpdateStateAsync("Generate and execute update script.");
|
||||||
|
|
||||||
string updateScript = GenerateUpdateScript(restartInteractiveRunner);
|
string updateScript = GenerateUpdateScript(restartInteractiveRunner);
|
||||||
Trace.Info($"Generate update script into: {updateScript}");
|
Trace.Info($"Generate update script into: {updateScript}");
|
||||||
|
|
||||||
// kick off update script
|
// kick off update script
|
||||||
Process invokeScript = new Process();
|
Process invokeScript = new Process();
|
||||||
#if OS_WINDOWS
|
#if OS_WINDOWS
|
||||||
invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace);
|
invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace);
|
||||||
invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\"";
|
invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\"";
|
||||||
#elif (OS_OSX || OS_LINUX)
|
#elif (OS_OSX || OS_LINUX)
|
||||||
invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
|
invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
|
||||||
invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
|
invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
|
||||||
#endif
|
#endif
|
||||||
invokeScript.Start();
|
invokeScript.Start();
|
||||||
Trace.Info($"Update script start running");
|
Trace.Info($"Update script start running");
|
||||||
|
|
||||||
await UpdateRunnerUpdateStateAsync("Runner will exit shortly for update, should back online within 10 seconds.");
|
await UpdateRunnerUpdateStateAsync("Runner will exit shortly for update, should back online within 10 seconds.");
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
Busy = false;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private async Task<bool> UpdateNeeded(string targetVersion, CancellationToken token)
|
private async Task<bool> UpdateNeeded(string targetVersion, CancellationToken token)
|
||||||
|
|||||||
@@ -80,7 +80,12 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
             // Validate args.
             ArgUtil.NotNull(executionContext, nameof(executionContext));
             executionContext.Output($"Syncing repository: {repoFullName}");
-            Uri repositoryUrl = new Uri($"https://github.com/{repoFullName}");
+
+            // Repository URL
+            var githubUrl = executionContext.GetGitHubContext("server_url");
+            var githubUri = new Uri(!string.IsNullOrEmpty(githubUrl) ? githubUrl : "https://github.com");
+            var portInfo = githubUri.IsDefaultPort ? string.Empty : $":{githubUri.Port}";
+            Uri repositoryUrl = new Uri($"{githubUri.Scheme}://{githubUri.Host}{portInfo}/{repoFullName}");
             if (!repositoryUrl.IsAbsoluteUri)
             {
                 throw new InvalidOperationException("Repository url need to be an absolute uri.");
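Note: with this change the checkout plugin builds the repository URL from the server_url context instead of hard-coding github.com, so GitHub Enterprise Server hosts, including non-default ports, resolve correctly. A standalone sketch of the same URL construction (host names below are examples only):

    using System;

    static class RepoUrlSketch
    {
        public static Uri BuildRepositoryUrl(string serverUrl, string repoFullName)
        {
            // Fall back to github.com when no server_url context is present.
            var githubUri = new Uri(!string.IsNullOrEmpty(serverUrl) ? serverUrl : "https://github.com");
            var portInfo = githubUri.IsDefaultPort ? string.Empty : $":{githubUri.Port}";
            return new Uri($"{githubUri.Scheme}://{githubUri.Host}{portInfo}/{repoFullName}");
        }

        static void Main()
        {
            Console.WriteLine(BuildRepositoryUrl(null, "actions/runner"));
            // https://github.com/actions/runner
            Console.WriteLine(BuildRepositoryUrl("https://ghes.example.com:8443", "octo/repo"));
            // https://ghes.example.com:8443/octo/repo
        }
    }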
@@ -271,6 +271,14 @@ namespace GitHub.Runner.Sdk
             // Indicate GitHub Actions process.
             _proc.StartInfo.Environment["GITHUB_ACTIONS"] = "true";

+            // Set CI=true when no one else already set it.
+            // CI=true is common set in most CI provider in GitHub
+            if (!_proc.StartInfo.Environment.ContainsKey("CI") &&
+                Environment.GetEnvironmentVariable("CI") == null)
+            {
+                _proc.StartInfo.Environment["CI"] = "true";
+            }
+
             // Hook up the events.
             _proc.EnableRaisingEvents = true;
             _proc.Exited += ProcessExitedHandler;
@@ -310,7 +318,12 @@ namespace GitHub.Runner.Sdk
                 }
             }

-            using (var registration = cancellationToken.Register(async () => await CancelAndKillProcessTree(killProcessOnCancel)))
+            var cancellationFinished = new TaskCompletionSource<bool>();
+            using (var registration = cancellationToken.Register(async () =>
+            {
+                await CancelAndKillProcessTree(killProcessOnCancel);
+                cancellationFinished.TrySetResult(true);
+            }))
             {
                 Trace.Info($"Process started with process id {_proc.Id}, waiting for process exit.");
                 while (true)
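Note: the registration callback is async, so Register returns before CancelAndKillProcessTree has actually finished; the extra TaskCompletionSource lets the main path await the callback's completion, which the next hunk does. A self-contained sketch of the pattern with a placeholder cleanup method (names below are illustrative):

    using System;
    using System.Threading;
    using System.Threading.Tasks;

    class CancellationHandshakeSketch
    {
        static async Task CleanupAsync()      // stands in for CancelAndKillProcessTree
        {
            await Task.Delay(100);
            Console.WriteLine("cleanup done");
        }

        static async Task Main()
        {
            using var cts = new CancellationTokenSource(TimeSpan.FromMilliseconds(50));
            var cancellationFinished = new TaskCompletionSource<bool>();

            using (cts.Token.Register(async () =>
            {
                await CleanupAsync();
                cancellationFinished.TrySetResult(true);   // signal the awaiting side
            }))
            {
                try { await Task.Delay(Timeout.Infinite, cts.Token); }
                catch (OperationCanceledException) { }

                // Without this await, the method could return while cleanup is still running.
                await cancellationFinished.Task;
            }
        }
    }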
@@ -333,6 +346,13 @@ namespace GitHub.Runner.Sdk
                 // data buffers one last time before returning
                 ProcessOutput();

+                if (cancellationToken.IsCancellationRequested)
+                {
+                    // Ensure cancellation also finish on the cancellationToken.Register thread.
+                    await cancellationFinished.Task;
+                    Trace.Info($"Process Cancellation finished.");
+                }
+
                 Trace.Info($"Finished process {_proc.Id} with exit code {_proc.ExitCode}, and elapsed time {_stopWatch.Elapsed}.");
             }

@@ -21,6 +21,7 @@ namespace GitHub.Runner.Sdk
|
|||||||
private string _httpsProxyAddress;
|
private string _httpsProxyAddress;
|
||||||
private string _httpsProxyUsername;
|
private string _httpsProxyUsername;
|
||||||
private string _httpsProxyPassword;
|
private string _httpsProxyPassword;
|
||||||
|
private string _noProxyString;
|
||||||
|
|
||||||
private readonly List<ByPassInfo> _noProxyList = new List<ByPassInfo>();
|
private readonly List<ByPassInfo> _noProxyList = new List<ByPassInfo>();
|
||||||
private readonly HashSet<string> _noProxyUnique = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
private readonly HashSet<string> _noProxyUnique = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||||
@@ -33,6 +34,7 @@ namespace GitHub.Runner.Sdk
|
|||||||
public string HttpsProxyAddress => _httpsProxyAddress;
|
public string HttpsProxyAddress => _httpsProxyAddress;
|
||||||
public string HttpsProxyUsername => _httpsProxyUsername;
|
public string HttpsProxyUsername => _httpsProxyUsername;
|
||||||
public string HttpsProxyPassword => _httpsProxyPassword;
|
public string HttpsProxyPassword => _httpsProxyPassword;
|
||||||
|
public string NoProxyString => _noProxyString;
|
||||||
|
|
||||||
public List<ByPassInfo> NoProxyList => _noProxyList;
|
public List<ByPassInfo> NoProxyList => _noProxyList;
|
||||||
|
|
||||||
@@ -71,6 +73,10 @@ namespace GitHub.Runner.Sdk
|
|||||||
{
|
{
|
||||||
_httpProxyAddress = proxyHttpUri.AbsoluteUri;
|
_httpProxyAddress = proxyHttpUri.AbsoluteUri;
|
||||||
|
|
||||||
|
// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
|
||||||
|
Environment.SetEnvironmentVariable("HTTP_PROXY", _httpProxyAddress);
|
||||||
|
Environment.SetEnvironmentVariable("http_proxy", _httpProxyAddress);
|
||||||
|
|
||||||
// the proxy url looks like http://[user:pass@]127.0.0.1:8888
|
// the proxy url looks like http://[user:pass@]127.0.0.1:8888
|
||||||
var userInfo = Uri.UnescapeDataString(proxyHttpUri.UserInfo).Split(':', 2, StringSplitOptions.RemoveEmptyEntries);
|
var userInfo = Uri.UnescapeDataString(proxyHttpUri.UserInfo).Split(':', 2, StringSplitOptions.RemoveEmptyEntries);
|
||||||
if (userInfo.Length == 2)
|
if (userInfo.Length == 2)
|
||||||
@@ -97,6 +103,10 @@ namespace GitHub.Runner.Sdk
|
|||||||
{
|
{
|
||||||
_httpsProxyAddress = proxyHttpsUri.AbsoluteUri;
|
_httpsProxyAddress = proxyHttpsUri.AbsoluteUri;
|
||||||
|
|
||||||
|
// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
|
||||||
|
Environment.SetEnvironmentVariable("HTTPS_PROXY", _httpsProxyAddress);
|
||||||
|
Environment.SetEnvironmentVariable("https_proxy", _httpsProxyAddress);
|
||||||
|
|
||||||
// the proxy url looks like http://[user:pass@]127.0.0.1:8888
|
// the proxy url looks like http://[user:pass@]127.0.0.1:8888
|
||||||
var userInfo = Uri.UnescapeDataString(proxyHttpsUri.UserInfo).Split(':', 2, StringSplitOptions.RemoveEmptyEntries);
|
var userInfo = Uri.UnescapeDataString(proxyHttpsUri.UserInfo).Split(':', 2, StringSplitOptions.RemoveEmptyEntries);
|
||||||
if (userInfo.Length == 2)
|
if (userInfo.Length == 2)
|
||||||
@@ -121,6 +131,12 @@ namespace GitHub.Runner.Sdk
|
|||||||
|
|
||||||
if (!string.IsNullOrEmpty(noProxyList))
|
if (!string.IsNullOrEmpty(noProxyList))
|
||||||
{
|
{
|
||||||
|
_noProxyString = noProxyList;
|
||||||
|
|
||||||
|
// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
|
||||||
|
Environment.SetEnvironmentVariable("NO_PROXY", noProxyList);
|
||||||
|
Environment.SetEnvironmentVariable("no_proxy", noProxyList);
|
||||||
|
|
||||||
var noProxyListSplit = noProxyList.Split(',', StringSplitOptions.RemoveEmptyEntries);
|
var noProxyListSplit = noProxyList.Split(',', StringSplitOptions.RemoveEmptyEntries);
|
||||||
foreach (string noProxy in noProxyListSplit)
|
foreach (string noProxy in noProxyListSplit)
|
||||||
{
|
{
|
||||||
|
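The proxy hunks above do two things: they export the proxy address under both the upper-case and lower-case environment variable names (some tools read only one casing), and they pull credentials out of the URL's user-info section. Below is a minimal standalone sketch of that parsing using only BCL calls; the sample URL and names are illustrative, not the runner's own code.

```csharp
using System;

class ProxyUrlExample
{
    static void Main()
    {
        // Example proxy URL in the form http://[user:pass@]127.0.0.1:8888
        var proxyUri = new Uri("http://alice:s3cret%21@127.0.0.1:8888");

        // Export both casings; some tools read HTTP_PROXY, others only http_proxy.
        Environment.SetEnvironmentVariable("HTTP_PROXY", proxyUri.AbsoluteUri);
        Environment.SetEnvironmentVariable("http_proxy", proxyUri.AbsoluteUri);

        // UserInfo is percent-encoded, so unescape it before splitting into user/password.
        var userInfo = Uri.UnescapeDataString(proxyUri.UserInfo)
                          .Split(':', 2, StringSplitOptions.RemoveEmptyEntries);
        if (userInfo.Length == 2)
        {
            Console.WriteLine($"user={userInfo[0]} password={userInfo[1]}");
        }
    }
}
```

Unescaping before the split is what lets percent-encoded characters in the password (here `%21`) survive the round trip.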
@@ -30,7 +30,7 @@ namespace GitHub.Runner.Sdk
             //
             // For example, on an en-US box, this is required for loading the encoding for the
             // default console output code page '437'. Without loading the correct encoding for
             // code page IBM437, some characters cannot be translated correctly, e.g. write 'ç'
             // from powershell.exe.
             Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
 #endif
@@ -14,10 +14,10 @@ namespace GitHub.Runner.Sdk
 {
     public static class VssUtil
     {
-        public static void InitializeVssClientSettings(ProductInfoHeaderValue additionalUserAgent, IWebProxy proxy)
+        public static void InitializeVssClientSettings(List<ProductInfoHeaderValue> additionalUserAgents, IWebProxy proxy)
         {
             var headerValues = new List<ProductInfoHeaderValue>();
-            headerValues.Add(additionalUserAgent);
+            headerValues.AddRange(additionalUserAgents);
             headerValues.Add(new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})"));

             if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)
@@ -11,6 +11,11 @@ namespace GitHub.Runner.Sdk
         {
             ArgUtil.NotNullOrEmpty(command, nameof(command));
             trace?.Info($"Which: '{command}'");
+            if (Path.IsPathFullyQualified(command) && File.Exists(command))
+            {
+                trace?.Info($"Fully qualified path: '{command}'");
+                return command;
+            }
             string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
             if (string.IsNullOrEmpty(path))
             {
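The hunk above short-circuits the PATH scan when the caller already supplies a rooted path to an existing file. The simplified sketch below illustrates that lookup order only; it is not the runner's WhichUtil (which also handles PATHEXT on Windows, among other details).

```csharp
using System;
using System.IO;
using System.Linq;

static class WhichSketch
{
    // Returns the resolved path for a command, or null when it cannot be found.
    public static string Which(string command)
    {
        // Fully qualified and existing: return it without touching PATH at all.
        if (Path.IsPathFullyQualified(command) && File.Exists(command))
        {
            return command;
        }

        var path = Environment.GetEnvironmentVariable("PATH") ?? string.Empty;
        return path.Split(Path.PathSeparator, StringSplitOptions.RemoveEmptyEntries)
                   .Select(dir => Path.Combine(dir, command))
                   .FirstOrDefault(File.Exists);
    }
}
```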
@@ -1,6 +1,7 @@
 using GitHub.DistributedTask.Pipelines;
 using GitHub.DistributedTask.WebApi;
 using GitHub.Runner.Common.Util;
+using GitHub.Runner.Worker.Container;
 using System;
 using System.Collections.Generic;
 using System.IO;
@@ -15,14 +16,14 @@ namespace GitHub.Runner.Worker
     {
         void EnablePluginInternalCommand();
         void DisablePluginInternalCommand();
-        bool TryProcessCommand(IExecutionContext context, string input);
+        bool TryProcessCommand(IExecutionContext context, string input, ContainerInfo container);
     }

     public sealed class ActionCommandManager : RunnerService, IActionCommandManager
     {
         private const string _stopCommand = "stop-commands";
         private readonly Dictionary<string, IActionCommandExtension> _commandExtensions = new Dictionary<string, IActionCommandExtension>(StringComparer.OrdinalIgnoreCase);
-        private HashSet<string> _registeredCommands = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
+        private readonly HashSet<string> _registeredCommands = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
         private readonly object _commandSerializeLock = new object();
         private bool _stopProcessCommand = false;
         private string _stopToken = null;
@@ -58,7 +59,7 @@ namespace GitHub.Runner.Worker
             _registeredCommands.Remove("internal-set-repo-path");
         }

-        public bool TryProcessCommand(IExecutionContext context, string input)
+        public bool TryProcessCommand(IExecutionContext context, string input, ContainerInfo container)
         {
             if (string.IsNullOrEmpty(input))
             {
@@ -114,7 +115,7 @@ namespace GitHub.Runner.Worker

                 try
                 {
-                    extension.ProcessCommand(context, input, actionCommand);
+                    extension.ProcessCommand(context, input, actionCommand, container);
                 }
                 catch (Exception ex)
                 {
@@ -140,7 +141,7 @@ namespace GitHub.Runner.Worker
         string Command { get; }
         bool OmitEcho { get; }

-        void ProcessCommand(IExecutionContext context, string line, ActionCommand command);
+        void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container);
     }

     public sealed class InternalPluginSetRepoPathCommandExtension : RunnerService, IActionCommandExtension
@@ -150,7 +151,7 @@ namespace GitHub.Runner.Worker

         public Type ExtensionType => typeof(IActionCommandExtension);

-        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
+        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
         {
             if (!command.Properties.TryGetValue(SetRepoPathCommandProperties.repoFullName, out string repoFullName) || string.IsNullOrEmpty(repoFullName))
             {
@@ -180,14 +181,14 @@ namespace GitHub.Runner.Worker

         public Type ExtensionType => typeof(IActionCommandExtension);

-        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
+        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
         {
             if (!command.Properties.TryGetValue(SetEnvCommandProperties.Name, out string envName) || string.IsNullOrEmpty(envName))
             {
                 throw new Exception("Required field 'name' is missing in ##[set-env] command.");
             }

-            context.EnvironmentVariables[envName] = command.Data;
+            context.Global.EnvironmentVariables[envName] = command.Data;
             context.SetEnvContext(envName, command.Data);
             context.Debug($"{envName}='{command.Data}'");
         }
@@ -205,7 +206,7 @@ namespace GitHub.Runner.Worker

         public Type ExtensionType => typeof(IActionCommandExtension);

-        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
+        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
         {
             if (!command.Properties.TryGetValue(SetOutputCommandProperties.Name, out string outputName) || string.IsNullOrEmpty(outputName))
             {
@@ -229,7 +230,7 @@ namespace GitHub.Runner.Worker

         public Type ExtensionType => typeof(IActionCommandExtension);

-        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
+        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
         {
             if (!command.Properties.TryGetValue(SaveStateCommandProperties.Name, out string stateName) || string.IsNullOrEmpty(stateName))
             {
@@ -253,7 +254,7 @@ namespace GitHub.Runner.Worker

         public Type ExtensionType => typeof(IActionCommandExtension);

-        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
+        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
         {
             if (string.IsNullOrWhiteSpace(command.Data))
             {
@@ -279,11 +280,11 @@ namespace GitHub.Runner.Worker

         public Type ExtensionType => typeof(IActionCommandExtension);

-        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
+        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
         {
             ArgUtil.NotNullOrEmpty(command.Data, "path");
-            context.PrependPath.RemoveAll(x => string.Equals(x, command.Data, StringComparison.CurrentCulture));
-            context.PrependPath.Add(command.Data);
+            context.Global.PrependPath.RemoveAll(x => string.Equals(x, command.Data, StringComparison.CurrentCulture));
+            context.Global.PrependPath.Add(command.Data);
         }
     }

@@ -294,7 +295,7 @@ namespace GitHub.Runner.Worker

         public Type ExtensionType => typeof(IActionCommandExtension);

-        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
+        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
         {
             var file = command.Data;

@@ -306,9 +307,9 @@ namespace GitHub.Runner.Worker
             }

             // Translate file path back from container path
-            if (context.Container != null)
+            if (container != null)
             {
-                file = context.Container.TranslateToHostPath(file);
+                file = container.TranslateToHostPath(file);
             }

             // Root the path
@@ -341,7 +342,7 @@ namespace GitHub.Runner.Worker

         public Type ExtensionType => typeof(IActionCommandExtension);

-        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
+        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
         {
             command.Properties.TryGetValue(RemoveMatcherCommandProperties.Owner, out string owner);
             var file = command.Data;
@@ -369,9 +370,9 @@ namespace GitHub.Runner.Worker
             else
             {
                 // Translate file path back from container path
-                if (context.Container != null)
+                if (container != null)
                 {
-                    file = context.Container.TranslateToHostPath(file);
+                    file = container.TranslateToHostPath(file);
                 }

                 // Root the path
@@ -409,7 +410,7 @@ namespace GitHub.Runner.Worker

         public Type ExtensionType => typeof(IActionCommandExtension);

-        public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command)
+        public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command, ContainerInfo container)
         {
             context.Debug(command.Data);
         }
@@ -437,7 +438,7 @@ namespace GitHub.Runner.Worker

         public Type ExtensionType => typeof(IActionCommandExtension);

-        public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command)
+        public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command, ContainerInfo container)
         {
             command.Properties.TryGetValue(IssueCommandProperties.File, out string file);
             command.Properties.TryGetValue(IssueCommandProperties.Line, out string line);
@@ -454,10 +455,10 @@ namespace GitHub.Runner.Worker
             {
                 issue.Category = "Code";

-                if (context.Container != null)
+                if (container != null)
                 {
                     // Translate file path back from container path
-                    file = context.Container.TranslateToHostPath(file);
+                    file = container.TranslateToHostPath(file);
                     command.Properties[IssueCommandProperties.File] = file;
                 }

@@ -485,7 +486,10 @@ namespace GitHub.Runner.Worker

             foreach (var property in command.Properties)
             {
-                issue.Data[property.Key] = property.Value;
+                if (!string.Equals(property.Key, Constants.Runner.InternalTelemetryIssueDataKey, StringComparison.OrdinalIgnoreCase))
+                {
+                    issue.Data[property.Key] = property.Value;
+                }
             }

             context.AddIssue(issue);
@@ -517,7 +521,7 @@ namespace GitHub.Runner.Worker

         public Type ExtensionType => typeof(IActionCommandExtension);

-        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
+        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
         {
             var data = this is GroupCommandExtension ? command.Data : string.Empty;
             context.Output($"##[{Command}]{data}");
@@ -531,7 +535,7 @@ namespace GitHub.Runner.Worker

         public Type ExtensionType => typeof(IActionCommandExtension);

-        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
+        public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
         {
             ArgUtil.NotNullOrEmpty(command.Data, "value");

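The ActionCommandManager changes above thread a ContainerInfo through every command extension so that file paths reported from inside a job container (add-matcher files, issue locations) can be mapped back to host paths. The mapping itself lives in ContainerInfo.TranslateToHostPath; the sketch below only illustrates the general idea of a prefix swap over volume mounts and is a hypothetical helper, not the runner's implementation.

```csharp
using System;
using System.Collections.Generic;

class PathMapping
{
    public string HostPath { get; set; }
    public string ContainerPath { get; set; }
}

static class ContainerPathSketch
{
    // Replace a container-side prefix with the corresponding host-side prefix.
    public static string TranslateToHostPath(string path, IEnumerable<PathMapping> mounts)
    {
        foreach (var mount in mounts)
        {
            if (path.StartsWith(mount.ContainerPath, StringComparison.Ordinal))
            {
                return mount.HostPath + path.Substring(mount.ContainerPath.Length);
            }
        }
        return path; // not under any mapped volume; leave untouched
    }
}
```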
@@ -1,31 +1,43 @@
 using System;
 using System.Collections.Generic;
 using System.IO;
 using System.IO.Compression;
 using System.Linq;
-using System.Threading;
-using System.Threading.Tasks;
+using System.Net;
 using System.Net.Http;
 using System.Net.Http.Headers;
 using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
 using GitHub.DistributedTask.ObjectTemplating.Tokens;
-using GitHub.DistributedTask.WebApi;
 using GitHub.Runner.Common;
-using GitHub.Runner.Common.Util;
 using GitHub.Runner.Sdk;
 using GitHub.Runner.Worker.Container;
 using GitHub.Services.Common;
-using Newtonsoft.Json;
+using WebApi = GitHub.DistributedTask.WebApi;
 using Pipelines = GitHub.DistributedTask.Pipelines;
 using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;

 namespace GitHub.Runner.Worker
 {
+    public class PrepareResult
+    {
+        public PrepareResult(List<JobExtensionRunner> containerSetupSteps, Dictionary<Guid, IActionRunner> preStepTracker)
+        {
+            this.ContainerSetupSteps = containerSetupSteps;
+            this.PreStepTracker = preStepTracker;
+        }
+
+        public List<JobExtensionRunner> ContainerSetupSteps { get; set; }
+
+        public Dictionary<Guid, IActionRunner> PreStepTracker { get; set; }
+    }
+
     [ServiceLocator(Default = typeof(ActionManager))]
     public interface IActionManager : IRunnerService
     {
         Dictionary<Guid, ContainerInfo> CachedActionContainers { get; }
-        Task<List<JobExtensionRunner>> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps);
+        Task<PrepareResult> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps);
         Definition LoadAction(IExecutionContext executionContext, Pipelines.ActionStep action);
     }

@@ -33,13 +45,13 @@ namespace GitHub.Runner.Worker
     {
         private const int _defaultFileStreamBufferSize = 4096;

         //81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
         private const int _defaultCopyBufferSize = 81920;
+        private const string _dotcomApiUrl = "https://api.github.com";
         private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new Dictionary<Guid, ContainerInfo>();

         public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers;
-        public async Task<List<JobExtensionRunner>> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps)
+        public async Task<PrepareResult> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps)
         {
             ArgUtil.NotNull(executionContext, nameof(executionContext));
             ArgUtil.NotNull(steps, nameof(steps));
@@ -49,18 +61,24 @@ namespace GitHub.Runner.Worker
             Dictionary<string, List<Guid>> imagesToBuild = new Dictionary<string, List<Guid>>(StringComparer.OrdinalIgnoreCase);
             Dictionary<string, ActionContainer> imagesToBuildInfo = new Dictionary<string, ActionContainer>(StringComparer.OrdinalIgnoreCase);
             List<JobExtensionRunner> containerSetupSteps = new List<JobExtensionRunner>();
+            Dictionary<Guid, IActionRunner> preStepTracker = new Dictionary<Guid, IActionRunner>();
             IEnumerable<Pipelines.ActionStep> actions = steps.OfType<Pipelines.ActionStep>();

-            // TODO: Depreciate the PREVIEW_ACTION_TOKEN
+            // TODO: Deprecate the PREVIEW_ACTION_TOKEN
             // Log even if we aren't using it to ensure users know.
-            if (!string.IsNullOrEmpty(executionContext.Variables.Get("PREVIEW_ACTION_TOKEN")))
+            if (!string.IsNullOrEmpty(executionContext.Global.Variables.Get("PREVIEW_ACTION_TOKEN")))
             {
-                executionContext.Warning("The 'PREVIEW_ACTION_TOKEN' secret is depreciated. Please remove it from the repository's secrets");
+                executionContext.Warning("The 'PREVIEW_ACTION_TOKEN' secret is deprecated. Please remove it from the repository's secrets");
             }

-            // Clear the cache (local runner)
+            // Clear the cache (for self-hosted runners)
             IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);

+            // todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
+            var newActionMetadata = executionContext.Global.Variables.GetBoolean("DistributedTask.NewActionMetadata") ?? false;
+
+            var repositoryActions = new List<Pipelines.ActionStep>();
+
             foreach (var action in actions)
             {
                 if (action.Reference.Type == Pipelines.ActionSourceType.ContainerRegistry)
@@ -78,7 +96,8 @@ namespace GitHub.Runner.Worker
                     Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'");
                     imagesToPull[containerReference.Image].Add(action.Id);
                 }
-                else if (action.Reference.Type == Pipelines.ActionSourceType.Repository)
+                // todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
+                else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && !newActionMetadata)
                 {
                     // only download the repository archive
                     await DownloadRepositoryActionAsync(executionContext, action);
@@ -111,6 +130,97 @@ namespace GitHub.Runner.Worker
                             imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
                         }
                     }
+
+                    var repoAction = action.Reference as Pipelines.RepositoryPathReference;
+                    if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
+                    {
+                        var definition = LoadAction(executionContext, action);
+                        if (definition.Data.Execution.HasPre)
+                        {
+                            var actionRunner = HostContext.CreateService<IActionRunner>();
+                            actionRunner.Action = action;
+                            actionRunner.Stage = ActionRunStage.Pre;
+                            actionRunner.Condition = definition.Data.Execution.InitCondition;
+
+                            Trace.Info($"Add 'pre' execution for {action.Id}");
+                            preStepTracker[action.Id] = actionRunner;
+                        }
+                    }
+                }
+                else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && newActionMetadata)
+                {
+                    repositoryActions.Add(action);
+                }
+            }
+
+            if (repositoryActions.Count > 0)
+            {
+                // Get the download info
+                var downloadInfos = await GetDownloadInfoAsync(executionContext, repositoryActions);
+
+                // Download each action
+                foreach (var action in repositoryActions)
+                {
+                    var lookupKey = GetDownloadInfoLookupKey(action);
+                    if (string.IsNullOrEmpty(lookupKey))
+                    {
+                        continue;
+                    }
+
+                    if (!downloadInfos.TryGetValue(lookupKey, out var downloadInfo))
+                    {
+                        throw new Exception($"Missing download info for {lookupKey}");
+                    }
+
+                    await DownloadRepositoryActionAsync(executionContext, downloadInfo);
+                }
+
+                // More preparation based on content in the repository (action.yml)
+                foreach (var action in repositoryActions)
+                {
+                    var setupInfo = PrepareRepositoryActionAsync(executionContext, action);
+                    if (setupInfo != null)
+                    {
+                        if (!string.IsNullOrEmpty(setupInfo.Image))
+                        {
+                            if (!imagesToPull.ContainsKey(setupInfo.Image))
+                            {
+                                imagesToPull[setupInfo.Image] = new List<Guid>();
+                            }
+
+                            Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to pull image '{setupInfo.Image}'");
+                            imagesToPull[setupInfo.Image].Add(action.Id);
+                        }
+                        else
+                        {
+                            ArgUtil.NotNullOrEmpty(setupInfo.ActionRepository, nameof(setupInfo.ActionRepository));
+
+                            if (!imagesToBuild.ContainsKey(setupInfo.ActionRepository))
+                            {
+                                imagesToBuild[setupInfo.ActionRepository] = new List<Guid>();
+                            }
+
+                            Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to build image '{setupInfo.Dockerfile}'");
+                            imagesToBuild[setupInfo.ActionRepository].Add(action.Id);
+                            imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
+                        }
+                    }
+
+                    var repoAction = action.Reference as Pipelines.RepositoryPathReference;
+                    if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
+                    {
+                        var definition = LoadAction(executionContext, action);
+                        if (definition.Data.Execution.HasPre)
+                        {
+                            var actionRunner = HostContext.CreateService<IActionRunner>();
+                            actionRunner.Action = action;
+                            actionRunner.Stage = ActionRunStage.Pre;
+                            actionRunner.Condition = definition.Data.Execution.InitCondition;
+
+                            Trace.Info($"Add 'pre' execution for {action.Id}");
+                            preStepTracker[action.Id] = actionRunner;
+                        }
+                    }
                 }
             }

@@ -147,7 +257,7 @@ namespace GitHub.Runner.Worker
             }
 #endif

-            return containerSetupSteps;
+            return new PrepareResult(containerSetupSteps, preStepTracker);
         }

         public Definition LoadAction(IExecutionContext executionContext, Pipelines.ActionStep action)
@@ -198,14 +308,21 @@ namespace GitHub.Runner.Worker
                 Trace.Info($"Load action that reference repository from '{actionDirectory}'");
                 definition.Directory = actionDirectory;

-                string manifestFile = Path.Combine(actionDirectory, "action.yml");
+                string manifestFile = Path.Combine(actionDirectory, Constants.Path.ActionManifestYmlFile);
+                string manifestFileYaml = Path.Combine(actionDirectory, Constants.Path.ActionManifestYamlFile);
                 string dockerFile = Path.Combine(actionDirectory, "Dockerfile");
                 string dockerFileLowerCase = Path.Combine(actionDirectory, "dockerfile");
-                if (File.Exists(manifestFile))
+                if (File.Exists(manifestFile) || File.Exists(manifestFileYaml))
                 {
                     var manifestManager = HostContext.GetService<IActionManifestManager>();
-                    definition.Data = manifestManager.Load(executionContext, manifestFile);
+                    if (File.Exists(manifestFile))
+                    {
+                        definition.Data = manifestManager.Load(executionContext, manifestFile);
+                    }
+                    else
+                    {
+                        definition.Data = manifestManager.Load(executionContext, manifestFileYaml);
+                    }
                     Trace.Verbose($"Action friendly name: '{definition.Data.Name}'");
                     Trace.Verbose($"Action description: '{definition.Data.Description}'");

@@ -232,14 +349,19 @@ namespace GitHub.Runner.Worker
                         Trace.Info($"Action container env: {StringUtil.ConvertToJson(containerAction.Environment)}.");
                     }

+                    if (!string.IsNullOrEmpty(containerAction.Pre))
+                    {
+                        Trace.Info($"Action container pre entrypoint: {containerAction.Pre}.");
+                    }
+
                     if (!string.IsNullOrEmpty(containerAction.EntryPoint))
                     {
                         Trace.Info($"Action container entrypoint: {containerAction.EntryPoint}.");
                     }

-                    if (!string.IsNullOrEmpty(containerAction.Cleanup))
+                    if (!string.IsNullOrEmpty(containerAction.Post))
                     {
-                        Trace.Info($"Action container cleanup entrypoint: {containerAction.Cleanup}.");
+                        Trace.Info($"Action container post entrypoint: {containerAction.Post}.");
                     }

                     if (CachedActionContainers.TryGetValue(action.Id, out var container))
@@ -251,8 +373,9 @@ namespace GitHub.Runner.Worker
                 else if (definition.Data.Execution.ExecutionType == ActionExecutionType.NodeJS)
                 {
                     var nodeAction = definition.Data.Execution as NodeJSActionExecutionData;
+                    Trace.Info($"Action pre node.js file: {nodeAction.Pre ?? "N/A"}.");
                     Trace.Info($"Action node.js file: {nodeAction.Script}.");
-                    Trace.Info($"Action cleanup node.js file: {nodeAction.Cleanup ?? "N/A"}.");
+                    Trace.Info($"Action post node.js file: {nodeAction.Post ?? "N/A"}.");
                 }
                 else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Plugin)
                 {
@@ -268,10 +391,18 @@ namespace GitHub.Runner.Worker

                     if (!string.IsNullOrEmpty(plugin.PostPluginTypeName))
                     {
-                        pluginAction.Cleanup = plugin.PostPluginTypeName;
+                        pluginAction.Post = plugin.PostPluginTypeName;
                         Trace.Info($"Action cleanup plugin: {plugin.PluginTypeName}.");
                     }
                 }
+                else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Composite)
+                {
+                    var compositeAction = definition.Data.Execution as CompositeActionExecutionData;
+                    Trace.Info($"Load {compositeAction.Steps?.Count ?? 0} action steps.");
+                    Trace.Verbose($"Details: {StringUtil.ConvertToJson(compositeAction?.Steps)}");
+                    Trace.Info($"Load: {compositeAction.Outputs?.Count ?? 0} number of outputs");
+                    Trace.Info($"Details: {StringUtil.ConvertToJson(compositeAction?.Outputs)}");
+                }
                 else
                 {
                     throw new NotSupportedException(definition.Data.Execution.ExecutionType.ToString());
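The LoadAction hunk above starts accepting action.yaml alongside action.yml and logs the new pre/post entry points. Below is a compact sketch of the manifest-file probe only; the literal file names stand in for the Constants.Path values referenced in the diff.

```csharp
using System.IO;

static class ManifestProbe
{
    // Returns the manifest path to load, or null when neither spelling exists.
    public static string FindManifest(string actionDirectory)
    {
        string yml = Path.Combine(actionDirectory, "action.yml");
        string yaml = Path.Combine(actionDirectory, "action.yaml");
        if (File.Exists(yml))
        {
            return yml; // prefer the .yml spelling when both files are present
        }
        return File.Exists(yaml) ? yaml : null;
    }
}
```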
@@ -314,7 +445,7 @@ namespace GitHub.Runner.Worker
                 else
                 {
                     var fullPath = IOUtil.ResolvePath(actionDirectory, "."); // resolve full path without access filesystem.
-                    throw new NotSupportedException($"Can't find 'action.yml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
+                    throw new NotSupportedException($"Can't find 'action.yml', 'action.yaml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
                 }
             }
             else if (action.Reference.Type == Pipelines.ActionSourceType.Script)
@@ -337,7 +468,7 @@ namespace GitHub.Runner.Worker
             ArgUtil.NotNull(setupInfo, nameof(setupInfo));
             ArgUtil.NotNullOrEmpty(setupInfo.Container.Image, nameof(setupInfo.Container.Image));

-            executionContext.Output($"Pull down action image '{setupInfo.Container.Image}'");
+            executionContext.Output($"##[group]Pull down action image '{setupInfo.Container.Image}'");

             // Pull down docker image with retry up to 3 times
             var dockerManger = HostContext.GetService<IDockerCommandManager>();
@@ -361,6 +492,7 @@ namespace GitHub.Runner.Worker
                     }
                 }
             }
+            executionContext.Output("##[endgroup]");

             if (retryCount == 3 && pullExitCode != 0)
             {
@@ -380,7 +512,7 @@ namespace GitHub.Runner.Worker
             ArgUtil.NotNull(setupInfo, nameof(setupInfo));
             ArgUtil.NotNullOrEmpty(setupInfo.Container.Dockerfile, nameof(setupInfo.Container.Dockerfile));

-            executionContext.Output($"Build container for action use: '{setupInfo.Container.Dockerfile}'.");
+            executionContext.Output($"##[group]Build container for action use: '{setupInfo.Container.Dockerfile}'.");

             // Build docker image with retry up to 3 times
             var dockerManger = HostContext.GetService<IDockerCommandManager>();
@@ -389,7 +521,12 @@ namespace GitHub.Runner.Worker
             var imageName = $"{dockerManger.DockerInstanceLabel}:{Guid.NewGuid().ToString("N")}";
             while (retryCount < 3)
             {
-                buildExitCode = await dockerManger.DockerBuild(executionContext, setupInfo.Container.WorkingDirectory, Directory.GetParent(setupInfo.Container.Dockerfile).FullName, imageName);
+                buildExitCode = await dockerManger.DockerBuild(
+                    executionContext,
+                    setupInfo.Container.WorkingDirectory,
+                    setupInfo.Container.Dockerfile,
+                    Directory.GetParent(setupInfo.Container.Dockerfile).FullName,
+                    imageName);
                 if (buildExitCode == 0)
                 {
                     break;
@@ -405,6 +542,7 @@ namespace GitHub.Runner.Worker
                     }
                 }
             }
+            executionContext.Output("##[endgroup]");

             if (retryCount == 3 && buildExitCode != 0)
             {
@@ -418,6 +556,80 @@ namespace GitHub.Runner.Worker
             }
         }

+        // This implementation is temporary and will be replaced with a REST API call to the service to resolve
+        private async Task<IDictionary<string, WebApi.ActionDownloadInfo>> GetDownloadInfoAsync(IExecutionContext executionContext, List<Pipelines.ActionStep> actions)
+        {
+            executionContext.Output("Getting action download info");
+
+            // Convert to action reference
+            var actionReferences = actions
+                .GroupBy(x => GetDownloadInfoLookupKey(x))
+                .Where(x => !string.IsNullOrEmpty(x.Key))
+                .Select(x =>
+                {
+                    var action = x.First();
+                    var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
+                    ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
+                    return new WebApi.ActionReference
+                    {
+                        NameWithOwner = repositoryReference.Name,
+                        Ref = repositoryReference.Ref,
+                    };
+                })
+                .ToList();
+
+            // Nothing to resolve?
+            if (actionReferences.Count == 0)
+            {
+                return new Dictionary<string, WebApi.ActionDownloadInfo>();
+            }
+
+            // Resolve download info
+            var jobServer = HostContext.GetService<IJobServer>();
+            var actionDownloadInfos = default(WebApi.ActionDownloadInfoCollection);
+            for (var attempt = 1; attempt <= 3; attempt++)
+            {
+                try
+                {
+                    actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
+                    break;
+                }
+                catch (Exception ex) when (attempt < 3)
+                {
+                    executionContext.Output($"Failed to resolve action download info. Error: {ex.Message}");
+                    executionContext.Debug(ex.ToString());
+                    if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
+                    {
+                        var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
+                        executionContext.Output($"Retrying in {backoff.TotalSeconds} seconds");
+                        await Task.Delay(backoff);
+                    }
+                }
+            }
+
+            ArgUtil.NotNull(actionDownloadInfos, nameof(actionDownloadInfos));
+            ArgUtil.NotNull(actionDownloadInfos.Actions, nameof(actionDownloadInfos.Actions));
+            var apiUrl = GetApiUrl(executionContext);
+            var defaultAccessToken = executionContext.GetGitHubContext("token");
+            var configurationStore = HostContext.GetService<IConfigurationStore>();
+            var runnerSettings = configurationStore.GetSettings();
+
+            foreach (var actionDownloadInfo in actionDownloadInfos.Actions.Values)
+            {
+                // Add secret
+                HostContext.SecretMasker.AddValue(actionDownloadInfo.Authentication?.Token);
+
+                // Default auth token
+                if (string.IsNullOrEmpty(actionDownloadInfo.Authentication?.Token))
+                {
+                    actionDownloadInfo.Authentication = new WebApi.ActionDownloadAuthentication { Token = defaultAccessToken };
+                }
+            }
+
+            return actionDownloadInfos.Actions;
+        }
+
+        // todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
         private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
         {
             Trace.Entering();
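GetDownloadInfoAsync above retries the resolve call up to three times, sleeping for a random 10 to 30 seconds between attempts unless the _GITHUB_ACTION_DOWNLOAD_NO_BACKOFF variable is set. A generic sketch of that retry-with-jitter pattern, written without the runner's BackoffTimerHelper, under the assumption that the operation is safe to repeat:

```csharp
using System;
using System.Threading.Tasks;

static class RetrySketch
{
    private static readonly Random _random = new Random();

    public static async Task<T> WithRetryAsync<T>(Func<Task<T>> operation, int maxAttempts = 3)
    {
        for (var attempt = 1; ; attempt++)
        {
            try
            {
                return await operation();
            }
            catch (Exception ex) when (attempt < maxAttempts)
            {
                // A randomized backoff spreads retries out when many runners hit the service at once.
                var backoff = TimeSpan.FromSeconds(_random.Next(10, 31));
                Console.WriteLine($"Attempt {attempt} failed ({ex.Message}); retrying in {backoff.TotalSeconds}s");
                await Task.Delay(backoff);
            }
        }
    }
}
```

The final attempt deliberately has no catch filter, so the last failure propagates to the caller just as it does in the diff.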
@@ -441,7 +653,8 @@ namespace GitHub.Runner.Worker
             ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));

             string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repositoryReference.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repositoryReference.Ref);
-            if (File.Exists(destDirectory + ".completed"))
+            string watermarkFile = GetWatermarkFilePath(destDirectory);
+            if (File.Exists(watermarkFile))
             {
                 executionContext.Debug($"Action '{repositoryReference.Name}@{repositoryReference.Ref}' already downloaded at '{destDirectory}'.");
                 return;
@@ -454,30 +667,119 @@ namespace GitHub.Runner.Worker
                 executionContext.Output($"Download action repository '{repositoryReference.Name}@{repositoryReference.Ref}'");
             }

-#if OS_WINDOWS
-            string archiveLink = $"https://api.github.com/repos/{repositoryReference.Name}/zipball/{repositoryReference.Ref}";
-#else
-            string archiveLink = $"https://api.github.com/repos/{repositoryReference.Name}/tarball/{repositoryReference.Ref}";
-#endif
-            Trace.Info($"Download archive '{archiveLink}' to '{destDirectory}'.");
+            var configurationStore = HostContext.GetService<IConfigurationStore>();
+            var isHostedServer = configurationStore.GetSettings().IsHostedServer;
+            if (isHostedServer)
+            {
+                string apiUrl = GetApiUrl(executionContext);
+                string archiveLink = BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref);
+                var downloadDetails = new ActionDownloadDetails(archiveLink, ConfigureAuthorizationFromContext);
+                await DownloadRepositoryActionAsync(executionContext, downloadDetails, null, destDirectory);
+                return;
+            }
+            else
+            {
+                string apiUrl = GetApiUrl(executionContext);
+
+                // URLs to try:
+                var downloadAttempts = new List<ActionDownloadDetails> {
+                    // A built-in action or an action the user has created, on their GHES instance
+                    // Example: https://my-ghes/api/v3/repos/my-org/my-action/tarball/v1
+                    new ActionDownloadDetails(
+                        BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref),
+                        ConfigureAuthorizationFromContext),
+
+                    // The same action, on GitHub.com
+                    // Example: https://api.github.com/repos/my-org/my-action/tarball/v1
+                    new ActionDownloadDetails(
+                        BuildLinkToActionArchive(_dotcomApiUrl, repositoryReference.Name, repositoryReference.Ref),
+                        configureAuthorization: (e,h) => { /* no authorization for dotcom */ })
+                };
+
+                foreach (var downloadAttempt in downloadAttempts)
+                {
+                    try
+                    {
+                        await DownloadRepositoryActionAsync(executionContext, downloadAttempt, null, destDirectory);
+                        return;
+                    }
+                    catch (ActionNotFoundException)
+                    {
+                        Trace.Info($"Failed to find the action '{repositoryReference.Name}' at ref '{repositoryReference.Ref}' at {downloadAttempt.ArchiveLink}");
+                        continue;
+                    }
+                }
+                throw new ActionNotFoundException($"Failed to find the action '{repositoryReference.Name}' at ref '{repositoryReference.Ref}'. Paths attempted: {string.Join(", ", downloadAttempts.Select(d => d.ArchiveLink))}");
+            }
+        }
+
+        private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo)
+        {
+            Trace.Entering();
+            ArgUtil.NotNull(executionContext, nameof(executionContext));
+            ArgUtil.NotNull(downloadInfo, nameof(downloadInfo));
+            ArgUtil.NotNullOrEmpty(downloadInfo.NameWithOwner, nameof(downloadInfo.NameWithOwner));
+            ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.Ref));
+
+            string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), downloadInfo.NameWithOwner.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), downloadInfo.Ref);
+            string watermarkFile = GetWatermarkFilePath(destDirectory);
+            if (File.Exists(watermarkFile))
+            {
+                executionContext.Debug($"Action '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' already downloaded at '{destDirectory}'.");
+                return;
+            }
+            else
+            {
+                // make sure we get a clean folder ready to use.
+                IOUtil.DeleteDirectory(destDirectory, executionContext.CancellationToken);
+                Directory.CreateDirectory(destDirectory);
+                executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}'");
+            }
+
+            await DownloadRepositoryActionAsync(executionContext, null, downloadInfo, destDirectory);
+        }
+
+        private string GetApiUrl(IExecutionContext executionContext)
+        {
+            string apiUrl = executionContext.GetGitHubContext("api_url");
+            if (!string.IsNullOrEmpty(apiUrl))
+            {
+                return apiUrl;
+            }
+            // Once the api_url is set for hosted, we can remove this fallback (it doesn't make sense for GHES)
+            return _dotcomApiUrl;
+        }
+
+        private static string BuildLinkToActionArchive(string apiUrl, string repository, string @ref)
+        {
+#if OS_WINDOWS
+            return $"{apiUrl}/repos/{repository}/zipball/{@ref}";
+#else
+            return $"{apiUrl}/repos/{repository}/tarball/{@ref}";
+#endif
+        }
+
+        // todo: Remove the parameter "actionDownloadDetails" when feature flag DistributedTask.NewActionMetadata is removed
+        private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, ActionDownloadDetails actionDownloadDetails, WebApi.ActionDownloadInfo downloadInfo, string destDirectory)
+        {
             //download and extract action in a temp folder and rename it on success
             string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid());
             Directory.CreateDirectory(tempDirectory);

 #if OS_WINDOWS
             string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.zip");
+            string link = downloadInfo?.ZipballUrl ?? actionDownloadDetails.ArchiveLink;
 #else
             string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.tar.gz");
+            string link = downloadInfo?.TarballUrl ?? actionDownloadDetails.ArchiveLink;
 #endif
-            Trace.Info($"Save archive '{archiveLink}' into {archiveFile}.");
+            Trace.Info($"Save archive '{link}' into {archiveFile}.");
             try
             {
                 int retryCount = 0;

                 // Allow up to 20 * 60s for any action to be downloaded from github graph.
                 int timeoutSeconds = 20 * 60;
                 while (retryCount < 3)
                 {
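The download rework above builds the archive URL from the instance's API URL and, on GHES, falls back to api.github.com when the action is not found locally. The sketch below only illustrates the URL construction and attempt order; the method and parameter names are illustrative, not the runner's.

```csharp
using System.Collections.Generic;

static class ArchiveLinkSketch
{
    // On Windows the runner downloads a zipball; elsewhere a tarball.
    public static string BuildLink(string apiUrl, string repository, string gitRef, bool windows)
        => windows
            ? $"{apiUrl}/repos/{repository}/zipball/{gitRef}"
            : $"{apiUrl}/repos/{repository}/tarball/{gitRef}";

    // GHES first (may require auth), then the same action on GitHub.com (anonymous).
    public static IEnumerable<string> CandidateLinks(string ghesApiUrl, string repository, string gitRef, bool windows)
    {
        yield return BuildLink(ghesApiUrl, repository, gitRef, windows);
        yield return BuildLink("https://api.github.com", repository, gitRef, windows);
    }
}
```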
@@ -491,55 +793,67 @@ namespace GitHub.Runner.Worker
|
|||||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||||
using (var httpClient = new HttpClient(httpClientHandler))
|
using (var httpClient = new HttpClient(httpClientHandler))
|
||||||
{
|
{
|
||||||
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
|
// Legacy
|
||||||
if (string.IsNullOrEmpty(authToken))
|
if (downloadInfo == null)
|
||||||
{
|
{
|
||||||
// TODO: Depreciate the PREVIEW_ACTION_TOKEN
|
actionDownloadDetails.ConfigureAuthorization(executionContext, httpClient);
|
||||||
authToken = executionContext.Variables.Get("PREVIEW_ACTION_TOKEN");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!string.IsNullOrEmpty(authToken))
|
|
||||||
{
|
|
||||||
HostContext.SecretMasker.AddValue(authToken);
|
|
||||||
-var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
-httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
 }
+// FF DistributedTask.NewActionMetadata
 else
 {
-var accessToken = executionContext.GetGitHubContext("token");
-var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
-httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
+httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadInfo.Authentication?.Token);
 }

-httpClient.DefaultRequestHeaders.UserAgent.Add(HostContext.UserAgent);
-using (var result = await httpClient.GetStreamAsync(archiveLink))
+httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
+using (var response = await httpClient.GetAsync(link))
 {
-await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
-await fs.FlushAsync(actionDownloadCancellation.Token);
+if (response.IsSuccessStatusCode)
+{
+using (var result = await response.Content.ReadAsStreamAsync())
+{
+await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
+await fs.FlushAsync(actionDownloadCancellation.Token);

 // download succeed, break out the retry loop.
 break;
+}
+}
+else if (response.StatusCode == HttpStatusCode.NotFound)
+{
+// It doesn't make sense to retry in this case, so just stop
+throw new ActionNotFoundException(new Uri(link));
+}
+else
+{
+// Something else bad happened, let's go to our retry logic
+response.EnsureSuccessStatusCode();
+}
 }
 }
 }
 catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
 {
-Trace.Info($"Action download has been cancelled.");
+Trace.Info("Action download has been cancelled.");
+throw;
+}
+catch (ActionNotFoundException)
+{
+Trace.Info($"The action at '{link}' does not exist");
 throw;
 }
 catch (Exception ex) when (retryCount < 2)
 {
 retryCount++;
-Trace.Error($"Fail to download archive '{archiveLink}' -- Attempt: {retryCount}");
+Trace.Error($"Fail to download archive '{link}' -- Attempt: {retryCount}");
 Trace.Error(ex);
 if (actionDownloadTimeout.Token.IsCancellationRequested)
 {
 // action download didn't finish within timeout
-executionContext.Warning($"Action '{archiveLink}' didn't finish download within {timeoutSeconds} seconds.");
+executionContext.Warning($"Action '{link}' didn't finish download within {timeoutSeconds} seconds.");
 }
 else
 {
-executionContext.Warning($"Failed to download action '{archiveLink}'. Error {ex.Message}");
+executionContext.Warning($"Failed to download action '{link}'. Error: {ex.Message}");
 }
 }
 }
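Note: the control flow introduced above (inspect the status code, stop retrying on 404, let every other failure reach the retry catch) can be read as the following standalone sketch. It uses only BCL types plus a local exception class; the names are illustrative and are not the runner's actual helpers.

    using System;
    using System.IO;
    using System.Net;
    using System.Net.Http;
    using System.Threading.Tasks;

    class NotFoundDownloadException : Exception
    {
        public NotFoundDownloadException(Uri uri) : base($"Not found: {uri}") { }
    }

    static class DownloadSketch
    {
        // Retry transient failures, but stop immediately on 404.
        public static async Task DownloadAsync(HttpClient client, string link, string file)
        {
            for (var attempt = 1; ; attempt++)
            {
                try
                {
                    using (var response = await client.GetAsync(link))
                    {
                        if (response.IsSuccessStatusCode)
                        {
                            using (var fs = File.Create(file))
                            using (var stream = await response.Content.ReadAsStreamAsync())
                            {
                                await stream.CopyToAsync(fs);
                            }
                            return; // success, leave the retry loop
                        }

                        if (response.StatusCode == HttpStatusCode.NotFound)
                        {
                            // retrying cannot help here
                            throw new NotFoundDownloadException(new Uri(link));
                        }

                        // anything else falls through to the retry catch below
                        response.EnsureSuccessStatusCode();
                    }
                }
                catch (NotFoundDownloadException)
                {
                    throw;
                }
                catch (Exception) when (attempt < 3)
                {
                    // transient failure: log and go around again
                }
            }
        }
    }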
@@ -553,7 +867,7 @@ namespace GitHub.Runner.Worker
 }

 ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile));
-executionContext.Debug($"Download '{archiveLink}' to '{archiveFile}'");
+executionContext.Debug($"Download '{link}' to '{archiveFile}'");

 var stagingDirectory = Path.Combine(tempDirectory, "_staging");
 Directory.CreateDirectory(stagingDirectory);
@@ -603,7 +917,8 @@ namespace GitHub.Runner.Worker
 }

 Trace.Verbose("Create watermark file indicate action download succeed.");
-File.WriteAllText(destDirectory + ".completed", DateTime.UtcNow.ToString());
+string watermarkFile = GetWatermarkFilePath(destDirectory);
+File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString());

 executionContext.Debug($"Archive '{archiveFile}' has been unzipped into '{destDirectory}'.");
 Trace.Info("Finished getting action repository.");
@@ -627,6 +942,32 @@ namespace GitHub.Runner.Worker
 }
 }

+// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
+private void ConfigureAuthorizationFromContext(IExecutionContext executionContext, HttpClient httpClient)
+{
+var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
+if (string.IsNullOrEmpty(authToken))
+{
+// TODO: Deprecate the PREVIEW_ACTION_TOKEN
+authToken = executionContext.Global.Variables.Get("PREVIEW_ACTION_TOKEN");
+}
+
+if (!string.IsNullOrEmpty(authToken))
+{
+HostContext.SecretMasker.AddValue(authToken);
+var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
+httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
+}
+else
+{
+var accessToken = executionContext.GetGitHubContext("token");
+var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
+httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
+}
+}
+
+private string GetWatermarkFilePath(string directory) => directory + ".completed";
+
 private ActionContainer PrepareRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
 {
 var repositoryReference = repositoryAction.Reference as Pipelines.RepositoryPathReference;
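Note: both branches of the fallback auth path above build the same kind of header, a Basic credential whose user part is either "PAT" or "x-access-token". A minimal sketch of that construction, with illustrative names:

    using System;
    using System.Net.Http.Headers;
    using System.Text;

    static class AuthHeaderSketch
    {
        // Same shape as the hunk above: "PAT:{token}" for a PAT, "x-access-token:{token}" otherwise.
        public static AuthenticationHeaderValue Basic(string userPrefix, string token)
        {
            var raw = $"{userPrefix}:{token}";
            var encoded = Convert.ToBase64String(Encoding.UTF8.GetBytes(raw));
            return new AuthenticationHeaderValue("Basic", encoded);
        }
    }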
@@ -655,12 +996,21 @@ namespace GitHub.Runner.Worker
 // find the docker file or action.yml file
 var dockerFile = Path.Combine(actionEntryDirectory, "Dockerfile");
 var dockerFileLowerCase = Path.Combine(actionEntryDirectory, "dockerfile");
-var actionManifest = Path.Combine(actionEntryDirectory, "action.yml");
-if (File.Exists(actionManifest))
+var actionManifest = Path.Combine(actionEntryDirectory, Constants.Path.ActionManifestYmlFile);
+var actionManifestYaml = Path.Combine(actionEntryDirectory, Constants.Path.ActionManifestYamlFile);
+if (File.Exists(actionManifest) || File.Exists(actionManifestYaml))
 {
 executionContext.Debug($"action.yml for action: '{actionManifest}'.");
 var manifestManager = HostContext.GetService<IActionManifestManager>();
-var actionDefinitionData = manifestManager.Load(executionContext, actionManifest);
+ActionDefinitionData actionDefinitionData = null;
+if (File.Exists(actionManifest))
+{
+actionDefinitionData = manifestManager.Load(executionContext, actionManifest);
+}
+else
+{
+actionDefinitionData = manifestManager.Load(executionContext, actionManifestYaml);
+}
+
 if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Container)
 {
@@ -698,6 +1048,11 @@ namespace GitHub.Runner.Worker
 Trace.Info($"Action plugin: {(actionDefinitionData.Execution as PluginActionExecutionData).Plugin}, no more preparation.");
 return null;
 }
+else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Composite)
+{
+Trace.Info($"Action composite: {(actionDefinitionData.Execution as CompositeActionExecutionData).Steps}, no more preparation.");
+return null;
+}
 else
 {
 throw new NotSupportedException(actionDefinitionData.Execution.ExecutionType.ToString());
@@ -720,7 +1075,65 @@ namespace GitHub.Runner.Worker
 else
 {
 var fullPath = IOUtil.ResolvePath(actionEntryDirectory, "."); // resolve full path without access filesystem.
-throw new InvalidOperationException($"Can't find 'action.yml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
+throw new InvalidOperationException($"Can't find 'action.yml', 'action.yaml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
+}
+}
+
+private static string GetDownloadInfoLookupKey(Pipelines.ActionStep action)
+{
+if (action.Reference.Type != Pipelines.ActionSourceType.Repository)
+{
+return null;
+}
+
+var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
+ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
+
+if (string.Equals(repositoryReference.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
+{
+return null;
+}
+
+if (!string.Equals(repositoryReference.RepositoryType, Pipelines.RepositoryTypes.GitHub, StringComparison.OrdinalIgnoreCase))
+{
+throw new NotSupportedException(repositoryReference.RepositoryType);
+}
+
+ArgUtil.NotNullOrEmpty(repositoryReference.Name, nameof(repositoryReference.Name));
+ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));
+return $"{repositoryReference.Name}@{repositoryReference.Ref}";
+}
+
+private static string GetDownloadInfoLookupKey(WebApi.ActionDownloadInfo info)
+{
+ArgUtil.NotNullOrEmpty(info.NameWithOwner, nameof(info.NameWithOwner));
+ArgUtil.NotNullOrEmpty(info.Ref, nameof(info.Ref));
+return $"{info.NameWithOwner}@{info.Ref}";
+}
+
+private AuthenticationHeaderValue CreateAuthHeader(string token)
+{
+if (string.IsNullOrEmpty(token))
+{
+return null;
+}
+
+var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
+HostContext.SecretMasker.AddValue(base64EncodingToken);
+return new AuthenticationHeaderValue("Basic", base64EncodingToken);
+}
+
+// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
+private class ActionDownloadDetails
+{
+public string ArchiveLink { get; }
+
+public Action<IExecutionContext, HttpClient> ConfigureAuthorization { get; }
+
+public ActionDownloadDetails(string archiveLink, Action<IExecutionContext, HttpClient> configureAuthorization)
+{
+ArchiveLink = archiveLink;
+ConfigureAuthorization = configureAuthorization;
 }
 }
 }
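Note: the lookup key added above is simply "{owner/name}@{ref}". A standalone sketch of the same shape (names illustrative):

    using System;

    static class LookupKeySketch
    {
        // Mirrors GetDownloadInfoLookupKey: actions are keyed by "owner/name@ref".
        public static string Key(string nameWithOwner, string @ref)
        {
            if (string.IsNullOrEmpty(nameWithOwner)) throw new ArgumentNullException(nameof(nameWithOwner));
            if (string.IsNullOrEmpty(@ref)) throw new ArgumentNullException(nameof(@ref));
            return $"{nameWithOwner}@{@ref}";
        }
    }
    // e.g. Key("actions/checkout", "v2") == "actions/checkout@v2"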
@@ -750,13 +1163,15 @@ namespace GitHub.Runner.Worker
 NodeJS,
 Plugin,
 Script,
+Composite,
 }

 public sealed class ContainerActionExecutionData : ActionExecutionData
 {
 public override ActionExecutionType ExecutionType => ActionExecutionType.Container;

-public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup);
+public override bool HasPre => !string.IsNullOrEmpty(Pre);
+public override bool HasPost => !string.IsNullOrEmpty(Post);

 public string Image { get; set; }

@@ -766,51 +1181,75 @@ namespace GitHub.Runner.Worker

 public MappingToken Environment { get; set; }

-public string Cleanup { get; set; }
+public string Pre { get; set; }
+
+public string Post { get; set; }
 }

 public sealed class NodeJSActionExecutionData : ActionExecutionData
 {
 public override ActionExecutionType ExecutionType => ActionExecutionType.NodeJS;

-public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup);
+public override bool HasPre => !string.IsNullOrEmpty(Pre);
+public override bool HasPost => !string.IsNullOrEmpty(Post);

 public string Script { get; set; }

-public string Cleanup { get; set; }
+public string Pre { get; set; }
+
+public string Post { get; set; }
 }

 public sealed class PluginActionExecutionData : ActionExecutionData
 {
 public override ActionExecutionType ExecutionType => ActionExecutionType.Plugin;

-public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup);
+public override bool HasPre => false;
+
+public override bool HasPost => !string.IsNullOrEmpty(Post);

 public string Plugin { get; set; }

-public string Cleanup { get; set; }
+public string Post { get; set; }
 }

 public sealed class ScriptActionExecutionData : ActionExecutionData
 {
 public override ActionExecutionType ExecutionType => ActionExecutionType.Script;
-
-public override bool HasCleanup => false;
+public override bool HasPre => false;
+public override bool HasPost => false;
+}
+
+public sealed class CompositeActionExecutionData : ActionExecutionData
+{
+public override ActionExecutionType ExecutionType => ActionExecutionType.Composite;
+public override bool HasPre => false;
+public override bool HasPost => false;
+public List<Pipelines.ActionStep> Steps { get; set; }
+public MappingToken Outputs { get; set; }
 }

 public abstract class ActionExecutionData
 {
+private string _initCondition = $"{Constants.Expressions.Always}()";
 private string _cleanupCondition = $"{Constants.Expressions.Always}()";

 public abstract ActionExecutionType ExecutionType { get; }

-public abstract bool HasCleanup { get; }
+public abstract bool HasPre { get; }
+public abstract bool HasPost { get; }

 public string CleanupCondition
 {
 get { return _cleanupCondition; }
 set { _cleanupCondition = value; }
 }

+public string InitCondition
+{
+get { return _initCondition; }
+set { _initCondition = value; }
+}
 }

 public class ContainerSetupInfo
@@ -847,4 +1286,3 @@ namespace GitHub.Runner.Worker
 public string ActionRepository { get; set; }
 }
 }
-
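Note: the Cleanup members are split into Pre/Post pairs here, and a CompositeActionExecutionData type now carries the parsed steps and outputs. The sketch below uses simplified stand-in types (not the runner's classes) to show how a scheduler can branch on HasPre/HasPost:

    using System;
    using System.Collections.Generic;

    // Simplified stand-ins for the execution-data types above, for illustration only.
    abstract class ExecData
    {
        public abstract bool HasPre { get; }
        public abstract bool HasPost { get; }
    }

    class NodeExecData : ExecData
    {
        public string Pre { get; set; }
        public string Script { get; set; }
        public string Post { get; set; }
        public override bool HasPre => !string.IsNullOrEmpty(Pre);
        public override bool HasPost => !string.IsNullOrEmpty(Post);
    }

    static class Scheduler
    {
        // Pre steps are queued before Main, Post steps after, mirroring the Pre/Post split above.
        public static IEnumerable<string> Stages(ExecData data)
        {
            if (data.HasPre) yield return "Pre";
            yield return "Main";
            if (data.HasPost) yield return "Post";
        }
    }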
@@ -14,6 +14,7 @@ using YamlDotNet.Core;
 using YamlDotNet.Core.Events;
 using System.Globalization;
 using System.Linq;
+using Pipelines = GitHub.DistributedTask.Pipelines;

 namespace GitHub.Runner.Worker
 {
@@ -22,17 +23,18 @@ namespace GitHub.Runner.Worker
 {
 ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);

-List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> contextData);
+DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);

-Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> contextData);
+List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);

-string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token, IDictionary<string, PipelineContextData> contextData);
+Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
+
+string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
 }

 public sealed class ActionManifestManager : RunnerService, IActionManifestManager
 {
 private TemplateSchema _actionManifestSchema;

 public override void Initialize(IHostContext hostContext)
 {
 base.Initialize(hostContext);
@@ -53,22 +55,45 @@ namespace GitHub.Runner.Worker

 public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
 {
-var context = CreateContext(executionContext, null);
+var templateContext = CreateTemplateContext(executionContext);
 ActionDefinitionData actionDefinition = new ActionDefinitionData();
+
+// Clean up file name real quick
+// Instead of using Regex which can be computationally expensive,
+// we can just remove the # of characters from the fileName according to the length of the basePath
+string basePath = HostContext.GetDirectory(WellKnownDirectory.Actions);
+string fileRelativePath = manifestFile;
+if (manifestFile.Contains(basePath))
+{
+fileRelativePath = manifestFile.Remove(0, basePath.Length + 1);
+}
+
 try
 {
 var token = default(TemplateToken);

 // Get the file ID
-var fileId = context.GetFileId(manifestFile);
+var fileId = templateContext.GetFileId(fileRelativePath);
+
+// Add this file to the FileTable in executionContext if it hasn't been added already
+// we use > since fileID is 1 indexed
+if (fileId > executionContext.Global.FileTable.Count)
+{
+executionContext.Global.FileTable.Add(fileRelativePath);
+}
+
+// Read the file
 var fileContent = File.ReadAllText(manifestFile);
 using (var stringReader = new StringReader(fileContent))
 {
-var yamlObjectReader = new YamlObjectReader(null, stringReader);
-token = TemplateReader.Read(context, "action-root", yamlObjectReader, fileId, out _);
+var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
+token = TemplateReader.Read(templateContext, "action-root", yamlObjectReader, fileId, out _);
 }

 var actionMapping = token.AssertMapping("action manifest root");
+var actionOutputs = default(MappingToken);
+var actionRunValueToken = default(TemplateToken);

 foreach (var actionPair in actionMapping)
 {
 var propertyName = actionPair.Key.AssertString($"action.yml property key");
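Note: Load() now derives a path relative to the actions directory by plain string trimming instead of a regex, and registers that relative path in the shared file table. A standalone sketch of the trimming, with an illustrative path in the comment:

    using System;

    static class RelativePathSketch
    {
        // Same idea as the hunk above: drop "<actionsRoot>/" from the front of the manifest path.
        public static string MakeRelative(string manifestFile, string actionsRoot)
        {
            return manifestFile.Contains(actionsRoot)
                ? manifestFile.Remove(0, actionsRoot.Length + 1)
                : manifestFile;
        }
    }
    // MakeRelative("/runner/_work/_actions/owner/repo/v1/action.yml", "/runner/_work/_actions")
    //   => "owner/repo/v1/action.yml"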
@@ -79,44 +104,56 @@ namespace GitHub.Runner.Worker
 actionDefinition.Name = actionPair.Value.AssertString("name").Value;
 break;

+case "outputs":
+actionOutputs = actionPair.Value.AssertMapping("outputs");
+break;
+
 case "description":
 actionDefinition.Description = actionPair.Value.AssertString("description").Value;
 break;

 case "inputs":
-ConvertInputs(context, actionPair.Value, actionDefinition);
+ConvertInputs(actionPair.Value, actionDefinition);
 break;

 case "runs":
-actionDefinition.Execution = ConvertRuns(context, actionPair.Value);
+// Defer runs token evaluation to after for loop to ensure that order of outputs doesn't matter.
+actionRunValueToken = actionPair.Value;
 break;

 default:
 Trace.Info($"Ignore action property {propertyName}.");
 break;
 }
 }
+
+// Evaluate Runs Last
+if (actionRunValueToken != null)
+{
+actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionRunValueToken, fileRelativePath, actionOutputs);
+}
 }
 catch (Exception ex)
 {
 Trace.Error(ex);
-context.Errors.Add(ex);
+templateContext.Errors.Add(ex);
 }

-if (context.Errors.Count > 0)
+if (templateContext.Errors.Count > 0)
 {
-foreach (var error in context.Errors)
+foreach (var error in templateContext.Errors)
 {
 Trace.Error($"Action.yml load error: {error.Message}");
 executionContext.Error(error.Message);
 }

-throw new ArgumentException($"Fail to load {manifestFile}");
+throw new ArgumentException($"Fail to load {fileRelativePath}");
 }

 if (actionDefinition.Execution == null)
 {
 executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
-throw new ArgumentException($"Top level 'run:' section is required for {manifestFile}");
+throw new ArgumentException($"Top level 'runs:' section is required for {fileRelativePath}");
 }
 else
 {
@@ -126,20 +163,47 @@ namespace GitHub.Runner.Worker
 return actionDefinition;
 }

+public DictionaryContextData EvaluateCompositeOutputs(
+IExecutionContext executionContext,
+TemplateToken token,
+IDictionary<string, PipelineContextData> extraExpressionValues)
+{
+var result = default(DictionaryContextData);
+
+if (token != null)
+{
+var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
+try
+{
+token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null, omitHeader: true);
+templateContext.Errors.Check();
+result = token.ToContextData().AssertDictionary("composite outputs");
+}
+catch (Exception ex) when (!(ex is TemplateValidationException))
+{
+templateContext.Errors.Add(ex);
+}
+
+templateContext.Errors.Check();
+}
+
+return result ?? new DictionaryContextData();
+}
+
 public List<string> EvaluateContainerArguments(
 IExecutionContext executionContext,
 SequenceToken token,
-IDictionary<string, PipelineContextData> contextData)
+IDictionary<string, PipelineContextData> extraExpressionValues)
 {
 var result = new List<string>();

 if (token != null)
 {
-var context = CreateContext(executionContext, contextData);
+var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
 try
 {
-var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-args", token, 0, null, omitHeader: true);
-context.Errors.Check();
+var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null, omitHeader: true);
+templateContext.Errors.Check();

 Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");

@@ -156,10 +220,10 @@ namespace GitHub.Runner.Worker
 catch (Exception ex) when (!(ex is TemplateValidationException))
 {
 Trace.Error(ex);
-context.Errors.Add(ex);
+templateContext.Errors.Add(ex);
 }

-context.Errors.Check();
+templateContext.Errors.Check();
 }

 return result;
@@ -168,17 +232,17 @@ namespace GitHub.Runner.Worker
 public Dictionary<string, string> EvaluateContainerEnvironment(
 IExecutionContext executionContext,
 MappingToken token,
-IDictionary<string, PipelineContextData> contextData)
+IDictionary<string, PipelineContextData> extraExpressionValues)
 {
 var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

 if (token != null)
 {
-var context = CreateContext(executionContext, contextData);
+var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
 try
 {
-var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-env", token, 0, null, omitHeader: true);
-context.Errors.Check();
+var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null, omitHeader: true);
+templateContext.Errors.Check();

 Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");

@@ -200,10 +264,10 @@ namespace GitHub.Runner.Worker
 catch (Exception ex) when (!(ex is TemplateValidationException))
 {
 Trace.Error(ex);
-context.Errors.Add(ex);
+templateContext.Errors.Add(ex);
 }

-context.Errors.Check();
+templateContext.Errors.Check();
 }

 return result;
@@ -212,17 +276,16 @@ namespace GitHub.Runner.Worker
 public string EvaluateDefaultInput(
 IExecutionContext executionContext,
 string inputName,
-TemplateToken token,
-IDictionary<string, PipelineContextData> contextData)
+TemplateToken token)
 {
 string result = "";
 if (token != null)
 {
-var context = CreateContext(executionContext, contextData);
+var templateContext = CreateTemplateContext(executionContext);
 try
 {
-var evaluateResult = TemplateEvaluator.Evaluate(context, "input-default-context", token, 0, null, omitHeader: true);
-context.Errors.Check();
+var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null, omitHeader: true);
+templateContext.Errors.Check();

 Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");

@@ -232,18 +295,18 @@ namespace GitHub.Runner.Worker
 catch (Exception ex) when (!(ex is TemplateValidationException))
 {
 Trace.Error(ex);
-context.Errors.Add(ex);
+templateContext.Errors.Add(ex);
 }

-context.Errors.Check();
+templateContext.Errors.Check();
 }

 return result;
 }

-private TemplateContext CreateContext(
+private TemplateContext CreateTemplateContext(
 IExecutionContext executionContext,
-IDictionary<string, PipelineContextData> contextData)
+IDictionary<string, PipelineContextData> extraExpressionValues = null)
 {
 var result = new TemplateContext
 {
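Note: every Evaluate* method above follows the same accumulate-then-check error pattern. The sketch below reproduces that pattern with plain BCL types only; ErrorBag is an illustrative stand-in for templateContext.Errors, not the runner's type.

    using System;
    using System.Collections.Generic;

    class ErrorBag
    {
        private readonly List<Exception> _errors = new List<Exception>();
        public void Add(Exception ex) => _errors.Add(ex);
        public void Check()
        {
            if (_errors.Count > 0) throw new AggregateException(_errors);
        }
    }

    static class EvaluateSketch
    {
        public static string Evaluate(Func<string> evaluator)
        {
            var errors = new ErrorBag();
            var result = "";
            try
            {
                result = evaluator();
                errors.Check();
            }
            catch (Exception ex)
            {
                errors.Add(ex);
            }
            errors.Check(); // one throw surfaces everything that was collected
            return result;
        }
    }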
@@ -257,20 +320,42 @@ namespace GitHub.Runner.Worker
 TraceWriter = executionContext.ToTemplateTraceWriter(),
 };

-if (contextData?.Count > 0)
+// Expression values from execution context
+foreach (var pair in executionContext.ExpressionValues)
 {
-foreach (var pair in contextData)
+result.ExpressionValues[pair.Key] = pair.Value;
+}
+
+// Extra expression values
+if (extraExpressionValues?.Count > 0)
+{
+foreach (var pair in extraExpressionValues)
 {
 result.ExpressionValues[pair.Key] = pair.Value;
 }
 }
+
+// Expression functions from execution context
+foreach (var item in executionContext.ExpressionFunctions)
+{
+result.ExpressionFunctions.Add(item);
+}
+
+// Add the file table from the Execution Context
+for (var i = 0; i < executionContext.Global.FileTable.Count; i++)
+{
+result.GetFileId(executionContext.Global.FileTable[i]);
+}
+
 return result;
 }

 private ActionExecutionData ConvertRuns(
-TemplateContext context,
-TemplateToken inputsToken)
+IExecutionContext executionContext,
+TemplateContext templateContext,
+TemplateToken inputsToken,
+String fileRelativePath,
+MappingToken outputs = null)
 {
 var runsMapping = inputsToken.AssertMapping("runs");
 var usingToken = default(StringToken);
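Note: CreateTemplateContext now copies expression values and functions from the execution context and replays the shared file table so file IDs stay consistent (they are 1-indexed). A simplified, BCL-only sketch of that seeding; MiniTemplateContext is illustrative, not the runner's TemplateContext:

    using System.Collections.Generic;

    class MiniTemplateContext
    {
        public Dictionary<string, object> ExpressionValues { get; } = new Dictionary<string, object>();
        private readonly Dictionary<string, int> _fileIds = new Dictionary<string, int>();

        // 1-indexed, like the file table logic in the hunk above.
        public int GetFileId(string file)
        {
            if (!_fileIds.TryGetValue(file, out var id))
            {
                id = _fileIds.Count + 1;
                _fileIds[file] = id;
            }
            return id;
        }

        public void SeedFrom(IDictionary<string, object> values, IList<string> fileTable)
        {
            foreach (var pair in values) ExpressionValues[pair.Key] = pair.Value;
            foreach (var file in fileTable) GetFileId(file);
        }
    }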
@@ -280,9 +365,14 @@ namespace GitHub.Runner.Worker
 var envToken = default(MappingToken);
 var mainToken = default(StringToken);
 var pluginToken = default(StringToken);
+var preToken = default(StringToken);
+var preEntrypointToken = default(StringToken);
+var preIfToken = default(StringToken);
 var postToken = default(StringToken);
 var postEntrypointToken = default(StringToken);
 var postIfToken = default(StringToken);
+var steps = default(List<Pipelines.Step>);

 foreach (var run in runsMapping)
 {
 var runsKey = run.Key.AssertString("runs key").Value;
@@ -318,6 +408,20 @@ namespace GitHub.Runner.Worker
 case "post-if":
 postIfToken = run.Value.AssertString("post-if");
 break;
+case "pre":
+preToken = run.Value.AssertString("pre");
+break;
+case "pre-entrypoint":
+preEntrypointToken = run.Value.AssertString("pre-entrypoint");
+break;
+case "pre-if":
+preIfToken = run.Value.AssertString("pre-if");
+break;
+case "steps":
+var stepsToken = run.Value.AssertSequence("steps");
+steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
+templateContext.Errors.Check();
+break;
 default:
 Trace.Info($"Ignore run property {runsKey}.");
 break;
@@ -330,7 +434,7 @@ namespace GitHub.Runner.Worker
 {
 if (string.IsNullOrEmpty(imageToken?.Value))
 {
-throw new ArgumentNullException($"Image is not provided.");
+throw new ArgumentNullException($"You are using a Container Action but an image is not provided in {fileRelativePath}.");
 }
 else
 {
@@ -340,8 +444,10 @@ namespace GitHub.Runner.Worker
 Arguments = argsToken,
 EntryPoint = entrypointToken?.Value,
 Environment = envToken,
-Cleanup = postEntrypointToken?.Value,
-CleanupCondition = postIfToken?.Value
+Pre = preEntrypointToken?.Value,
+InitCondition = preIfToken?.Value ?? "always()",
+Post = postEntrypointToken?.Value,
+CleanupCondition = postIfToken?.Value ?? "always()"
 };
 }
 }
@@ -349,15 +455,32 @@ namespace GitHub.Runner.Worker
 {
 if (string.IsNullOrEmpty(mainToken?.Value))
 {
-throw new ArgumentNullException($"Entry javascript fils is not provided.");
+throw new ArgumentNullException($"You are using a JavaScript Action but there is not an entry JavaScript file provided in {fileRelativePath}.");
 }
 else
 {
 return new NodeJSActionExecutionData()
 {
 Script = mainToken.Value,
-Cleanup = postToken?.Value,
-CleanupCondition = postIfToken?.Value
+Pre = preToken?.Value,
+InitCondition = preIfToken?.Value ?? "always()",
+Post = postToken?.Value,
+CleanupCondition = postIfToken?.Value ?? "always()"
+};
+}
+}
+else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase))
+{
+if (steps == null)
+{
+throw new ArgumentNullException($"You are using a composite action but there are no steps provided in {fileRelativePath}.");
+}
+else
+{
+return new CompositeActionExecutionData()
+{
+Steps = steps.Cast<Pipelines.ActionStep>().ToList(),
+Outputs = outputs
 };
 }
 }
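Note: the validation messages added in the hunks above encode three rules: container actions need an image, JavaScript actions need an entry file, and composite actions need a parsed steps list. A standalone sketch of those checks; the "kind" strings here are illustrative and are not the manifest's actual 'using' values:

    using System;
    using System.Collections.Generic;

    static class RunsValidationSketch
    {
        public static void Validate(string image, string main, IReadOnlyList<object> steps, string kind, string file)
        {
            switch (kind)
            {
                case "container" when string.IsNullOrEmpty(image):
                    throw new ArgumentNullException($"You are using a Container Action but an image is not provided in {file}.");
                case "node" when string.IsNullOrEmpty(main):
                    throw new ArgumentNullException($"You are using a JavaScript Action but there is not an entry JavaScript file provided in {file}.");
                case "composite" when steps == null:
                    throw new ArgumentNullException($"You are using a composite action but there are no steps provided in {file}.");
            }
        }
    }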
@@ -378,7 +501,6 @@ namespace GitHub.Runner.Worker
 }

 private void ConvertInputs(
-TemplateContext context,
 TemplateToken inputsToken,
 ActionDefinitionData actionDefinition)
 {
@@ -415,566 +537,5 @@ namespace GitHub.Runner.Worker
 }
 }
 }
-
-/// <summary>
-/// Converts a YAML file into a TemplateToken
-/// </summary>
-internal sealed class YamlObjectReader : IObjectReader
-{
-internal YamlObjectReader(
-Int32? fileId,
-TextReader input)
-{
-m_fileId = fileId;
-m_parser = new Parser(input);
-}
-
-public Boolean AllowLiteral(out LiteralToken value)
-{
-if (EvaluateCurrent() is Scalar scalar)
-{
-// Tag specified
-if (!string.IsNullOrEmpty(scalar.Tag))
-{
-// String tag
-if (string.Equals(scalar.Tag, c_stringTag, StringComparison.Ordinal))
-{
-value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
-MoveNext();
-return true;
-}
-
-// Not plain style
-if (scalar.Style != ScalarStyle.Plain)
-{
-throw new NotSupportedException($"The scalar style '{scalar.Style}' on line {scalar.Start.Line} and column {scalar.Start.Column} is not valid with the tag '{scalar.Tag}'");
-}
-
-// Boolean, Float, Integer, or Null
-switch (scalar.Tag)
-{
-case c_booleanTag:
-value = ParseBoolean(scalar);
-break;
-case c_floatTag:
-value = ParseFloat(scalar);
-break;
-case c_integerTag:
-value = ParseInteger(scalar);
-break;
-case c_nullTag:
-value = ParseNull(scalar);
-break;
-default:
-throw new NotSupportedException($"Unexpected tag '{scalar.Tag}'");
-}
-
-MoveNext();
-return true;
-}
-
-// Plain style, determine type using YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
-if (scalar.Style == ScalarStyle.Plain)
-{
-if (MatchNull(scalar, out var nullToken))
-{
-value = nullToken;
-}
-else if (MatchBoolean(scalar, out var booleanToken))
-{
-value = booleanToken;
-}
-else if (MatchInteger(scalar, out var numberToken) ||
-MatchFloat(scalar, out numberToken))
-{
-value = numberToken;
-}
-else
-{
-value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
-}
-
-MoveNext();
-return true;
-}
-
-// Otherwise assume string
-value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
-MoveNext();
-return true;
-}
-
-value = default;
-return false;
-}
-
-public Boolean AllowSequenceStart(out SequenceToken value)
-{
-if (EvaluateCurrent() is SequenceStart sequenceStart)
-{
-value = new SequenceToken(m_fileId, sequenceStart.Start.Line, sequenceStart.Start.Column);
-MoveNext();
-return true;
-}
-
-value = default;
-return false;
-}
-
-public Boolean AllowSequenceEnd()
-{
-if (EvaluateCurrent() is SequenceEnd)
-{
-MoveNext();
-return true;
-}
-
-return false;
-}
-
-public Boolean AllowMappingStart(out MappingToken value)
-{
-if (EvaluateCurrent() is MappingStart mappingStart)
-{
-value = new MappingToken(m_fileId, mappingStart.Start.Line, mappingStart.Start.Column);
-MoveNext();
-return true;
-}
-
-value = default;
-return false;
-}
-
-public Boolean AllowMappingEnd()
-{
-if (EvaluateCurrent() is MappingEnd)
-{
-MoveNext();
-return true;
-}
-
-return false;
-}
-
-/// <summary>
-/// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd.
-/// </summary>
-public void ValidateEnd()
-{
-if (EvaluateCurrent() is DocumentEnd)
-{
-MoveNext();
-}
-else
-{
-throw new InvalidOperationException("Expected document end parse event");
-}
-
-if (EvaluateCurrent() is StreamEnd)
-{
-MoveNext();
-}
-else
-{
-throw new InvalidOperationException("Expected stream end parse event");
-}
-
-if (MoveNext())
-{
-throw new InvalidOperationException("Expected end of parse events");
-}
-}
-
-/// <summary>
-/// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart.
-/// </summary>
-public void ValidateStart()
-{
-if (EvaluateCurrent() != null)
-{
-throw new InvalidOperationException("Unexpected parser state");
-}
-
-if (!MoveNext())
-{
-throw new InvalidOperationException("Expected a parse event");
-}
-
-if (EvaluateCurrent() is StreamStart)
-{
-MoveNext();
-}
-else
-{
-throw new InvalidOperationException("Expected stream start parse event");
-}
-
-if (EvaluateCurrent() is DocumentStart)
-{
-MoveNext();
-}
-else
-{
-throw new InvalidOperationException("Expected document start parse event");
-}
-}
-
-private ParsingEvent EvaluateCurrent()
-{
-if (m_current == null)
-{
-m_current = m_parser.Current;
-if (m_current != null)
-{
-if (m_current is Scalar scalar)
-{
-// Verify not using achors
-if (scalar.Anchor != null)
-{
-throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{scalar.Anchor}'");
-}
-}
-else if (m_current is MappingStart mappingStart)
-{
-// Verify not using achors
-if (mappingStart.Anchor != null)
-{
-throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{mappingStart.Anchor}'");
-}
-}
-else if (m_current is SequenceStart sequenceStart)
-{
-// Verify not using achors
-if (sequenceStart.Anchor != null)
-{
-throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{sequenceStart.Anchor}'");
-}
-}
-else if (!(m_current is MappingEnd) &&
-!(m_current is SequenceEnd) &&
-!(m_current is DocumentStart) &&
-!(m_current is DocumentEnd) &&
-!(m_current is StreamStart) &&
-!(m_current is StreamEnd))
-{
-throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}");
-}
-}
-}
-
-return m_current;
-}
-
-private Boolean MoveNext()
-{
-m_current = null;
-return m_parser.MoveNext();
-}
-
-private BooleanToken ParseBoolean(Scalar scalar)
-{
-if (MatchBoolean(scalar, out var token))
-{
-return token;
-}
-
-ThrowInvalidValue(scalar, c_booleanTag); // throws
-return default;
-}
-
-private NumberToken ParseFloat(Scalar scalar)
-{
-if (MatchFloat(scalar, out var token))
-{
-return token;
-}
-
-ThrowInvalidValue(scalar, c_floatTag); // throws
-return default;
-}
-
-private NumberToken ParseInteger(Scalar scalar)
-{
-if (MatchInteger(scalar, out var token))
-{
-return token;
-}
-
-ThrowInvalidValue(scalar, c_integerTag); // throws
-return default;
-}
-
-private NullToken ParseNull(Scalar scalar)
-{
-if (MatchNull(scalar, out var token))
-{
-return token;
-}
-
-ThrowInvalidValue(scalar, c_nullTag); // throws
-return default;
-}
-
-private Boolean MatchBoolean(
-Scalar scalar,
-out BooleanToken value)
-{
-// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
-switch (scalar.Value ?? string.Empty)
-{
-case "true":
-case "True":
-case "TRUE":
-value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, true);
-return true;
-case "false":
-case "False":
-case "FALSE":
-value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, false);
-return true;
-}
-
-value = default;
-return false;
-}
-
-private Boolean MatchFloat(
-Scalar scalar,
-out NumberToken value)
-{
-// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
-var str = scalar.Value;
-if (!string.IsNullOrEmpty(str))
-{
-// Check for [-+]?(\.inf|\.Inf|\.INF)|\.nan|\.NaN|\.NAN
-switch (str)
-{
-case ".inf":
-case ".Inf":
-case ".INF":
-case "+.inf":
-case "+.Inf":
-case "+.INF":
-value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.PositiveInfinity);
-return true;
-case "-.inf":
-case "-.Inf":
-case "-.INF":
-value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NegativeInfinity);
-return true;
-case ".nan":
-case ".NaN":
-case ".NAN":
-value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NaN);
-return true;
-}
-
-// Otherwise check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?
-
-// Skip leading sign
-var index = str[0] == '-' || str[0] == '+' ? 1 : 0;
-
-// Check for integer portion
-var length = str.Length;
-var hasInteger = false;
-while (index < length && str[index] >= '0' && str[index] <= '9')
-{
-hasInteger = true;
-index++;
-}
-
-// Check for decimal point
-var hasDot = false;
-if (index < length && str[index] == '.')
-{
-hasDot = true;
-index++;
-}
-
-// Check for decimal portion
-var hasDecimal = false;
-while (index < length && str[index] >= '0' && str[index] <= '9')
-{
-hasDecimal = true;
-index++;
-}
-
-// Check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)
-if ((hasDot && hasDecimal) || hasInteger)
-{
-// Check for end
-if (index == length)
-{
-// Try parse
-if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var doubleValue))
-{
-value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
-return true;
-}
-// Otherwise exceeds range
-else
-{
-ThrowInvalidValue(scalar, c_floatTag); // throws
-}
-}
-// Check [eE][-+]?[0-9]
-else if (index < length && (str[index] == 'e' || str[index] == 'E'))
-{
-index++;
-
-// Skip sign
-if (index < length && (str[index] == '-' || str[index] == '+'))
-{
-index++;
-}
-
-// Check for exponent
-var hasExponent = false;
-while (index < length && str[index] >= '0' && str[index] <= '9')
-{
-hasExponent = true;
-index++;
-}
-
-// Check for end
-if (hasExponent && index == length)
-{
-// Try parse
-if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var doubleValue))
-{
-value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, (Double)doubleValue);
-return true;
-}
-// Otherwise exceeds range
-else
-{
-ThrowInvalidValue(scalar, c_floatTag); // throws
-}
-}
-}
-}
-}
-
-value = default;
-return false;
-}
-
-private Boolean MatchInteger(
-Scalar scalar,
-out NumberToken value)
-{
-// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
-var str = scalar.Value;
-if (!string.IsNullOrEmpty(str))
-{
-// Check for [0-9]+
-var firstChar = str[0];
-if (firstChar >= '0' && firstChar <= '9' &&
-str.Skip(1).All(x => x >= '0' && x <= '9'))
-{
-// Try parse
-if (Double.TryParse(str, NumberStyles.None, CultureInfo.InvariantCulture, out var doubleValue))
-{
-value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
-return true;
-}
-
-// Otherwise exceeds range
-ThrowInvalidValue(scalar, c_integerTag); // throws
-}
-// Check for (-|+)[0-9]+
-else if ((firstChar == '-' || firstChar == '+') &&
-str.Length > 1 &&
-str.Skip(1).All(x => x >= '0' && x <= '9'))
-{
-// Try parse
-if (Double.TryParse(str, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out var doubleValue))
-{
-value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
-return true;
-}
-
-// Otherwise exceeds range
-ThrowInvalidValue(scalar, c_integerTag); // throws
-}
-// Check for 0x[0-9a-fA-F]+
-else if (firstChar == '0' &&
-str.Length > 2 &&
-str[1] == 'x' &&
-str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F')))
-{
-// Try parse
-if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integerValue))
-{
-value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
-return true;
-}
-
-// Otherwise exceeds range
-ThrowInvalidValue(scalar, c_integerTag); // throws
-}
-// Check for 0o[0-9]+
-else if (firstChar == '0' &&
-str.Length > 2 &&
-str[1] == 'o' &&
-str.Skip(2).All(x => x >= '0' && x <= '7'))
-{
-// Try parse
-var integerValue = default(Int32);
-try
-{
-integerValue = Convert.ToInt32(str.Substring(2), 8);
-}
-// Otherwise exceeds range
-catch (Exception)
-{
-ThrowInvalidValue(scalar, c_integerTag); // throws
-}
-
-value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
-return true;
-}
-}
-
-value = default;
-return false;
-}
-
-private Boolean MatchNull(
-Scalar scalar,
-out NullToken value)
-{
-// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
-switch (scalar.Value ?? string.Empty)
-{
-case "":
-case "null":
-case "Null":
-case "NULL":
-case "~":
-value = new NullToken(m_fileId, scalar.Start.Line, scalar.Start.Column);
-return true;
-}
-
-value = default;
-return false;
-}
-
-private void ThrowInvalidValue(
-Scalar scalar,
-String tag)
-{
-throw new NotSupportedException($"The value '{scalar.Value}' on line {scalar.Start.Line} and column {scalar.Start.Column} is invalid for the type '{scalar.Tag}'");
-}
-
-private const String c_booleanTag = "tag:yaml.org,2002:bool";
-private const String c_floatTag = "tag:yaml.org,2002:float";
-private const String c_integerTag = "tag:yaml.org,2002:int";
-private const String c_nullTag = "tag:yaml.org,2002:null";
-private const String c_stringTag = "tag:yaml.org,2002:string";
-private readonly Int32? m_fileId;
-private readonly Parser m_parser;
-private ParsingEvent m_current;
-}
-
 }

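Note: the class removed above classifies plain YAML scalars according to the YAML 1.2 core schema (null, boolean, number, otherwise string). The sketch below is a deliberately reduced, BCL-only version of that classification; it omits the hex, octal, infinity and NaN handling the full reader performs.

    using System.Globalization;

    static class ScalarSketch
    {
        public static object Classify(string scalar)
        {
            switch (scalar ?? string.Empty)
            {
                case "": case "null": case "Null": case "NULL": case "~":
                    return null;
                case "true": case "True": case "TRUE":
                    return true;
                case "false": case "False": case "FALSE":
                    return false;
            }

            if (double.TryParse(scalar,
                    NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent,
                    CultureInfo.InvariantCulture, out var number))
            {
                return number;
            }

            return scalar; // anything else stays a string
        }
    }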
src/Runner.Worker/ActionNotFoundException.cs (new file)
@@ -0,0 +1,33 @@
+using System;
+using System.Runtime.Serialization;
+
+namespace GitHub.Runner.Worker
+{
+public class ActionNotFoundException : Exception
+{
+public ActionNotFoundException(Uri actionUri)
+: base(FormatMessage(actionUri))
+{
+}
+
+public ActionNotFoundException(string message)
+: base(message)
+{
+}
+
+public ActionNotFoundException(string message, System.Exception inner)
+: base(message, inner)
+{
+}
+
+protected ActionNotFoundException(SerializationInfo info, StreamingContext context)
+: base(info, context)
+{
+}
+
+private static string FormatMessage(Uri actionUri)
+{
+return $"An action could not be found at the URI '{actionUri}'";
+}
+}
+}
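Note: an illustrative call site for the exception type added above; the URI in the sketch is hypothetical and the snippet assumes a reference to the Runner.Worker project that defines the type.

    using System;
    using GitHub.Runner.Worker;

    static class NotFoundUsageSketch
    {
        public static void Demo()
        {
            try
            {
                throw new ActionNotFoundException(new Uri("https://example.invalid/actions/archive/v1"));
            }
            catch (ActionNotFoundException ex)
            {
                Console.WriteLine(ex.Message); // "An action could not be found at the URI '...'"
            }
        }
    }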
@@ -18,6 +18,7 @@ namespace GitHub.Runner.Worker
 {
     public enum ActionRunStage
     {
+        Pre,
         Main,
         Post,
     }
@@ -26,7 +27,7 @@ namespace GitHub.Runner.Worker
     public interface IActionRunner : IStep, IRunnerService
     {
         ActionRunStage Stage { get; set; }
-        Boolean TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context);
+        bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context);
         Pipelines.ActionStep Action { get; set; }
     }

@@ -81,20 +82,25 @@ namespace GitHub.Runner.Worker
             ActionExecutionData handlerData = definition.Data?.Execution;
             ArgUtil.NotNull(handlerData, nameof(handlerData));

+            if (handlerData.HasPre &&
+                Action.Reference is Pipelines.RepositoryPathReference repoAction &&
+                string.Equals(repoAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
+            {
+                ExecutionContext.Warning($"`pre` execution is not supported for local action from '{repoAction.Path}'");
+            }
+
             // The action has post cleanup defined.
             // we need to create timeline record for them and add them to the step list that StepRunner is using
-            if (handlerData.HasCleanup && Stage == ActionRunStage.Main)
+            if (handlerData.HasPost && (Stage == ActionRunStage.Pre || Stage == ActionRunStage.Main))
             {
-                string postDisplayName = null;
-                if (this.DisplayName.StartsWith(PipelineTemplateConstants.RunDisplayPrefix))
+                string postDisplayName = $"Post {this.DisplayName}";
+                if (Stage == ActionRunStage.Pre &&
+                    this.DisplayName.StartsWith("Pre ", StringComparison.OrdinalIgnoreCase))
                 {
-                    postDisplayName = $"Post {this.DisplayName.Substring(PipelineTemplateConstants.RunDisplayPrefix.Length)}";
+                    // Trim the leading `Pre ` from the display name.
+                    // Otherwise, we will get `Post Pre xxx` as DisplayName for the Post step.
+                    postDisplayName = $"Post {this.DisplayName.Substring("Pre ".Length)}";
                 }
-                else
-                {
-                    postDisplayName = $"Post {this.DisplayName}";
-                }

                 var repositoryReference = Action.Reference as RepositoryPathReference;
                 var pathString = string.IsNullOrEmpty(repositoryReference.Path) ? string.Empty : $"/{repositoryReference.Path}";
                 var repoString = string.IsNullOrEmpty(repositoryReference.Ref) ? $"{repositoryReference.Name}{pathString}" :
@@ -108,7 +114,7 @@ namespace GitHub.Runner.Worker
                 actionRunner.Condition = handlerData.CleanupCondition;
                 actionRunner.DisplayName = postDisplayName;

-                ExecutionContext.RegisterPostJobStep($"{actionRunner.Action.Name}_post", actionRunner);
+                ExecutionContext.RegisterPostJobStep(actionRunner);
             }

             IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
@@ -130,24 +136,28 @@ namespace GitHub.Runner.Worker
             }

             // Setup container stephost for running inside the container.
-            if (ExecutionContext.Container != null)
+            if (ExecutionContext.Global.Container != null)
             {
                 // Make sure required container is already created.
-                ArgUtil.NotNullOrEmpty(ExecutionContext.Container.ContainerId, nameof(ExecutionContext.Container.ContainerId));
+                ArgUtil.NotNullOrEmpty(ExecutionContext.Global.Container.ContainerId, nameof(ExecutionContext.Global.Container.ContainerId));
                 var containerStepHost = HostContext.CreateService<IContainerStepHost>();
-                containerStepHost.Container = ExecutionContext.Container;
+                containerStepHost.Container = ExecutionContext.Global.Container;
                 stepHost = containerStepHost;
             }

+            // Setup File Command Manager
+            var fileCommandManager = HostContext.CreateService<IFileCommandManager>();
+            fileCommandManager.InitializeFiles(ExecutionContext, null);
+
             // Load the inputs.
             ExecutionContext.Debug("Loading inputs");
-            var templateTrace = ExecutionContext.ToTemplateTraceWriter();
-            var schema = new PipelineTemplateSchemaFactory().CreateSchema();
-            var templateEvaluator = new PipelineTemplateEvaluator(templateTrace, schema);
-            var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues);
+            var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
+            var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues, ExecutionContext.ExpressionFunctions);

+            var userInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
             foreach (KeyValuePair<string, string> input in inputs)
             {
+                userInputs.Add(input.Key);
                 string message = "";
                 if (definition.Data?.Deprecated?.TryGetValue(input.Key, out message) == true)
                 {
@@ -155,41 +165,60 @@ namespace GitHub.Runner.Worker
                 }
             }

+            var validInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
+            if (handlerData.ExecutionType == ActionExecutionType.Container)
+            {
+                // container action always accept 'entryPoint' and 'args' as inputs
+                // https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepswithargs
+                validInputs.Add("entryPoint");
+                validInputs.Add("args");
+            }
             // Merge the default inputs from the definition
             if (definition.Data?.Inputs != null)
             {
                 var manifestManager = HostContext.GetService<IActionManifestManager>();
-                foreach (var input in (definition.Data?.Inputs))
+                foreach (var input in definition.Data.Inputs)
                 {
                     string key = input.Key.AssertString("action input name").Value;
+                    validInputs.Add(key);
                     if (!inputs.ContainsKey(key))
                     {
-                        var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
-                        foreach (var data in ExecutionContext.ExpressionValues)
-                        {
-                            evaluateContext[data.Key] = data.Value;
-                        }
-
-                        inputs[key] = manifestManager.EvaluateDefaultInput(ExecutionContext, key, input.Value, evaluateContext);
+                        inputs[key] = manifestManager.EvaluateDefaultInput(ExecutionContext, key, input.Value);
                     }
                 }
             }

+            // Validate inputs only for actions with action.yml
+            if (Action.Reference.Type == Pipelines.ActionSourceType.Repository)
+            {
+                var unexpectedInputs = new List<string>();
+                foreach (var input in userInputs)
+                {
+                    if (!validInputs.Contains(input))
+                    {
+                        unexpectedInputs.Add(input);
+                    }
+                }
+
+                if (unexpectedInputs.Count > 0)
+                {
+                    ExecutionContext.Warning($"Unexpected input(s) '{string.Join("', '", unexpectedInputs)}', valid inputs are ['{string.Join("', '", validInputs)}']");
+                }
+            }
+
             // Load the action environment.
             ExecutionContext.Debug("Loading env");
             var environment = new Dictionary<String, String>(VarUtil.EnvironmentVariableKeyComparer);

-            // Apply environment set using ##[set-env] first since these are job level env
-            foreach (var env in ExecutionContext.EnvironmentVariables)
+#if OS_WINDOWS
+            var envContext = ExecutionContext.ExpressionValues["env"] as DictionaryContextData;
+#else
+            var envContext = ExecutionContext.ExpressionValues["env"] as CaseSensitiveDictionaryContextData;
+#endif
+            // Apply environment from env context, env context contains job level env and action's evn block
+            foreach (var env in envContext)
             {
-                environment[env.Key] = env.Value ?? string.Empty;
-            }
-
-            // Apply action's env block later.
-            var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(Action.Environment, ExecutionContext.ExpressionValues, VarUtil.EnvironmentVariableKeyComparer);
-            foreach (var env in actionEnvironment)
-            {
-                environment[env.Key] = env.Value ?? string.Empty;
+                environment[env.Key] = env.Value.ToString();
             }

             // Apply action's intra-action state at last
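The hunk above switches the env lookup between DictionaryContextData (case-insensitive) on Windows and CaseSensitiveDictionaryContextData elsewhere. The following stand-alone sketch is not from this diff; it only illustrates the effect of that comparer choice, and it assumes OS_WINDOWS is the same project-level compilation symbol the runner defines (in a plain build the #else branch is taken).

// Illustrative sketch: env-variable name matching differs per OS.
using System;
using System.Collections.Generic;

class EnvCaseSketch
{
    static void Main()
    {
#if OS_WINDOWS
        var comparer = StringComparer.OrdinalIgnoreCase; // Windows env names are case-insensitive
#else
        var comparer = StringComparer.Ordinal;           // elsewhere they are case-sensitive
#endif
        var env = new Dictionary<string, string>(comparer) { ["Path"] = "/usr/bin" };

        // True when compiled with OS_WINDOWS defined, False otherwise.
        Console.WriteLine(env.ContainsKey("PATH"));
    }
}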
@@ -206,14 +235,22 @@ namespace GitHub.Runner.Worker
                 handlerData,
                 inputs,
                 environment,
-                ExecutionContext.Variables,
+                ExecutionContext.Global.Variables,
                 actionDirectory: definition.Directory);

             // Print out action details
             handler.PrintActionDetails(Stage);

             // Run the task.
-            await handler.RunAsync(Stage);
+            try
+            {
+                await handler.RunAsync(Stage);
+            }
+            finally
+            {
+                fileCommandManager.ProcessFiles(ExecutionContext, ExecutionContext.Global.Container);
+            }
         }

         public bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context)
@@ -297,11 +334,14 @@ namespace GitHub.Runner.Worker
                 return displayName;
             }
             // Try evaluating fully
-            var schema = new PipelineTemplateSchemaFactory().CreateSchema();
-            var templateEvaluator = new PipelineTemplateEvaluator(context.ToTemplateTraceWriter(), schema);
             try
             {
-                didFullyEvaluate = templateEvaluator.TryEvaluateStepDisplayName(tokenToParse, contextData, out displayName);
+                if (tokenToParse.CheckHasRequiredContext(contextData, context.ExpressionFunctions))
+                {
+                    var templateEvaluator = context.ToPipelineTemplateEvaluator();
+                    displayName = templateEvaluator.EvaluateStepDisplayName(tokenToParse, contextData, context.ExpressionFunctions);
+                    didFullyEvaluate = true;
+                }
             }
             catch (TemplateValidationException e)
             {
@@ -2,9 +2,9 @@
 using System.Collections.Generic;
 using System.IO;
 using GitHub.Runner.Common.Util;
-using Pipelines = GitHub.DistributedTask.Pipelines;
 using GitHub.Runner.Common;
 using GitHub.Runner.Sdk;
+using Pipelines = GitHub.DistributedTask.Pipelines;

 namespace GitHub.Runner.Worker.Container
 {
@@ -19,7 +19,6 @@ namespace GitHub.Runner.Worker.Container

         public ContainerInfo()
         {
-
         }

         public ContainerInfo(IHostContext hostContext, Pipelines.JobContainer container, bool isJobContainer = true, string networkAlias = null)
@@ -62,8 +61,11 @@ namespace GitHub.Runner.Worker.Container
                 foreach (var volume in container.Volumes)
                 {
                     UserMountVolumes[volume] = volume;
+                    MountVolumes.Add(new MountVolume(volume));
                 }
             }

+            UpdateWebProxyEnv(hostContext.WebProxy);
         }

         public string ContainerId { get; set; }
@@ -223,6 +225,26 @@ namespace GitHub.Runner.Worker.Container
             {
                 _pathMappings.Insert(0, new PathMapping(hostCommonPath, containerCommonPath));
             }

+        private void UpdateWebProxyEnv(RunnerWebProxy webProxy)
+        {
+            // Set common forms of proxy variables if configured in Runner and not set directly by container.env
+            if (!String.IsNullOrEmpty(webProxy.HttpProxyAddress))
+            {
+                ContainerEnvironmentVariables.TryAdd("HTTP_PROXY", webProxy.HttpProxyAddress);
+                ContainerEnvironmentVariables.TryAdd("http_proxy", webProxy.HttpProxyAddress);
+            }
+            if (!String.IsNullOrEmpty(webProxy.HttpsProxyAddress))
+            {
+                ContainerEnvironmentVariables.TryAdd("HTTPS_PROXY", webProxy.HttpsProxyAddress);
+                ContainerEnvironmentVariables.TryAdd("https_proxy", webProxy.HttpsProxyAddress);
+            }
+            if (!String.IsNullOrEmpty(webProxy.NoProxyString))
+            {
+                ContainerEnvironmentVariables.TryAdd("NO_PROXY", webProxy.NoProxyString);
+                ContainerEnvironmentVariables.TryAdd("no_proxy", webProxy.NoProxyString);
+            }
+        }
     }

     public class MountVolume
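The UpdateWebProxyEnv addition above relies on dictionary TryAdd, so runner-level proxy settings never overwrite values the workflow already set via container.env. A minimal, hypothetical sketch of that behavior (names and addresses invented for illustration):

using System;
using System.Collections.Generic;

class ProxyEnvSketch
{
    static void Main()
    {
        // Pretend the user set HTTP_PROXY explicitly on the container.
        var containerEnv = new Dictionary<string, string> { ["HTTP_PROXY"] = "http://user-proxy:8888" };
        string runnerProxy = "http://runner-proxy:3128"; // assumed runner-level configuration

        // TryAdd is a no-op when the key already exists, so the user's value wins;
        // the lowercase variant is missing and gets the runner value.
        containerEnv.TryAdd("HTTP_PROXY", runnerProxy);
        containerEnv.TryAdd("http_proxy", runnerProxy);

        Console.WriteLine(containerEnv["HTTP_PROXY"]); // http://user-proxy:8888
        Console.WriteLine(containerEnv["http_proxy"]); // http://runner-proxy:3128
    }
}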
@@ -17,7 +17,7 @@ namespace GitHub.Runner.Worker.Container
         string DockerInstanceLabel { get; }
         Task<DockerVersion> DockerVersion(IExecutionContext context);
         Task<int> DockerPull(IExecutionContext context, string image);
-        Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string tag);
+        Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag);
         Task<string> DockerCreate(IExecutionContext context, ContainerInfo container);
         Task<int> DockerRun(IExecutionContext context, ContainerInfo container, EventHandler<ProcessDataReceivedEventArgs> stdoutDataReceived, EventHandler<ProcessDataReceivedEventArgs> stderrDataReceived);
         Task<int> DockerStart(IExecutionContext context, string containerId);
@@ -87,9 +87,9 @@ namespace GitHub.Runner.Worker.Container
             return await ExecuteDockerCommandAsync(context, "pull", image, context.CancellationToken);
         }

-        public async Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string tag)
+        public async Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag)
         {
-            return await ExecuteDockerCommandAsync(context, "build", $"-t {tag} \"{dockerFile}\"", workingDirectory, context.CancellationToken);
+            return await ExecuteDockerCommandAsync(context, "build", $"-t {tag} -f \"{dockerFile}\" \"{dockerContext}\"", workingDirectory, context.CancellationToken);
         }

         public async Task<string> DockerCreate(IExecutionContext context, ContainerInfo container)
@@ -130,6 +130,13 @@ namespace GitHub.Runner.Worker.Container
             // Watermark for GitHub Action environment
             dockerOptions.Add("-e GITHUB_ACTIONS=true");

+            // Set CI=true when no one else already set it.
+            // CI=true is common set in most CI provider in GitHub
+            if (!container.ContainerEnvironmentVariables.ContainsKey("CI"))
+            {
+                dockerOptions.Add("-e CI=true");
+            }
+
             foreach (var volume in container.MountVolumes)
             {
                 // replace `"` with `\"` and add `"{0}"` to all path.
@@ -189,6 +196,13 @@ namespace GitHub.Runner.Worker.Container
             // Watermark for GitHub Action environment
             dockerOptions.Add("-e GITHUB_ACTIONS=true");

+            // Set CI=true when no one else already set it.
+            // CI=true is common set in most CI provider in GitHub
+            if (!container.ContainerEnvironmentVariables.ContainsKey("CI"))
+            {
+                dockerOptions.Add("-e CI=true");
+            }
+
             if (!string.IsNullOrEmpty(container.ContainerEntryPoint))
             {
                 dockerOptions.Add($"--entrypoint \"{container.ContainerEntryPoint}\"");
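The DockerBuild change above passes the Dockerfile with an explicit -f flag and adds a separate build-context argument. A small illustrative sketch of the argument string it produces (tag and paths are hypothetical, and this is only a formatting demo, not the runner's execution path):

using System;

class DockerBuildArgsSketch
{
    static string BuildArgs(string tag, string dockerFile, string dockerContext)
    {
        // Mirrors the interpolated string in the diff above.
        return $"-t {tag} -f \"{dockerFile}\" \"{dockerContext}\"";
    }

    static void Main()
    {
        Console.WriteLine("docker build " + BuildArgs(
            tag: "runner-action:latest",
            dockerFile: "/work/_actions/foo/Dockerfile",
            dockerContext: "/work/_actions/foo"));
    }
}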
@@ -47,9 +47,9 @@ namespace GitHub.Runner.Worker
                 condition: $"{PipelineTemplateConstants.Always}()",
                 displayName: "Stop containers",
                 data: data);

             executionContext.Debug($"Register post job cleanup for stopping/deleting containers.");
-            executionContext.RegisterPostJobStep(nameof(StopContainersAsync), postJobStep);
+            executionContext.RegisterPostJobStep(postJobStep);

             // Check whether we are inside a container.
             // Our container feature requires to map working directory from host to the container.
@@ -61,16 +61,11 @@ namespace GitHub.Runner.Worker
             {
                 throw new NotSupportedException("Container feature is not supported when runner is already running inside container.");
             }
 #else
-            var path = "/proc/1/cgroup";
-            // OSX does not have this file, but you cannot run OSX as a base image for docker containers currently.
-            if (File.Exists(path))
+            var initProcessCgroup = File.ReadLines("/proc/1/cgroup");
+            if (initProcessCgroup.Any(x => x.IndexOf(":/docker/", StringComparison.OrdinalIgnoreCase) >= 0))
             {
-                var initProcessCgroup = File.ReadLines(path);
-                if (initProcessCgroup.Any(x => x.IndexOf(":/docker/", StringComparison.OrdinalIgnoreCase) >= 0))
-                {
-                    throw new NotSupportedException("Container feature is not supported when runner is already running inside container.");
-                }
+                throw new NotSupportedException("Container feature is not supported when runner is already running inside container.");
             }
 #endif

@@ -96,7 +91,10 @@ namespace GitHub.Runner.Worker
 #endif

             // Check docker client/server version
+            executionContext.Output("##[group]Checking docker version");
             DockerVersion dockerVersion = await _dockerManger.DockerVersion(executionContext);
+            executionContext.Output("##[endgroup]");

             ArgUtil.NotNull(dockerVersion.ServerVersion, nameof(dockerVersion.ServerVersion));
             ArgUtil.NotNull(dockerVersion.ClientVersion, nameof(dockerVersion.ClientVersion));

@@ -116,7 +114,7 @@ namespace GitHub.Runner.Worker
             }

             // Clean up containers left by previous runs
-            executionContext.Debug($"Delete stale containers from previous jobs");
+            executionContext.Output("##[group]Clean up resources from previous jobs");
             var staleContainers = await _dockerManger.DockerPS(executionContext, $"--all --quiet --no-trunc --filter \"label={_dockerManger.DockerInstanceLabel}\"");
             foreach (var staleContainer in staleContainers)
             {
@@ -127,18 +125,20 @@ namespace GitHub.Runner.Worker
                 }
             }

-            executionContext.Debug($"Delete stale container networks from previous jobs");
             int networkPruneExitCode = await _dockerManger.DockerNetworkPrune(executionContext);
             if (networkPruneExitCode != 0)
             {
                 executionContext.Warning($"Delete stale container networks failed, docker network prune fail with exit code {networkPruneExitCode}");
             }
+            executionContext.Output("##[endgroup]");

             // Create local docker network for this job to avoid port conflict when multiple runners run on same machine.
             // All containers within a job join the same network
+            executionContext.Output("##[group]Create local container network");
             var containerNetwork = $"github_network_{Guid.NewGuid().ToString("N")}";
             await CreateContainerNetworkAsync(executionContext, containerNetwork);
             executionContext.JobContext.Container["network"] = new StringContextData(containerNetwork);
+            executionContext.Output("##[endgroup]");

             foreach (var container in containers)
             {
@@ -146,10 +146,12 @@ namespace GitHub.Runner.Worker
                 await StartContainerAsync(executionContext, container);
             }

+            executionContext.Output("##[group]Waiting for all services to be ready");
             foreach (var container in containers.Where(c => !c.IsJobContainer))
             {
                 await ContainerHealthcheck(executionContext, container);
             }
+            executionContext.Output("##[endgroup]");
         }

         public async Task StopContainersAsync(IExecutionContext executionContext, object data)
@@ -178,6 +180,10 @@ namespace GitHub.Runner.Worker
             Trace.Info($"Container name: {container.ContainerName}");
             Trace.Info($"Container image: {container.ContainerImage}");
             Trace.Info($"Container options: {container.ContainerCreateOptions}");

+            var groupName = container.IsJobContainer ? "Starting job container" : $"Starting {container.ContainerNetworkAlias} service container";
+            executionContext.Output($"##[group]{groupName}");
+
             foreach (var port in container.UserPortMappings)
             {
                 Trace.Info($"User provided port: {port.Value}");
@@ -185,6 +191,11 @@ namespace GitHub.Runner.Worker
             foreach (var volume in container.UserMountVolumes)
             {
                 Trace.Info($"User provided volume: {volume.Value}");
+                var mount = new MountVolume(volume.Value);
+                if (string.Equals(mount.SourceVolumePath, "/", StringComparison.OrdinalIgnoreCase))
+                {
+                    executionContext.Warning($"Volume mount {volume.Value} is going to mount '/' into the container which may cause file ownership change in the entire file system and cause Actions Runner to lose permission to access the disk.");
+                }
             }

             // Pull down docker image with retry up to 3 times
@@ -304,6 +315,7 @@ namespace GitHub.Runner.Worker
                 container.ContainerRuntimePath = DockerUtil.ParsePathFromConfigEnv(containerEnv);
                 executionContext.JobContext.Container["id"] = new StringContextData(container.ContainerId);
             }
+            executionContext.Output("##[endgroup]");
         }

         private async Task StopContainerAsync(IExecutionContext executionContext, ContainerInfo container)
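The #else branch above probes /proc/1/cgroup for a ":/docker/" entry to refuse the container feature when the runner itself is already inside a container. A stand-alone sketch of the same probe follows (assumes Linux; the File.Exists guard is re-added here only so the sketch is safe to run anywhere, which differs slightly from the diff):

using System;
using System.IO;
using System.Linq;

class ContainerDetectSketch
{
    static bool LooksContainerized(string cgroupPath = "/proc/1/cgroup")
    {
        if (!File.Exists(cgroupPath))
        {
            return false; // non-Linux hosts have no cgroup file
        }

        // PID 1's cgroup paths mention "/docker/" when the process tree runs in a Docker container.
        return File.ReadLines(cgroupPath)
                   .Any(line => line.IndexOf(":/docker/", StringComparison.OrdinalIgnoreCase) >= 0);
    }

    static void Main() => Console.WriteLine(LooksContainerized());
}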
@@ -86,9 +86,9 @@ namespace GitHub.Runner.Worker

             executionContext.Debug("Zipping diagnostic files.");

-            string buildNumber = executionContext.Variables.Build_Number ?? "UnknownBuildNumber";
+            string buildNumber = executionContext.Global.Variables.Build_Number ?? "UnknownBuildNumber";
             string buildName = $"Build {buildNumber}";
-            string phaseName = executionContext.Variables.System_PhaseDisplayName ?? "UnknownPhaseName";
+            string phaseName = executionContext.Global.Variables.System_PhaseDisplayName ?? "UnknownPhaseName";

             // zip the files
             string diagnosticsZipFileName = $"{buildName}-{phaseName}.zip";
@@ -1,23 +1,26 @@
 using System;
+using System.Collections;
 using System.Collections.Generic;
 using System.Collections.Specialized;
 using System.Globalization;
 using System.IO;
 using System.Linq;
+using System.Text;
+using System.Text.RegularExpressions;
 using System.Threading;
 using System.Threading.Tasks;
 using System.Web;
-using GitHub.Runner.Worker.Container;
-using GitHub.Services.WebApi;
+using GitHub.DistributedTask.Expressions2;
 using GitHub.DistributedTask.Pipelines;
 using GitHub.DistributedTask.Pipelines.ContextData;
+using GitHub.DistributedTask.Pipelines.ObjectTemplating;
 using GitHub.DistributedTask.WebApi;
 using GitHub.Runner.Common.Util;
 using GitHub.Runner.Common;
 using GitHub.Runner.Sdk;
+using GitHub.Runner.Worker.Container;
+using GitHub.Services.WebApi;
 using Newtonsoft.Json;
-using System.Text;
-using System.Collections;
 using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
 using Pipelines = GitHub.DistributedTask.Pipelines;

@@ -37,22 +40,16 @@ namespace GitHub.Runner.Worker
         string ContextName { get; }
         Task ForceCompleted { get; }
         TaskResult? Result { get; set; }
+        TaskResult? Outcome { get; set; }
         string ResultCode { get; set; }
         TaskResult? CommandResult { get; set; }
         CancellationToken CancellationToken { get; }
-        List<ServiceEndpoint> Endpoints { get; }
-
-        PlanFeatures Features { get; }
-        Variables Variables { get; }
+        GlobalContext Global { get; }
         Dictionary<string, string> IntraActionState { get; }
-        HashSet<string> OutputVariables { get; }
-        IDictionary<String, String> EnvironmentVariables { get; }
-        IDictionary<String, ContextScope> Scopes { get; }
-        StepsContext StepsContext { get; }
+        Dictionary<string, VariableValue> JobOutputs { get; }
         DictionaryContextData ExpressionValues { get; }
-        List<string> PrependPath { get; }
-        ContainerInfo Container { get; set; }
-        List<ContainerInfo> ServiceContainers { get; }
+        IList<IFunctionInfo> ExpressionFunctions { get; }
         JobContext JobContext { get; }

         // Only job level ExecutionContext has JobSteps
@@ -63,13 +60,16 @@ namespace GitHub.Runner.Worker

         bool EchoOnActionCommand { get; set; }

+        bool InsideComposite { get; }
+
+        ExecutionContext Root { get; }
+
         // Initialize
         void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token);
         void CancelToken();
-        IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null);
+        IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool insideComposite = false, CancellationTokenSource cancellationTokenSource = null);

         // logging
-        bool WriteDebug { get; }
         long Write(string tag, string message);
         void QueueAttachFile(string type, string name, string filePath);

@@ -97,17 +97,18 @@ namespace GitHub.Runner.Worker

         // others
         void ForceTaskComplete();
-        void RegisterPostJobStep(string refName, IStep step);
+        void RegisterPostJobStep(IStep step);
+        IStep CreateCompositeStep(string scopeName, IActionRunner step, DictionaryContextData inputsData, Dictionary<string, string> envData);
     }

     public sealed class ExecutionContext : RunnerService, IExecutionContext
     {
         private const int _maxIssueCount = 10;
+        private const int _throttlingDelayReportThreshold = 10 * 1000; // Don't report throttling with less than 10 seconds delay

         private readonly TimelineRecord _record = new TimelineRecord();
         private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new Dictionary<Guid, TimelineRecord>();
         private readonly object _loggerLock = new object();
-        private readonly HashSet<string> _outputvariables = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
         private readonly object _matchersLock = new object();

         private event OnMatcherChanged _onMatcherChanged;
@@ -134,18 +135,13 @@ namespace GitHub.Runner.Worker
         public string ContextName { get; private set; }
         public Task ForceCompleted => _forceCompleted.Task;
         public CancellationToken CancellationToken => _cancellationTokenSource.Token;
-        public List<ServiceEndpoint> Endpoints { get; private set; }
-        public Variables Variables { get; private set; }
         public Dictionary<string, string> IntraActionState { get; private set; }
-        public HashSet<string> OutputVariables => _outputvariables;
-        public IDictionary<String, String> EnvironmentVariables { get; private set; }
-        public IDictionary<String, ContextScope> Scopes { get; private set; }
-        public StepsContext StepsContext { get; private set; }
+        public Dictionary<string, VariableValue> JobOutputs { get; private set; }
         public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
-        public bool WriteDebug { get; private set; }
-        public List<string> PrependPath { get; private set; }
-        public ContainerInfo Container { get; set; }
-        public List<ContainerInfo> ServiceContainers { get; private set; }
+        public IList<IFunctionInfo> ExpressionFunctions { get; } = new List<IFunctionInfo>();
+        // Shared pointer across job-level execution context and step-level execution contexts
+        public GlobalContext Global { get; private set; }

         // Only job level ExecutionContext has JobSteps
         public Queue<IStep> JobSteps { get; private set; }
@@ -153,8 +149,12 @@ namespace GitHub.Runner.Worker
         // Only job level ExecutionContext has PostJobSteps
         public Stack<IStep> PostJobSteps { get; private set; }

+        // Only job level ExecutionContext has StepsWithPostRegistered
+        public HashSet<Guid> StepsWithPostRegistered { get; private set; }
+
         public bool EchoOnActionCommand { get; set; }

+        public bool InsideComposite { get; private set; }
+
         public TaskResult? Result
         {
@@ -168,6 +168,8 @@ namespace GitHub.Runner.Worker
             }
         }

+        public TaskResult? Outcome { get; set; }
+
         public TaskResult? CommandResult { get; set; }

         private string ContextType => _record.RecordType;
@@ -184,9 +186,7 @@ namespace GitHub.Runner.Worker
             }
         }

-        public PlanFeatures Features { get; private set; }
-
-        private ExecutionContext Root
+        public ExecutionContext Root
         {
             get
             {
@@ -238,23 +238,56 @@ namespace GitHub.Runner.Worker
             });
         }

-        public void RegisterPostJobStep(string refName, IStep step)
+        public void RegisterPostJobStep(IStep step)
         {
-            step.ExecutionContext = Root.CreatePostChild(step.DisplayName, refName, IntraActionState);
+            if (step is IActionRunner actionRunner && !Root.StepsWithPostRegistered.Add(actionRunner.Action.Id))
+            {
+                Trace.Info($"'post' of '{actionRunner.DisplayName}' already push to post step stack.");
+                return;
+            }
+
+            step.ExecutionContext = Root.CreatePostChild(step.DisplayName, IntraActionState);
             Root.PostJobSteps.Push(step);
         }

-        public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null)
+        /// <summary>
+        /// Helper function used in CompositeActionHandler::RunAsync to
+        /// add a child node, aka a step, to the current job to the Root.JobSteps based on the location.
+        /// </summary>
+        public IStep CreateCompositeStep(
+            string scopeName,
+            IActionRunner step,
+            DictionaryContextData inputsData,
+            Dictionary<string, string> envData)
+        {
+            step.ExecutionContext = Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, step.Action.ContextName, logger: _logger, insideComposite: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token));
+            step.ExecutionContext.ExpressionValues["inputs"] = inputsData;
+            step.ExecutionContext.ExpressionValues["steps"] = Global.StepsContext.GetScope(step.ExecutionContext.GetFullyQualifiedContextName());
+
+            // Add the composite action environment variables to each step.
+#if OS_WINDOWS
+            var envContext = new DictionaryContextData();
+#else
+            var envContext = new CaseSensitiveDictionaryContextData();
+#endif
+            foreach (var pair in envData)
+            {
+                envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
+            }
+            step.ExecutionContext.ExpressionValues["env"] = envContext;
+
+            return step;
+        }
+
+        public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool insideComposite = false, CancellationTokenSource cancellationTokenSource = null)
         {
             Trace.Entering();

             var child = new ExecutionContext();
             child.Initialize(HostContext);
+            child.Global = Global;
             child.ScopeName = scopeName;
             child.ContextName = contextName;
-            child.Features = Features;
-            child.Variables = Variables;
-            child.Endpoints = Endpoints;
             if (intraActionState == null)
             {
                 child.IntraActionState = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
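RegisterPostJobStep above uses Root.StepsWithPostRegistered, a HashSet<Guid>, so an action's post step is pushed onto the post-job stack at most once even if registration is attempted more than once (for example from both the pre and main stages). A simplified sketch of that guard, using hypothetical types and plain strings instead of IStep:

using System;
using System.Collections.Generic;

class PostStepRegistrySketch
{
    private readonly HashSet<Guid> _registered = new HashSet<Guid>();
    private readonly Stack<string> _postSteps = new Stack<string>();

    public void RegisterPost(Guid actionId, string displayName)
    {
        // HashSet<T>.Add returns false when the id was already registered,
        // which is the same short-circuit the diff performs.
        if (!_registered.Add(actionId))
        {
            Console.WriteLine($"'post' of '{displayName}' already pushed to post step stack.");
            return;
        }

        _postSteps.Push($"Post {displayName}");
    }

    static void Main()
    {
        var registry = new PostStepRegistrySketch();
        var id = Guid.NewGuid();
        registry.RegisterPost(id, "actions/checkout"); // pushed
        registry.RegisterPost(id, "actions/checkout"); // skipped
        Console.WriteLine(registry._postSteps.Count);  // 1
    }
}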
@@ -263,19 +296,16 @@ namespace GitHub.Runner.Worker
             {
                 child.IntraActionState = intraActionState;
             }
-            child.EnvironmentVariables = EnvironmentVariables;
-            child.Scopes = Scopes;
-            child.StepsContext = StepsContext;
             foreach (var pair in ExpressionValues)
             {
                 child.ExpressionValues[pair.Key] = pair.Value;
             }
-            child._cancellationTokenSource = new CancellationTokenSource();
-            child.WriteDebug = WriteDebug;
+            foreach (var item in ExpressionFunctions)
+            {
+                child.ExpressionFunctions.Add(item);
+            }
+            child._cancellationTokenSource = cancellationTokenSource ?? new CancellationTokenSource();
             child._parentExecutionContext = this;
-            child.PrependPath = PrependPath;
-            child.Container = Container;
-            child.ServiceContainers = ServiceContainers;
             child.EchoOnActionCommand = EchoOnActionCommand;

             if (recordOrder != null)
@@ -286,9 +316,17 @@ namespace GitHub.Runner.Worker
             {
                 child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder);
             }
+            if (logger != null)
+            {
+                child._logger = logger;
+            }
+            else
+            {
+                child._logger = HostContext.CreateService<IPagingLogger>();
+                child._logger.Setup(_mainTimelineId, recordId);
+            }

-            child._logger = HostContext.CreateService<IPagingLogger>();
-            child._logger.Setup(_mainTimelineId, recordId);
+            child.InsideComposite = insideComposite;

             return child;
         }
@@ -310,7 +348,7 @@ namespace GitHub.Runner.Worker
             }

             // report total delay caused by server throttling.
-            if (_totalThrottlingDelayInMilliseconds > 0)
+            if (_totalThrottlingDelayInMilliseconds > _throttlingDelayReportThreshold)
             {
                 this.Warning($"The job has experienced {TimeSpan.FromMilliseconds(_totalThrottlingDelayInMilliseconds).TotalSeconds} seconds total delay caused by server throttling.");
             }
@@ -338,10 +376,21 @@ namespace GitHub.Runner.Worker
                 }
             }

-            _cancellationTokenSource?.Dispose();
+            if (Root != this)
+            {
+                // only dispose TokenSource for step level ExecutionContext
+                _cancellationTokenSource?.Dispose();
+            }

             _logger.End();

+            // Skip if generated context name. Generated context names start with "__". After M271-ish the server will never send an empty context name.
+            if (!string.IsNullOrEmpty(ContextName) && !ContextName.StartsWith("__", StringComparison.Ordinal))
+            {
+                Global.StepsContext.SetOutcome(ScopeName, ContextName, (Outcome ?? Result ?? TaskResult.Succeeded).ToActionResult());
+                Global.StepsContext.SetConclusion(ScopeName, ContextName, (Result ?? TaskResult.Succeeded).ToActionResult());
+            }
+
             return Result.Value;
         }

@@ -398,7 +447,8 @@ namespace GitHub.Runner.Worker
         {
             ArgUtil.NotNullOrEmpty(name, nameof(name));

-            if (String.IsNullOrEmpty(ContextName))
+            // Skip if generated context name. Generated context names start with "__". After M271-ish the server will never send an empty context name.
+            if (string.IsNullOrEmpty(ContextName) || ContextName.StartsWith("__", StringComparison.Ordinal))
             {
                 reference = null;
                 return;
@@ -406,7 +456,7 @@ namespace GitHub.Runner.Worker

             // todo: restrict multiline?

-            StepsContext.SetOutput(ScopeName, ContextName, name, value, out reference);
+            Global.StepsContext.SetOutput(ScopeName, ContextName, name, value, out reference);
         }

         public void SetTimeout(TimeSpan? timeout)
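The completion path above records two step results: SetOutcome uses Outcome ?? Result ?? Succeeded while SetConclusion uses Result ?? Succeeded. A small sketch of that coalescing order follows; the enum and values are illustrative only, and the idea that Outcome may differ from Result (e.g. around continue-on-error) is an assumption here, not something this diff states.

using System;

class StepResultSketch
{
    enum TaskResult { Succeeded, Failed, Canceled }

    static void Main()
    {
        TaskResult? result = null;               // hypothetical: no explicit final result was set
        TaskResult? outcome = TaskResult.Failed; // hypothetical: an earlier recorded outcome

        // Same null-coalescing order as the diff: outcome falls back to result, then Succeeded.
        var recordedOutcome = outcome ?? result ?? TaskResult.Succeeded;
        var recordedConclusion = result ?? TaskResult.Succeeded;

        Console.WriteLine($"outcome={recordedOutcome}, conclusion={recordedConclusion}");
    }
}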
@@ -540,33 +590,35 @@ namespace GitHub.Runner.Worker

             _cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);

-            // Features
-            Features = PlanUtil.GetFeatures(message.Plan);
+            Global = new GlobalContext();
+
+            // Plan
+            Global.Plan = message.Plan;
+            Global.Features = PlanUtil.GetFeatures(message.Plan);

             // Endpoints
-            Endpoints = message.Resources.Endpoints;
+            Global.Endpoints = message.Resources.Endpoints;

             // Variables
-            Variables = new Variables(HostContext, message.Variables);
+            Global.Variables = new Variables(HostContext, message.Variables);

             // Environment variables shared across all actions
-            EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
+            Global.EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
+
+            // Job defaults shared across all actions
+            Global.JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase);
+
+            // Job Outputs
+            JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);

             // Service container info
-            ServiceContainers = new List<ContainerInfo>();
+            Global.ServiceContainers = new List<ContainerInfo>();

             // Steps context (StepsRunner manages adding the scoped steps context)
-            StepsContext = new StepsContext();
+            Global.StepsContext = new StepsContext();

-            // Scopes
-            Scopes = new Dictionary<String, ContextScope>(StringComparer.OrdinalIgnoreCase);
-            if (message.Scopes?.Count > 0)
-            {
-                foreach (var scope in message.Scopes)
-                {
-                    Scopes[scope.Name] = scope;
-                }
-            }
+            // File table
+            Global.FileTable = new List<String>(message.FileTable ?? new string[0]);

             // Expression values
             if (message.ContextData?.Count > 0)
@@ -577,16 +629,21 @@ namespace GitHub.Runner.Worker
                 }
             }

-            ExpressionValues["secrets"] = Variables.ToSecretsContext();
+            ExpressionValues["secrets"] = Global.Variables.ToSecretsContext();
             ExpressionValues["runner"] = new RunnerContext();
             ExpressionValues["job"] = new JobContext();

             Trace.Info("Initialize GitHub context");
-            var githubAccessToken = new StringContextData(Variables.Get("system.github.token"));
+            var githubAccessToken = new StringContextData(Global.Variables.Get("system.github.token"));
             var base64EncodedToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{githubAccessToken}"));
             HostContext.SecretMasker.AddValue(base64EncodedToken);
+            var githubJob = Global.Variables.Get("system.github.job");
             var githubContext = new GitHubContext();
             githubContext["token"] = githubAccessToken;
+            if (!string.IsNullOrEmpty(githubJob))
+            {
+                githubContext["job"] = new StringContextData(githubJob);
+            }
             var githubDictionary = ExpressionValues["github"].AssertDictionary("github");
             foreach (var pair in githubDictionary)
             {
@@ -603,7 +660,7 @@ namespace GitHub.Runner.Worker
 #endif

             // Prepend Path
-            PrependPath = new List<string>();
+            Global.PrependPath = new List<string>();

             // JobSteps for job ExecutionContext
             JobSteps = new Queue<IStep>();
@@ -611,6 +668,9 @@ namespace GitHub.Runner.Worker
             // PostJobSteps for job ExecutionContext
             PostJobSteps = new Stack<IStep>();

+            // StepsWithPostRegistered for job ExecutionContext
+            StepsWithPostRegistered = new HashSet<Guid>();
+
             // Job timeline record.
             InitializeTimelineRecord(
                 timelineId: message.Timeline.Id,
@@ -626,10 +686,10 @@ namespace GitHub.Runner.Worker
             _logger.Setup(_mainTimelineId, _record.Id);

             // Initialize 'echo on action command success' property, default to false, unless Step_Debug is set
-            EchoOnActionCommand = Variables.Step_Debug ?? false;
+            EchoOnActionCommand = Global.Variables.Step_Debug ?? false;

             // Verbosity (from GitHub.Step_Debug).
-            WriteDebug = Variables.Step_Debug ?? false;
+            Global.WriteDebug = Global.Variables.Step_Debug ?? false;

             // Hook up JobServerQueueThrottling event, we will log warning on server tarpit.
             _jobServerQueue.JobServerQueueThrottling += JobServerQueueThrottling_EventReceived;
@@ -657,7 +717,8 @@ namespace GitHub.Runner.Worker
                 }
             }

-            _jobServerQueue.QueueWebConsoleLine(_record.Id, msg);
+            _jobServerQueue.QueueWebConsoleLine(_record.Id, msg, totalLines);

             return totalLines;
         }

@@ -722,7 +783,7 @@ namespace GitHub.Runner.Worker
                 var owners = config.Matchers.Select(x => $"'{x.Owner}'");
                 var joinedOwners = string.Join(", ", owners);
                 // todo: loc
-                this.Output($"Added matchers: {joinedOwners}. Problem matchers scan action output for known warning or error strings and report these inline.");
+                this.Debug($"Added matchers: {joinedOwners}. Problem matchers scan action output for known warning or error strings and report these inline.");
             }
         }

@@ -764,7 +825,7 @@ namespace GitHub.Runner.Worker
                 owners = removedMatchers.Select(x => $"'{x.Owner}'");
                 var joinedOwners = string.Join(", ", owners);
                 // todo: loc
-                this.Output($"Removed matchers: {joinedOwners}");
+                this.Debug($"Removed matchers: {joinedOwners}");
             }
         }

@@ -803,7 +864,8 @@ namespace GitHub.Runner.Worker
         {
             Interlocked.Add(ref _totalThrottlingDelayInMilliseconds, Convert.ToInt64(data.Delay.TotalMilliseconds));

-            if (!_throttlingReported)
+            if (!_throttlingReported &&
+                _totalThrottlingDelayInMilliseconds > _throttlingDelayReportThreshold)
             {
                 this.Warning(string.Format("The job is currently being throttled by the server. You may experience delays in console line output, job status reporting, and action log uploads."));

@@ -811,7 +873,7 @@ namespace GitHub.Runner.Worker
             }
         }

-        private IExecutionContext CreatePostChild(string displayName, string refName, Dictionary<string, string> intraActionState)
+        private IExecutionContext CreatePostChild(string displayName, Dictionary<string, string> intraActionState)
         {
             if (!_expandedForPostJob)
             {
@@ -820,7 +882,8 @@ namespace GitHub.Runner.Worker
                 _childTimelineRecordOrder = _childTimelineRecordOrder * 2;
             }

-            return CreateChild(Guid.NewGuid(), displayName, refName, null, null, intraActionState, _childTimelineRecordOrder - Root.PostJobSteps.Count);
+            var newGuid = Guid.NewGuid();
+            return CreateChild(newGuid, displayName, newGuid.ToString("N"), null, null, intraActionState, _childTimelineRecordOrder - Root.PostJobSteps.Count);
         }
     }

@@ -828,6 +891,16 @@ namespace GitHub.Runner.Worker
|
|||||||
// Otherwise individual overloads would need to be implemented (depending on the unit test).
|
// Otherwise individual overloads would need to be implemented (depending on the unit test).
|
||||||
public static class ExecutionContextExtension
|
public static class ExecutionContextExtension
|
||||||
{
|
{
|
||||||
|
public static string GetFullyQualifiedContextName(this IExecutionContext context)
|
||||||
|
{
|
||||||
|
if (!string.IsNullOrEmpty(context.ScopeName))
|
||||||
|
{
|
||||||
|
return $"{context.ScopeName}.{context.ContextName}";
|
||||||
|
}
|
||||||
|
|
||||||
|
return context.ContextName;
|
||||||
|
}
|
||||||
|
|
||||||
public static void Error(this IExecutionContext context, Exception ex)
|
public static void Error(this IExecutionContext context, Exception ex)
|
||||||
{
|
{
|
||||||
context.Error(ex.Message);
|
context.Error(ex.Message);
|
||||||
@@ -866,7 +939,7 @@ namespace GitHub.Runner.Worker
|
|||||||
// Do not add a format string overload. See comment on ExecutionContext.Write().
|
// Do not add a format string overload. See comment on ExecutionContext.Write().
|
||||||
public static void Debug(this IExecutionContext context, string message)
|
public static void Debug(this IExecutionContext context, string message)
|
||||||
{
|
{
|
||||||
if (context.WriteDebug)
|
if (context.Global.WriteDebug)
|
||||||
{
|
{
|
||||||
var multilines = message?.Replace("\r\n", "\n")?.Split("\n");
|
var multilines = message?.Replace("\r\n", "\n")?.Split("\n");
|
||||||
if (multilines != null)
|
if (multilines != null)
|
||||||
@@ -879,6 +952,21 @@ namespace GitHub.Runner.Worker
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static IEnumerable<KeyValuePair<string, object>> ToExpressionState(this IExecutionContext context)
|
||||||
|
{
|
||||||
|
return new[] { new KeyValuePair<string, object>(nameof(IExecutionContext), context) };
|
||||||
|
}
|
||||||
|
|
||||||
|
public static PipelineTemplateEvaluator ToPipelineTemplateEvaluator(this IExecutionContext context, ObjectTemplating.ITraceWriter traceWriter = null)
|
||||||
|
{
|
||||||
|
if (traceWriter == null)
|
||||||
|
{
|
||||||
|
traceWriter = context.ToTemplateTraceWriter();
|
||||||
|
}
|
||||||
|
var schema = PipelineTemplateSchemaFactory.GetSchema();
|
||||||
|
return new PipelineTemplateEvaluator(traceWriter, schema, context.Global.FileTable);
|
||||||
|
}
|
||||||
|
|
||||||
public static ObjectTemplating.ITraceWriter ToTemplateTraceWriter(this IExecutionContext context)
|
public static ObjectTemplating.ITraceWriter ToTemplateTraceWriter(this IExecutionContext context)
|
||||||
{
|
{
|
||||||
return new TemplateTraceWriter(context);
|
return new TemplateTraceWriter(context);
|
||||||
@@ -891,6 +979,7 @@ namespace GitHub.Runner.Worker
|
|||||||
|
|
||||||
internal TemplateTraceWriter(IExecutionContext executionContext)
|
internal TemplateTraceWriter(IExecutionContext executionContext)
|
||||||
{
|
{
|
||||||
|
ArgUtil.NotNull(executionContext, nameof(executionContext));
|
||||||
_executionContext = executionContext;
|
_executionContext = executionContext;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
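The GetFullyQualifiedContextName extension added above simply joins ScopeName and ContextName with a dot when a scope is set. A minimal sketch of that behaviour outside the runner, with made-up scope and context names:

```csharp
using System;

// Sketch only; mirrors the extension method above. The names are hypothetical.
string scopeName = "__e3b0c442-composite";   // e.g. a generated composite-action scope
string contextName = "step1";                // the step's own context name

string fullyQualified = string.IsNullOrEmpty(scopeName)
    ? contextName
    : $"{scopeName}.{contextName}";

Console.WriteLine(fullyQualified);           // "__e3b0c442-composite.step1"
```

CompositeActionHandler later in this diff uses exactly this value as the child scope name for its steps.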
@@ -1,162 +0,0 @@
-using System;
-using System.Collections;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using GitHub.DistributedTask.Expressions2;
-using GitHub.DistributedTask.Expressions2.Sdk;
-using GitHub.DistributedTask.WebApi;
-using GitHub.Runner.Common;
-using GitHub.Runner.Common.Util;
-using GitHub.Runner.Sdk;
-using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
-using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
-
-namespace GitHub.Runner.Worker
-{
-[ServiceLocator(Default = typeof(ExpressionManager))]
-public interface IExpressionManager : IRunnerService
-{
-ConditionResult Evaluate(IExecutionContext context, string condition, bool hostTracingOnly = false);
-}
-
-public sealed class ExpressionManager : RunnerService, IExpressionManager
-{
-public ConditionResult Evaluate(IExecutionContext executionContext, string condition, bool hostTracingOnly = false)
-{
-ArgUtil.NotNull(executionContext, nameof(executionContext));
-
-ConditionResult result = new ConditionResult();
-var expressionTrace = new TraceWriter(Trace, hostTracingOnly ? null : executionContext);
-var tree = Parse(executionContext, expressionTrace, condition);
-var expressionResult = tree.Evaluate(expressionTrace, HostContext.SecretMasker, state: executionContext, options: null);
-result.Value = expressionResult.IsTruthy;
-result.Trace = expressionTrace.Trace;
-
-return result;
-}
-
-private static IExpressionNode Parse(IExecutionContext executionContext, TraceWriter expressionTrace, string condition)
-{
-ArgUtil.NotNull(executionContext, nameof(executionContext));
-
-if (string.IsNullOrWhiteSpace(condition))
-{
-condition = $"{PipelineTemplateConstants.Success}()";
-}
-
-var parser = new ExpressionParser();
-var namedValues = executionContext.ExpressionValues.Keys.Select(x => new NamedValueInfo<ContextValueNode>(x)).ToArray();
-var functions = new IFunctionInfo[]
-{
-new FunctionInfo<AlwaysNode>(name: Constants.Expressions.Always, minParameters: 0, maxParameters: 0),
-new FunctionInfo<CancelledNode>(name: Constants.Expressions.Cancelled, minParameters: 0, maxParameters: 0),
-new FunctionInfo<FailureNode>(name: Constants.Expressions.Failure, minParameters: 0, maxParameters: 0),
-new FunctionInfo<SuccessNode>(name: Constants.Expressions.Success, minParameters: 0, maxParameters: 0),
-};
-return parser.CreateTree(condition, expressionTrace, namedValues, functions) ?? new SuccessNode();
-}
-
-private sealed class TraceWriter : DistributedTask.Expressions2.ITraceWriter
-{
-private readonly IExecutionContext _executionContext;
-private readonly Tracing _trace;
-private readonly StringBuilder _traceBuilder = new StringBuilder();
-
-public string Trace => _traceBuilder.ToString();
-
-public TraceWriter(Tracing trace, IExecutionContext executionContext)
-{
-ArgUtil.NotNull(trace, nameof(trace));
-_trace = trace;
-_executionContext = executionContext;
-}
-
-public void Info(string message)
-{
-_trace.Info(message);
-_executionContext?.Debug(message);
-_traceBuilder.AppendLine(message);
-}
-
-public void Verbose(string message)
-{
-_trace.Verbose(message);
-_executionContext?.Debug(message);
-}
-}
-
-private sealed class AlwaysNode : Function
-{
-protected override Object EvaluateCore(EvaluationContext context, out ResultMemory resultMemory)
-{
-resultMemory = null;
-return true;
-}
-}
-
-private sealed class CancelledNode : Function
-{
-protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
-{
-resultMemory = null;
-var executionContext = evaluationContext.State as IExecutionContext;
-ArgUtil.NotNull(executionContext, nameof(executionContext));
-ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
-return jobStatus == ActionResult.Cancelled;
-}
-}
-
-private sealed class FailureNode : Function
-{
-protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
-{
-resultMemory = null;
-var executionContext = evaluationContext.State as IExecutionContext;
-ArgUtil.NotNull(executionContext, nameof(executionContext));
-ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
-return jobStatus == ActionResult.Failure;
-}
-}
-
-private sealed class SuccessNode : Function
-{
-protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
-{
-resultMemory = null;
-var executionContext = evaluationContext.State as IExecutionContext;
-ArgUtil.NotNull(executionContext, nameof(executionContext));
-ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
-return jobStatus == ActionResult.Success;
-}
-}
-
-private sealed class ContextValueNode : NamedValue
-{
-protected override Object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
-{
-resultMemory = null;
-var jobContext = evaluationContext.State as IExecutionContext;
-ArgUtil.NotNull(jobContext, nameof(jobContext));
-return jobContext.ExpressionValues[Name];
-}
-}
-}
-
-public class ConditionResult
-{
-public ConditionResult(bool value = false, string trace = null)
-{
-this.Value = value;
-this.Trace = trace;
-}
-
-public bool Value { get; set; }
-public string Trace { get; set; }
-
-public static implicit operator ConditionResult(bool value)
-{
-return new ConditionResult(value);
-}
-}
-}
src/Runner.Worker/Expressions/AlwaysFunction.cs (new file, 25 lines)
@@ -0,0 +1,25 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using GitHub.DistributedTask.Expressions2;
+using GitHub.DistributedTask.Expressions2.Sdk;
+using GitHub.DistributedTask.WebApi;
+using GitHub.Runner.Common;
+using GitHub.Runner.Common.Util;
+using GitHub.Runner.Sdk;
+using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
+using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
+
+namespace GitHub.Runner.Worker.Expressions
+{
+public sealed class AlwaysFunction : Function
+{
+protected override Object EvaluateCore(EvaluationContext context, out ResultMemory resultMemory)
+{
+resultMemory = null;
+return true;
+}
+}
+}
src/Runner.Worker/Expressions/CancelledFunction.cs (new file, 31 lines)
@@ -0,0 +1,31 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using GitHub.DistributedTask.Expressions2;
+using GitHub.DistributedTask.Expressions2.Sdk;
+using GitHub.DistributedTask.ObjectTemplating;
+using GitHub.DistributedTask.WebApi;
+using GitHub.Runner.Common;
+using GitHub.Runner.Common.Util;
+using GitHub.Runner.Sdk;
+using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
+using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
+
+namespace GitHub.Runner.Worker.Expressions
+{
+public sealed class CancelledFunction : Function
+{
+protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
+{
+resultMemory = null;
+var templateContext = evaluationContext.State as TemplateContext;
+ArgUtil.NotNull(templateContext, nameof(templateContext));
+var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
+ArgUtil.NotNull(executionContext, nameof(executionContext));
+ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
+return jobStatus == ActionResult.Cancelled;
+}
+}
+}
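CancelledFunction (and the Failure/Success variants below) recover the worker's IExecutionContext from TemplateContext.State using nameof(IExecutionContext) as the key, which pairs with the ToExpressionState extension added to ExecutionContext earlier in this diff. A rough sketch of that round trip, with a plain dictionary standing in for TemplateContext.State:

```csharp
using System.Collections.Generic;
using GitHub.DistributedTask.WebApi;   // ActionResult
using GitHub.Runner.Worker;            // IExecutionContext

// Sketch only: "state" stands in for TemplateContext.State.
IDictionary<string, object> state = new Dictionary<string, object>();

// Producer side (ToExpressionState): the worker registers the execution context under its interface name.
IExecutionContext executionContext = null; // would be the current step's context at runtime
state[nameof(IExecutionContext)] = executionContext;

// Consumer side (CancelledFunction and friends): pull it back out and read the job status.
var ctx = state[nameof(IExecutionContext)] as IExecutionContext;
ActionResult jobStatus = ctx?.JobContext.Status ?? ActionResult.Success;
bool cancelled = jobStatus == ActionResult.Cancelled;
```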
src/Runner.Worker/Expressions/FailureFunction.cs (new file, 31 lines)
@@ -0,0 +1,31 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using GitHub.DistributedTask.Expressions2;
+using GitHub.DistributedTask.Expressions2.Sdk;
+using GitHub.DistributedTask.ObjectTemplating;
+using GitHub.DistributedTask.WebApi;
+using GitHub.Runner.Common;
+using GitHub.Runner.Common.Util;
+using GitHub.Runner.Sdk;
+using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
+using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
+
+namespace GitHub.Runner.Worker.Expressions
+{
+public sealed class FailureFunction : Function
+{
+protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
+{
+resultMemory = null;
+var templateContext = evaluationContext.State as TemplateContext;
+ArgUtil.NotNull(templateContext, nameof(templateContext));
+var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
+ArgUtil.NotNull(executionContext, nameof(executionContext));
+ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
+return jobStatus == ActionResult.Failure;
+}
+}
+}
src/Runner.Worker/Expressions/HashFilesFunction.cs (new file, 138 lines)
@@ -0,0 +1,138 @@
+using System;
+using System.IO;
+using GitHub.DistributedTask.Expressions2.Sdk;
+using GitHub.DistributedTask.Pipelines.ContextData;
+using GitHub.DistributedTask.Pipelines.ObjectTemplating;
+using GitHub.Runner.Sdk;
+using System.Reflection;
+using System.Threading;
+using System.Collections.Generic;
+
+namespace GitHub.Runner.Worker.Expressions
+{
+public sealed class HashFilesFunction : Function
+{
+private const int _hashFileTimeoutSeconds = 120;
+
+protected sealed override Object EvaluateCore(
+EvaluationContext context,
+out ResultMemory resultMemory)
+{
+resultMemory = null;
+var templateContext = context.State as DistributedTask.ObjectTemplating.TemplateContext;
+ArgUtil.NotNull(templateContext, nameof(templateContext));
+templateContext.ExpressionValues.TryGetValue(PipelineTemplateConstants.GitHub, out var githubContextData);
+ArgUtil.NotNull(githubContextData, nameof(githubContextData));
+var githubContext = githubContextData as DictionaryContextData;
+ArgUtil.NotNull(githubContext, nameof(githubContext));
+githubContext.TryGetValue(PipelineTemplateConstants.Workspace, out var workspace);
+var workspaceData = workspace as StringContextData;
+ArgUtil.NotNull(workspaceData, nameof(workspaceData));
+
+string githubWorkspace = workspaceData.Value;
+bool followSymlink = false;
+List<string> patterns = new List<string>();
+var firstParameter = true;
+foreach (var parameter in Parameters)
+{
+var parameterString = parameter.Evaluate(context).ConvertToString();
+if (firstParameter)
+{
+firstParameter = false;
+if (parameterString.StartsWith("--"))
+{
+if (string.Equals(parameterString, "--follow-symbolic-links", StringComparison.OrdinalIgnoreCase))
+{
+followSymlink = true;
+continue;
+}
+else
+{
+throw new ArgumentOutOfRangeException($"Invalid glob option {parameterString}, avaliable option: '--follow-symbolic-links'.");
+}
+}
+}
+
+patterns.Add(parameterString);
+}
+
+context.Trace.Info($"Search root directory: '{githubWorkspace}'");
+context.Trace.Info($"Search pattern: '{string.Join(", ", patterns)}'");
+
+string binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
+string runnerRoot = new DirectoryInfo(binDir).Parent.FullName;
+
+string node = Path.Combine(runnerRoot, "externals", "node12", "bin", $"node{IOUtil.ExeExtension}");
+string hashFilesScript = Path.Combine(binDir, "hashFiles");
+var hashResult = string.Empty;
+var p = new ProcessInvoker(new HashFilesTrace(context.Trace));
+p.ErrorDataReceived += ((_, data) =>
+{
+if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
+{
+hashResult = data.Data.Substring(10, data.Data.Length - 20);
+context.Trace.Info($"Hash result: '{hashResult}'");
+}
+else
+{
+context.Trace.Info(data.Data);
+}
+});
+
+p.OutputDataReceived += ((_, data) =>
+{
+context.Trace.Info(data.Data);
+});
+
+var env = new Dictionary<string, string>();
+if (followSymlink)
+{
+env["followSymbolicLinks"] = "true";
+}
+env["patterns"] = string.Join(Environment.NewLine, patterns);
+
+using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(_hashFileTimeoutSeconds)))
+{
+try
+{
+int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
+fileName: node,
+arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
+environment: env,
+requireExitCodeZero: false,
+cancellationToken: tokenSource.Token).GetAwaiter().GetResult();
+
+if (exitCode != 0)
+{
+throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
+}
+}
+catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
+{
+throw new TimeoutException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') couldn't finish within {_hashFileTimeoutSeconds} seconds.");
+}
+
+return hashResult;
+}
+}
+
+private sealed class HashFilesTrace : ITraceWriter
+{
+private GitHub.DistributedTask.Expressions2.ITraceWriter _trace;
+
+public HashFilesTrace(GitHub.DistributedTask.Expressions2.ITraceWriter trace)
+{
+_trace = trace;
+}
+public void Info(string message)
+{
+_trace.Info(message);
+}
+
+public void Verbose(string message)
+{
+_trace.Info(message);
+}
+}
+}
+}
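HashFilesFunction delegates the actual globbing and hashing to the bundled node hashFiles script and recovers the result from a stderr line wrapped in literal __OUTPUT__ markers (ten characters on each side). A small sketch of that extraction step; the sample stderr line and hash value are made up:

```csharp
using System;

// Sketch of the marker parsing used above; the sample line is hypothetical.
const string marker = "__OUTPUT__";                                   // 10 characters
string line = "__OUTPUT__9f86d081884c7d659a2feaa0c55ad015__OUTPUT__"; // as emitted on stderr

string hash = string.Empty;
if (line.StartsWith(marker) && line.EndsWith(marker) && line.Length >= 2 * marker.Length)
{
    // Same arithmetic as the handler: strip the 10-char marker from both ends.
    hash = line.Substring(marker.Length, line.Length - 2 * marker.Length);
}

Console.WriteLine(hash); // 9f86d081884c7d659a2feaa0c55ad015
```

In workflow expressions this surfaces as hashFiles('glob', ...), with an optional leading '--follow-symbolic-links' argument handled by the first-parameter check above.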
src/Runner.Worker/Expressions/SuccessFunction.cs (new file, 31 lines)
@@ -0,0 +1,31 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using GitHub.DistributedTask.Expressions2;
+using GitHub.DistributedTask.Expressions2.Sdk;
+using GitHub.DistributedTask.ObjectTemplating;
+using GitHub.DistributedTask.WebApi;
+using GitHub.Runner.Common;
+using GitHub.Runner.Common.Util;
+using GitHub.Runner.Sdk;
+using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
+using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
+
+namespace GitHub.Runner.Worker.Expressions
+{
+public sealed class SuccessFunction : Function
+{
+protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
+{
+resultMemory = null;
+var templateContext = evaluationContext.State as TemplateContext;
+ArgUtil.NotNull(templateContext, nameof(templateContext));
+var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
+ArgUtil.NotNull(executionContext, nameof(executionContext));
+ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
+return jobStatus == ActionResult.Success;
+}
+}
+}
src/Runner.Worker/FileCommandManager.cs (new file, 262 lines)
@@ -0,0 +1,262 @@
+using GitHub.DistributedTask.WebApi;
+using GitHub.Runner.Worker.Container;
+using GitHub.Runner.Common;
+using GitHub.Runner.Sdk;
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.IO;
+using System.Text;
+
+namespace GitHub.Runner.Worker
+{
+[ServiceLocator(Default = typeof(FileCommandManager))]
+public interface IFileCommandManager : IRunnerService
+{
+void InitializeFiles(IExecutionContext context, ContainerInfo container);
+void ProcessFiles(IExecutionContext context, ContainerInfo container);
+
+}
+
+public sealed class FileCommandManager : RunnerService, IFileCommandManager
+{
+private const string _folderName = "_runner_file_commands";
+private List<IFileCommandExtension> _commandExtensions;
+private string _fileSuffix = String.Empty;
+private string _fileCommandDirectory;
+private Tracing _trace;
+
+public override void Initialize(IHostContext hostContext)
+{
+base.Initialize(hostContext);
+_trace = HostContext.GetTrace(nameof(FileCommandManager));
+
+_fileCommandDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Temp), _folderName);
+if (!Directory.Exists(_fileCommandDirectory))
+{
+Directory.CreateDirectory(_fileCommandDirectory);
+}
+
+var extensionManager = hostContext.GetService<IExtensionManager>();
+_commandExtensions = extensionManager.GetExtensions<IFileCommandExtension>() ?? new List<IFileCommandExtension>();
+}
+
+public void InitializeFiles(IExecutionContext context, ContainerInfo container)
+{
+var oldSuffix = _fileSuffix;
+_fileSuffix = Guid.NewGuid().ToString();
+foreach (var fileCommand in _commandExtensions)
+{
+var oldPath = Path.Combine(_fileCommandDirectory, fileCommand.FilePrefix + oldSuffix);
+if (oldSuffix != String.Empty && File.Exists(oldPath))
+{
+TryDeleteFile(oldPath);
+}
+
+var newPath = Path.Combine(_fileCommandDirectory, fileCommand.FilePrefix + _fileSuffix);
+TryDeleteFile(newPath);
+File.Create(newPath).Dispose();
+
+var pathToSet = container != null ? container.TranslateToContainerPath(newPath) : newPath;
+context.SetGitHubContext(fileCommand.ContextName, pathToSet);
+}
+}
+
+public void ProcessFiles(IExecutionContext context, ContainerInfo container)
+{
+foreach (var fileCommand in _commandExtensions)
+{
+try
+{
+fileCommand.ProcessCommand(context, Path.Combine(_fileCommandDirectory, fileCommand.FilePrefix + _fileSuffix),container);
+}
+catch (Exception ex)
+{
+context.Error($"Unable to process file command '{fileCommand.ContextName}' successfully.");
+context.Error(ex);
+context.CommandResult = TaskResult.Failed;
+}
+}
+}
+
+private bool TryDeleteFile(string path)
+{
+if (!File.Exists(path))
+{
+return true;
+}
+try
+{
+File.Delete(path);
+}
+catch (Exception e)
+{
+_trace.Warning($"Unable to delete file {path} for reason: {e.ToString()}");
+return false;
+}
+return true;
+}
+}
+
+public interface IFileCommandExtension : IExtension
+{
+string ContextName { get; }
+string FilePrefix { get; }
+
+void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container);
+}
+
+public sealed class AddPathFileCommand : RunnerService, IFileCommandExtension
+{
+public string ContextName => "path";
+public string FilePrefix => "add_path_";
+
+public Type ExtensionType => typeof(IFileCommandExtension);
+
+public void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container)
+{
+if (File.Exists(filePath))
+{
+var lines = File.ReadAllLines(filePath, Encoding.UTF8);
+foreach(var line in lines)
+{
+if (line == string.Empty)
+{
+continue;
+}
+context.Global.PrependPath.RemoveAll(x => string.Equals(x, line, StringComparison.CurrentCulture));
+context.Global.PrependPath.Add(line);
+}
+}
+}
+}
+
+public sealed class SetEnvFileCommand : RunnerService, IFileCommandExtension
+{
+public string ContextName => "env";
+public string FilePrefix => "set_env_";
+
+public Type ExtensionType => typeof(IFileCommandExtension);
+
+public void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container)
+{
+try
+{
+var text = File.ReadAllText(filePath) ?? string.Empty;
+var index = 0;
+var line = ReadLine(text, ref index);
+while (line != null)
+{
+if (!string.IsNullOrEmpty(line))
+{
+var equalsIndex = line.IndexOf("=", StringComparison.Ordinal);
+var heredocIndex = line.IndexOf("<<", StringComparison.Ordinal);
+
+// Normal style NAME=VALUE
+if (equalsIndex >= 0 && (heredocIndex < 0 || equalsIndex < heredocIndex))
+{
+var split = line.Split(new[] { '=' }, 2, StringSplitOptions.None);
+if (string.IsNullOrEmpty(line))
+{
+throw new Exception($"Invalid environment variable format '{line}'. Environment variable name must not be empty");
+}
+SetEnvironmentVariable(context, split[0], split[1]);
+}
+// Heredoc style NAME<<EOF
+else if (heredocIndex >= 0 && (equalsIndex < 0 || heredocIndex < equalsIndex))
+{
+var split = line.Split(new[] { "<<" }, 2, StringSplitOptions.None);
+if (string.IsNullOrEmpty(split[0]) || string.IsNullOrEmpty(split[1]))
+{
+throw new Exception($"Invalid environment variable format '{line}'. Environment variable name must not be empty and delimiter must not be empty");
+}
+var name = split[0];
+var delimiter = split[1];
+var startIndex = index; // Start index of the value (inclusive)
+var endIndex = index; // End index of the value (exclusive)
+var tempLine = ReadLine(text, ref index, out var newline);
+while (!string.Equals(tempLine, delimiter, StringComparison.Ordinal))
+{
+if (tempLine == null)
+{
+throw new Exception($"Invalid environment variable value. Matching delimiter not found '{delimiter}'");
+}
+endIndex = index - newline.Length;
+tempLine = ReadLine(text, ref index, out newline);
+}
+
+var value = endIndex > startIndex ? text.Substring(startIndex, endIndex - startIndex) : string.Empty;
+SetEnvironmentVariable(context, name, value);
+}
+else
+{
+throw new Exception($"Invalid environment variable format '{line}'");
+}
+}
+
+line = ReadLine(text, ref index);
+}
+}
+catch (DirectoryNotFoundException)
+{
+context.Debug($"Environment variables file does not exist '{filePath}'");
+}
+catch (FileNotFoundException)
+{
+context.Debug($"Environment variables file does not exist '{filePath}'");
+}
+}
+
+private static void SetEnvironmentVariable(
+IExecutionContext context,
+string name,
+string value)
+{
+context.Global.EnvironmentVariables[name] = value;
+context.SetEnvContext(name, value);
+context.Debug($"{name}='{value}'");
+}
+
+private static string ReadLine(
+string text,
+ref int index)
+{
+return ReadLine(text, ref index, out _);
+}
+
+private static string ReadLine(
+string text,
+ref int index,
+out string newline)
+{
+if (index >= text.Length)
+{
+newline = null;
+return null;
+}
+
+var originalIndex = index;
+var lfIndex = text.IndexOf("\n", index, StringComparison.Ordinal);
+if (lfIndex < 0)
+{
+index = text.Length;
+newline = null;
+return text.Substring(originalIndex);
+}
+
+#if OS_WINDOWS
+var crLFIndex = text.IndexOf("\r\n", index, StringComparison.Ordinal);
+if (crLFIndex >= 0 && crLFIndex < lfIndex)
+{
+index = crLFIndex + 2; // Skip over CRLF
+newline = "\r\n";
+return text.Substring(originalIndex, crLFIndex - originalIndex);
+}
+#endif
+
+index = lfIndex + 1; // Skip over LF
+newline = "\n";
+return text.Substring(originalIndex, lfIndex - originalIndex);
+}
+}
+}
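SetEnvFileCommand accepts two line formats in the env file: single-line NAME=VALUE pairs, and heredoc-style NAME<<DELIMITER blocks terminated by a line that exactly matches the delimiter. A sketch of a step producing both forms; the file path is a stand-in for the set_env_<guid> file that InitializeFiles creates:

```csharp
using System.IO;
using System.Text;

// Sketch only: write content in the two formats ProcessCommand understands.
string envFilePath = "/tmp/_runner_file_commands/set_env_example"; // hypothetical path

var sb = new StringBuilder();
sb.Append("SIMPLE_VAR=hello\n");   // NAME=VALUE form
sb.Append("MULTI_LINE<<EOF\n");    // NAME<<DELIMITER form
sb.Append("first line\n");
sb.Append("second line\n");
sb.Append("EOF\n");                // closing delimiter on its own line

File.AppendAllText(envFilePath, sb.ToString());

// After ProcessFiles runs, SIMPLE_VAR resolves to "hello" and MULTI_LINE to
// "first line\nsecond line"; both land in context.Global.EnvironmentVariables
// and the step's env context via SetEnvironmentVariable.
```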
@@ -6,18 +6,26 @@ namespace GitHub.Runner.Worker
{
public sealed class GitHubContext : DictionaryContextData, IEnvironmentContextData
{
-private readonly HashSet<string> _contextEnvWhitelist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
+private readonly HashSet<string> _contextEnvAllowlist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"action",
+"action_path",
"actor",
+"api_url",
"base_ref",
+"env",
"event_name",
"event_path",
+"graphql_url",
"head_ref",
+"job",
+"path",
"ref",
"repository",
+"repository_owner",
"run_id",
"run_number",
+"server_url",
"sha",
"workflow",
"workspace",
@@ -27,11 +35,23 @@ namespace GitHub.Runner.Worker
{
foreach (var data in this)
{
-if (_contextEnvWhitelist.Contains(data.Key) && data.Value is StringContextData value)
+if (_contextEnvAllowlist.Contains(data.Key) && data.Value is StringContextData value)
{
yield return new KeyValuePair<string, string>($"GITHUB_{data.Key.ToUpperInvariant()}", value);
}
}
}
+
+public GitHubContext ShallowCopy()
+{
+var copy = new GitHubContext();
+
+foreach (var pair in this)
+{
+copy[pair.Key] = pair.Value;
+}
+
+return copy;
+}
}
}
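Every allow-listed string entry in GitHubContext is exported to step processes as GITHUB_<KEY> with the key upper-cased, as the yield return above shows; keys outside the allowlist (for example the token) are not projected into the environment through this path. A small sketch of the mapping with made-up values:

```csharp
using System;
using System.Collections.Generic;

// Sketch of the allowlist-to-environment projection; values are illustrative.
var allowlist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
    "repository", "repository_owner", "server_url",
};
var github = new Dictionary<string, string>
{
    ["repository"] = "octocat/hello-world",
    ["repository_owner"] = "octocat",
    ["token"] = "***",   // not allow-listed, so it is not exported
};

foreach (var pair in github)
{
    if (allowlist.Contains(pair.Key))
    {
        Console.WriteLine($"GITHUB_{pair.Key.ToUpperInvariant()}={pair.Value}");
    }
}
// GITHUB_REPOSITORY=octocat/hello-world
// GITHUB_REPOSITORY_OWNER=octocat
```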
src/Runner.Worker/GlobalContext.cs (new file, 24 lines)
@@ -0,0 +1,24 @@
+using System;
+using System.Collections.Generic;
+using GitHub.DistributedTask.WebApi;
+using GitHub.Runner.Common.Util;
+using GitHub.Runner.Worker.Container;
+
+namespace GitHub.Runner.Worker
+{
+public sealed class GlobalContext
+{
+public ContainerInfo Container { get; set; }
+public List<ServiceEndpoint> Endpoints { get; set; }
+public IDictionary<String, String> EnvironmentVariables { get; set; }
+public PlanFeatures Features { get; set; }
+public IList<String> FileTable { get; set; }
+public IDictionary<String, IDictionary<String, String>> JobDefaults { get; set; }
+public TaskOrchestrationPlanReference Plan { get; set; }
+public List<string> PrependPath { get; set; }
+public List<ContainerInfo> ServiceContainers { get; set; }
+public StepsContext StepsContext { get; set; }
+public Variables Variables { get; set; }
+public bool WriteDebug { get; set; }
+}
+}
src/Runner.Worker/Handlers/CompositeActionHandler.cs (new file, 282 lines)
@@ -0,0 +1,282 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+using GitHub.DistributedTask.ObjectTemplating.Tokens;
+using GitHub.DistributedTask.Pipelines.ContextData;
+using GitHub.DistributedTask.WebApi;
+using GitHub.Runner.Common;
+using GitHub.Runner.Sdk;
+using Pipelines = GitHub.DistributedTask.Pipelines;
+
+
+namespace GitHub.Runner.Worker.Handlers
+{
+[ServiceLocator(Default = typeof(CompositeActionHandler))]
+public interface ICompositeActionHandler : IHandler
+{
+CompositeActionExecutionData Data { get; set; }
+}
+public sealed class CompositeActionHandler : Handler, ICompositeActionHandler
+{
+public CompositeActionExecutionData Data { get; set; }
+
+public async Task RunAsync(ActionRunStage stage)
+{
+// Validate args.
+Trace.Entering();
+ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
+ArgUtil.NotNull(Inputs, nameof(Inputs));
+ArgUtil.NotNull(Data.Steps, nameof(Data.Steps));
+
+// Resolve action steps
+var actionSteps = Data.Steps;
+
+// Create Context Data to reuse for each composite action step
+var inputsData = new DictionaryContextData();
+foreach (var i in Inputs)
+{
+inputsData[i.Key] = new StringContextData(i.Value);
+}
+
+// Initialize Composite Steps List of Steps
+var compositeSteps = new List<IStep>();
+
+// Temporary hack until after M271-ish. After M271-ish the server will never send an empty
+// context name. Generated context names start with "__"
+var childScopeName = ExecutionContext.GetFullyQualifiedContextName();
+if (string.IsNullOrEmpty(childScopeName))
+{
+childScopeName = $"__{Guid.NewGuid()}";
+}
+
+foreach (Pipelines.ActionStep actionStep in actionSteps)
+{
+var actionRunner = HostContext.CreateService<IActionRunner>();
+actionRunner.Action = actionStep;
+actionRunner.Stage = stage;
+actionRunner.Condition = actionStep.Condition;
+
+var step = ExecutionContext.CreateCompositeStep(childScopeName, actionRunner, inputsData, Environment);
+
+// Shallow copy github context
+var gitHubContext = step.ExecutionContext.ExpressionValues["github"] as GitHubContext;
+ArgUtil.NotNull(gitHubContext, nameof(gitHubContext));
+gitHubContext = gitHubContext.ShallowCopy();
+step.ExecutionContext.ExpressionValues["github"] = gitHubContext;
+
+// Set GITHUB_ACTION_PATH
+step.ExecutionContext.SetGitHubContext("action_path", ActionDirectory);
+
+compositeSteps.Add(step);
+}
+
+try
+{
+// This is where we run each step.
+await RunStepsAsync(compositeSteps);
+
+// Get the pointer of the correct "steps" object and pass it to the ExecutionContext so that we can process the outputs correctly
+ExecutionContext.ExpressionValues["inputs"] = inputsData;
+ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(ExecutionContext.GetFullyQualifiedContextName());
+
+ProcessCompositeActionOutputs();
+
+ExecutionContext.Global.StepsContext.ClearScope(childScopeName);
+}
+catch (Exception ex)
+{
+// Composite StepRunner should never throw exception out.
+Trace.Error($"Caught exception from composite steps {nameof(CompositeActionHandler)}: {ex}");
+ExecutionContext.Error(ex);
+ExecutionContext.Result = TaskResult.Failed;
+}
+}
+
+private void ProcessCompositeActionOutputs()
+{
+ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
+
+// Evaluate the mapped outputs value
+if (Data.Outputs != null)
+{
+// Evaluate the outputs in the steps context to easily retrieve the values
+var actionManifestManager = HostContext.GetService<IActionManifestManager>();
+
+// Format ExpressionValues to Dictionary<string, PipelineContextData>
+var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
+foreach (var pair in ExecutionContext.ExpressionValues)
+{
+evaluateContext[pair.Key] = pair.Value;
+}
+
+// Get the evluated composite outputs' values mapped to the outputs named
+DictionaryContextData actionOutputs = actionManifestManager.EvaluateCompositeOutputs(ExecutionContext, Data.Outputs, evaluateContext);
+
+// Set the outputs for the outputs object in the whole composite action
+// Each pair is structured like this
+// We ignore "description" for now
+// {
+// "the-output-name": {
+// "description": "",
+// "value": "the value"
+// },
+// ...
+// }
+foreach (var pair in actionOutputs)
+{
+var outputsName = pair.Key;
+var outputsAttributes = pair.Value as DictionaryContextData;
+outputsAttributes.TryGetValue("value", out var val);
+
+if (val != null)
+{
+var outputsValue = val as StringContextData;
+// Set output in the whole composite scope.
+if (!String.IsNullOrEmpty(outputsValue))
+{
+ExecutionContext.SetOutput(outputsName, outputsValue, out _);
+}
+else
+{
+ExecutionContext.SetOutput(outputsName, "", out _);
+}
+}
+}
+}
+}
+
+private async Task RunStepsAsync(List<IStep> compositeSteps)
+{
+ArgUtil.NotNull(compositeSteps, nameof(compositeSteps));
+
+// The parent StepsRunner of the whole Composite Action Step handles the cancellation stuff already.
+foreach (IStep step in compositeSteps)
+{
+Trace.Info($"Processing composite step: DisplayName='{step.DisplayName}'");
+
+step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(step.ExecutionContext.ScopeName);
+
+// Populate env context for each step
+Trace.Info("Initialize Env context for step");
+#if OS_WINDOWS
+var envContext = new DictionaryContextData();
+#else
+var envContext = new CaseSensitiveDictionaryContextData();
+#endif
+
+// Global env
+foreach (var pair in ExecutionContext.Global.EnvironmentVariables)
+{
+envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
+}
+
+// Stomps over with outside step env
+if (step.ExecutionContext.ExpressionValues.TryGetValue("env", out var envContextData))
+{
+#if OS_WINDOWS
+var dict = envContextData as DictionaryContextData;
+#else
+var dict = envContextData as CaseSensitiveDictionaryContextData;
+#endif
+foreach (var pair in dict)
+{
+envContext[pair.Key] = pair.Value;
+}
+}
+
+step.ExecutionContext.ExpressionValues["env"] = envContext;
+
+var actionStep = step as IActionRunner;
+
+try
+{
+// Evaluate and merge action's env block to env context
+var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
+var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, Common.Util.VarUtil.EnvironmentVariableKeyComparer);
+foreach (var env in actionEnvironment)
+{
+envContext[env.Key] = new StringContextData(env.Value ?? string.Empty);
+}
+}
+catch (Exception ex)
+{
+// fail the step since there is an evaluate error.
+Trace.Info("Caught exception in Composite Steps Runner from expression for step.env");
+// evaluateStepEnvFailed = true;
+step.ExecutionContext.Error(ex);
+step.ExecutionContext.Complete(TaskResult.Failed);
+}
+
+await RunStepAsync(step);
+
+// Directly after the step, check if the step has failed or cancelled
+// If so, return that to the output
+if (step.ExecutionContext.Result == TaskResult.Failed || step.ExecutionContext.Result == TaskResult.Canceled)
+{
+ExecutionContext.Result = step.ExecutionContext.Result;
+break;
+}
+
+// TODO: Add compat for other types of steps.
+}
+// Completion Status handled by StepsRunner for the whole Composite Action Step
+}
+
+private async Task RunStepAsync(IStep step)
+{
+// Start the step.
+Trace.Info("Starting the step.");
+step.ExecutionContext.Debug($"Starting: {step.DisplayName}");
+
+// TODO: Fix for Step Level Timeout Attributes for an individual Composite Run Step
+// For now, we are not going to support this for an individual composite run step
+
+var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
+
+await Common.Util.EncodingUtil.SetEncoding(HostContext, Trace, step.ExecutionContext.CancellationToken);
+
+try
+{
+await step.RunAsync();
+}
+catch (OperationCanceledException ex)
+{
+if (step.ExecutionContext.CancellationToken.IsCancellationRequested &&
+!ExecutionContext.Root.CancellationToken.IsCancellationRequested)
+{
+Trace.Error($"Caught timeout exception from step: {ex.Message}");
+step.ExecutionContext.Error("The action has timed out.");
+step.ExecutionContext.Result = TaskResult.Failed;
+}
+else
+{
+Trace.Error($"Caught cancellation exception from step: {ex}");
+step.ExecutionContext.Error(ex);
+step.ExecutionContext.Result = TaskResult.Canceled;
+}
+}
+catch (Exception ex)
+{
+// Log the error and fail the step.
+Trace.Error($"Caught exception from step: {ex}");
+step.ExecutionContext.Error(ex);
+step.ExecutionContext.Result = TaskResult.Failed;
+}
+
+// Merge execution context result with command result
+if (step.ExecutionContext.CommandResult != null)
+{
+step.ExecutionContext.Result = Common.Util.TaskResultUtil.MergeTaskResults(step.ExecutionContext.Result, step.ExecutionContext.CommandResult.Value);
+}
+
+Trace.Info($"Step result: {step.ExecutionContext.Result}");
+
+// Complete the step context.
+step.ExecutionContext.Debug($"Finishing: {step.DisplayName}");
+}
+}
+}
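For each composite step, RunStepsAsync assembles the env context in layers: job-level Global.EnvironmentVariables first, then whatever env the outer step already carried, then the action's own evaluated env block, with later layers overwriting earlier ones. A condensed sketch of that precedence (plain dictionaries stand in for the context data types, and the keys and values are made up):

```csharp
using System;
using System.Collections.Generic;

// Sketch of the layering in RunStepsAsync above; the real code uses
// DictionaryContextData (case-insensitive on Windows, case-sensitive elsewhere).
var envContext = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

var globalEnv = new Dictionary<string, string> { ["CI"] = "true", ["STAGE"] = "job" };
var outerStepEnv = new Dictionary<string, string> { ["STAGE"] = "outer-step" };
var actionEnvBlock = new Dictionary<string, string> { ["STAGE"] = "composite-step" };

foreach (var layer in new[] { globalEnv, outerStepEnv, actionEnvBlock })
{
    foreach (var pair in layer)
    {
        envContext[pair.Key] = pair.Value; // later layers stomp over earlier ones
    }
}

Console.WriteLine(envContext["STAGE"]); // "composite-step"
Console.WriteLine(envContext["CI"]);    // "true"
```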