Mirror of https://github.com/actions/runner.git (synced 2025-12-10 04:06:57 +00:00)

Compare commits: 80 commits
The compared range contains 80 commits, listed in the source view by SHA1 only (from 6b75179ec7 through 56529a1c2f); author and date columns are empty in the source.
.github/workflows/codeql.yml (vendored): 1 changed line
@@ -2,6 +2,7 @@ name: "Code Scanning - Action"
on:
  push:
  pull_request:
  schedule:
    - cron: '0 0 * * 0'
.github/workflows/release.yml (vendored): 33 changed lines
@@ -45,6 +45,12 @@ jobs:
  build:
    needs: check
    outputs:
      linux-x64-sha: ${{ steps.sha.outputs.linux-x64-sha256 }}
      linux-arm64-sha: ${{ steps.sha.outputs.linux-arm64-sha256 }}
      linux-arm-sha: ${{ steps.sha.outputs.linux-arm-sha256 }}
      win-x64-sha: ${{ steps.sha.outputs.win-x64-sha256 }}
      osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
    strategy:
      matrix:
        runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
@@ -101,7 +107,19 @@ jobs:
        with:
          name: runner-packages
          path: _package

      # compute shas and set as job outputs to use in release notes
      - run: brew install coreutils #needed for shasum util
        if: ${{ matrix.os == 'macOS-latest' }}
        name: Install Dependencies for SHA Calculation (osx)
      - run: |
          file=$(ls)
          sha=$(sha256sum $file | awk '{ print $1 }')
          echo "Computed sha256: $sha for $file"
          echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
        shell: bash
        id: sha
        name: Compute SHA256
        working-directory: _package
  release:
    needs: build
    runs-on: ubuntu-latest
@@ -126,11 +144,15 @@ jobs:
            const core = require('@actions/core')
            const fs = require('fs');
            const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
            const releaseNote = fs.readFileSync('${{ github.workspace }}/releaseNote.md', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion)
            var releaseNote = fs.readFileSync('${{ github.workspace }}/releaseNote.md', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion)
            releaseNote = releaseNote.replace(/<WIN_X64_SHA>/g, '${{needs.build.outputs.win-x64-sha}}')
            releaseNote = releaseNote.replace(/<OSX_X64_SHA>/g, '${{needs.build.outputs.osx-x64-sha}}')
            releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
            releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
            console.log(releaseNote)
            core.setOutput('version', runnerVersion);
            core.setOutput('note', releaseNote);
      # Create GitHub release
      - uses: actions/create-release@master
        id: createRelease
@@ -142,7 +164,6 @@ jobs:
          release_name: "v${{ steps.releaseNote.outputs.version }}"
          body: |
            ${{ steps.releaseNote.outputs.note }}
          prerelease: true

      # Upload release assets
      - name: Upload Release Asset (win-x64)
@@ -193,4 +214,4 @@ jobs:
          upload_url: ${{ steps.createRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
          asset_content_type: application/octet-stream
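The `Compute SHA256` step above hashes each packaged asset with `sha256sum` and publishes the result as a job output that the release-note template consumes. As a rough, hypothetical illustration of the same check done locally (asset name and expected value are placeholders; on macOS `sha256sum` comes from coreutils, as the workflow notes):

```bash
# Verify a downloaded runner package against the sha256 published in the release notes.
file="actions-runner-linux-x64-<version>.tar.gz"    # placeholder asset name
expected="<sha256 from the release notes>"          # placeholder value
actual=$(sha256sum "$file" | awk '{ print $1 }')
if [ "$actual" = "$expected" ]; then
  echo "OK: $file matches the published sha256"
else
  echo "MISMATCH: got $actual, expected $expected" >&2
fi
```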
.gitignore (vendored): 9 changed lines
@@ -8,10 +8,12 @@
**/*.xproj
**/*.xproj.user
**/.vs
**/.vscode
**/*.error
**/*.json.pretty
.idea/
.vscode
!.vscode/launch.json
!.vscode/tasks.json

# output
node_modules
@@ -22,7 +24,4 @@ _dotnetsdk
TestResults
TestLogs
.DS_Store
**/*.DotSettings.user

#generated
src/Runner.Sdk/BuildConstants.cs
**/*.DotSettings.user
.vscode/launch.json (vendored, new file): 57 lines
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Run [build]",
      "type": "coreclr",
      "request": "launch",
      "preLaunchTask": "build runner layout",
      "program": "${workspaceFolder}/_layout/bin/Runner.Listener",
      "args": [
        "run"
      ],
      "cwd": "${workspaceFolder}/src",
      "console": "integratedTerminal",
      "requireExactSource": false,
    },
    {
      "name": "Run",
      "type": "coreclr",
      "request": "launch",
      "program": "${workspaceFolder}/_layout/bin/Runner.Listener",
      "args": [
        "run"
      ],
      "cwd": "${workspaceFolder}/src",
      "console": "integratedTerminal",
      "requireExactSource": false,
    },
    {
      "name": "Configure",
      "type": "coreclr",
      "request": "launch",
      "preLaunchTask": "create runner layout",
      "program": "${workspaceFolder}/_layout/bin/Runner.Listener",
      "args": [
        "configure"
      ],
      "cwd": "${workspaceFolder}/src",
      "console": "integratedTerminal",
      "requireExactSource": false,
    },
    {
      "name": "Debug Worker",
      "type": "coreclr",
      "request": "attach",
      "processName": "Runner.Worker",
      "requireExactSource": false,
    },
    {
      "name": "Attach Debugger",
      "type": "coreclr",
      "request": "attach",
      "processId": "${command:pickProcess}",
      "requireExactSource": false,
    },
  ],
}
.vscode/tasks.json (vendored, new file): 33 lines
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "create runner layout",
      "detail": "Build and Copy all projects, scripts and external dependencies to _layout from src (run this the first time or after deleting _layout)",
      "command": "./dev.sh",
      "windows": {
        "command": "dev.cmd"
      },
      "args": [
        "layout"
      ],
      "options": {
        "cwd": "${workspaceFolder}/src"
      },
    },
    {
      "label": "build runner layout",
      "detail": "Build and Copy all projects to _layout from src (run this on code change)",
      "command": "./dev.sh",
      "windows": {
        "command": "dev.cmd"
      },
      "args": [
        "build"
      ],
      "options": {
        "cwd": "${workspaceFolder}/src"
      },
    }
  ],
}
CODEOWNERS (new file): 1 line
* @actions/actions-runtime
@@ -5,6 +5,7 @@
# GitHub Actions Runner

[](https://github.com/actions/runner/actions)
[](https://github.com/actions/runner/actions)

The runner is the application that runs a job from a GitHub Actions workflow. It is used by GitHub Actions in the [hosted virtual environments](https://github.com/actions/virtual-environments), or you can [self-host the runner](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners) in your own environment.
@@ -1,4 +1,4 @@
# ADR 263: Self Hosted Runner Proxies
# ADR 263: Self-Hosted Runner Proxies

**Date**: 2019-11-13

@@ -6,13 +6,13 @@

## Context

- Proxy support is required for some enterprises and organizations to start using their own self hosted runners
- While there is not a standard convention, many applications support setting proxies via the environmental variables `http_proxy`, `https_proxy`, `no_proxy`, such as curl, wget, perl, python, docker, git, R, ect
- Proxy support is required for some enterprises and organizations to start using their own self-hosted runners
- While there is not a standard convention, many applications support setting proxies via the environment variables `http_proxy`, `https_proxy`, `no_proxy`, such as curl, wget, perl, python, docker, git, and R
- Some of these applications use `HTTPS_PROXY` versus `https_proxy`, but most understand or primarily support the lowercase variant

## Decision

We will update the Runner to use the conventional environment variables for proxies: `http_proxy`, `https_proxy` and `no_proxy` if they are set.
We will update the Runner to use the conventional environment variables for proxies: `http_proxy`, `https_proxy`, and `no_proxy` if they are set.
These are described in detail below:
- `https_proxy` a proxy URL for all https traffic. It may contain basic authentication credentials. For example:
  - http://proxy.com
@@ -22,20 +22,20 @@ These are described in detail below:
  - http://proxy.com
  - http://127.0.0.1:8080
  - http://user:password@proxy.com
- `no_proxy` a comma separated list of hosts that should not use the proxy. An optional port may be specified
- `no_proxy` a comma-separated list of hosts that should not use the proxy. An optional port may be specified. For example:
  - `google.com`
  - `yahoo.com:443`
  - `google.com,bing.com`

We won't use `http_proxy` for https traffic when `https_proxy` is not set, this behavior lines up with any libcurl based tools (curl, git) and wget.
Otherwise action authors and workflow users need to adjust to differences between the runner proxy convention, and tools used by their actions and scripts.
Otherwise, action authors and workflow users need to adjust to differences between the runner proxy convention, and tools used by their actions and scripts.

Example:
Customer set `http_proxy=http://127.0.0.1:8888` and configure the runner against `https://github.com/owner/repo`, with the `https_proxy` -> `http_proxy` fallback, the runner will connect to the server without any problem. However, if a user runs `git push` to `https://github.com/owner/repo`, `git` won't use the proxy since it requires `https_proxy` to be set for any https traffic.
Customer sets `http_proxy=http://127.0.0.1:8888` and configures the runner against `https://github.com/owner/repo`, with the `https_proxy` -> `http_proxy` fallback, the runner will connect to the server without any problem. However, if a user runs `git push` to `https://github.com/owner/repo`, `git` won't use the proxy since it requires `https_proxy` to be set for any https traffic.

> `golang`, `node.js` and other dev tools from the linux community use `http_proxy` for both http and https traffic based on my research.
> `golang`, `node.js`, and other dev tools from the Linux community use `http_proxy` for both http and https traffic based on my research.

A majority of our users are using Linux where these variables are commonly required to be set by various programs. By reading these values, we simplify the process for self hosted runners to set up proxy, and expose it in a way users are already familiar with.
A majority of our users are using Linux where these variables are commonly required to be set by various programs. By reading these values, we simplify the process for self-hosted runners to set up a proxy and expose it in a way users are already familiar with.

A password provided for a proxy will be masked in the logs.

@@ -43,19 +43,19 @@ We will support the lowercase and uppercase variants, with lowercase taking prio

### No Proxy Format

While exact implementations are different per application on handle `no_proxy` env, most applications accept a comma separated list of hosts. Some accept wildcard characters (*). We are going to do exact case-insensitive matches, and not support wildcards at this time.
While exact implementations are different per application on handling `no_proxy` env, most applications accept a comma-separated list of hosts. Some accept wildcard characters (`*`). We are going to do exact case-insensitive matches, and not support wildcards at this time.
For example:
- example.com will match example.com, foo.example.com, foo.bar.example.com
- foo.example.com will match bar.foo.example.com and foo.example.com
- `example.com` will match `example.com`, `foo.example.com`, and `foo.bar.example.com`
- `foo.example.com` will match `bar.foo.example.com` and `foo.example.com`

We will not support IP addresses for `no_proxy`, only hostnames.

## Consequences

1. Enterprises and organizations needing proxy support will be able to embrace self hosted runners
2. Users will need to set these environmental variables before configuring the runner in order to use a proxy when configuring
3. The runner will read from the environmental variables during config and runtime and use the provided proxy if it exists
4. Users may need to pass these environmental variables into other applications if they do not natively take these variables
5. Action authors may need to update their workflows to react to the these environment variables
6. We will document the way of setting environmental variables for runners using the environment variables and how the runner uses them
7. Like all other secrets, users will be able to relatively easily figure out proxy password if they can modify a workflow file running on a self hosted machine
1. Enterprises and organizations needing proxy support will be able to embrace self-hosted runners
2. Users will need to set these environment variables before configuring the runner in order to use a proxy when configuring
3. The runner will read from the environment variables during config and runtime and use the provided proxy if it exists
4. Users may need to pass these environment variables into other applications if they do not natively take these variables
5. Action authors may need to update their workflows to react to these environment variables
6. We will document the way of setting environment variables for runners using the environment variables and how the runner uses them
7. Like all other secrets, users will be able to relatively easily figure out the proxy password if they can modify a workflow file running on a self-hosted machine
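With the decision above, a self-hosted runner picks its proxy settings up from these variables both at configuration time and at run time. A minimal sketch of what an operator might export before configuring (the proxy URL, hosts, and token below are placeholders, not recommendations):

```bash
# Example only: conventional proxy variables read by the runner.
export https_proxy=http://user:password@proxy.example.com:8080
export http_proxy=http://proxy.example.com:8080
export no_proxy=example.com,myghes.example.com   # exact, case-insensitive hostname matches; no wildcards, no IPs
./config.sh --url https://github.com/owner/repo --token <registration-token>
./run.sh
```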
@@ -8,7 +8,7 @@
run-actions run scripts using a platform specific shell:
`bash -eo pipefail` on non-windows, and `cmd.exe /c /d /s` on windows

The `shell` option overwrites this to allow different flags or completely different shells/interpreters
The `shell` option overrides this to allow different flags or completely different shells/interpreters

A small example is:
@@ -5,7 +5,7 @@

**Status**: Accepted

## Context

First party action `actions/cache` needs a input which is an explicit `key` used for restoring and saving the cache. For packages caching, the most comment `key` might be the hash result of contents from all `package-lock.json` under `node_modules` folder.
First party action `actions/cache` needs a input which is an explicit `key` used for restoring and saving the cache. For packages caching, the most common `key` might be the hash result of contents from all `package-lock.json` under `node_modules` folder.

There are serval different ways to get the hash `key` input for `actions/cache` action.

@@ -38,7 +38,7 @@ There are serval different ways to get the hash `key` input for `actions/cache`

`hashFiles()` will only support hashing files under the `$GITHUB_WORKSPACE` since the expression evaluated on the runner, if customer use job container or container action, the runner won't have access to file system inside the container.

`hashFiles()` will only take 1 parameters:
- `hashFiles('**/package-lock.json')` // Search files under $GITHUB_WORKSPACE and calculate a hash for them
- `hashFiles('**/package-lock.json')` // Search files under `$GITHUB_WORKSPACE` and calculate a hash for them

**Question: Do we need to support more than one match patterns?**
Ex: `hashFiles('**/package-lock.json', '!toolkit/core/package-lock.json', '!toolkit/io/package-lock.json')`
@@ -52,7 +52,7 @@ This will help customer has better experience with the `actions/cache` action's
  key: ${{hashFiles('**/package-lock.json')}}-${{github.ref}}-${{runner.os}}

For search pattern, we will use basic globbing (`*` `?` and `[]`) and globstar (`**`).
For search pattern, we will use basic globbing (`*`, `?`, and `[]`) and globstar (`**`).

Additional pattern details:
- Root relative paths with `github.workspace` (the main repo)
@@ -68,4 +68,4 @@ Hashing logic:
5. Use SHA256 to hash all stored files' hash results to get the final 64 chars hash result.

**Question: Should we include the folder structure info into the hash?**
Answer: No
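As a rough bash approximation of the hashing logic described above (hash every matched file, then hash the concatenated per-file hashes to produce the final 64-character result), assuming GNU coreutils and patterns resolved relative to `$GITHUB_WORKSPACE`; this is an illustration, not the runner's implementation:

```bash
# Approximate hashFiles('**/package-lock.json') by hand.
cd "$GITHUB_WORKSPACE"
find . -name 'package-lock.json' -type f -print0 | sort -z \
  | xargs -0 sha256sum \
  | awk '{ print $1 }' \
  | sha256sum \
  | awk '{ print $1 }'   # final 64-char hash over all per-file hashes
```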
@@ -15,7 +15,7 @@ This gives us good coverage across the board for secrets and secrets with a pref

However, we don't have great coverage for cases where the secret has a string appended to it before it is base64 encoded (i.e.: `base64($pass\n))`).

Most notably we've seen this as a result of user error where a user accidentially appends a newline or space character before encoding their secret in base64.
Most notably we've seen this as a result of user error where a user accidentally appends a newline or space character before encoding their secret in base64.

## Decision

@@ -45,4 +45,4 @@ This will result in us only revealing length or bit information when a prefix or

- In the case where a secret has a prefix or suffix added before base64 encoding, we may now reveal up to 20 bits of information and the length of the original string modulo 3, rather then the original 16 bits and no length information
- Secrets with a suffix appended before encoding will now be masked across the board. Previously it was only masked if it was a multiple of 3 characters
- Performance will suffer in a neglible way
- Performance will suffer in a negligible way
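To see why an accidental trailing newline defeats the existing masking, compare the two encodings: the literal registered for `base64($pass)` never appears inside `base64($pass\n)`. The value below is only an illustration:

```bash
# Example only; 'hunter2' stands in for a real secret value.
printf '%s'   'hunter2' | base64   # aHVudGVyMg==
printf '%s\n' 'hunter2' | base64   # aHVudGVyMgo=  (trailing newline changes the encoded tail)
```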
@@ -6,10 +6,10 @@

## Context

In addition to action's regular execution, action author may wants their action has a chance to participate in:
- Job initialize
  My Action will collect machine resource usage (CPU/RAM/Disk) during a workflow job execution, we need to start perf recorder at the begin of the job.
- Job cleanup
In addition to action's regular execution, action author may wants their action to have a chance to participate in:
- Job initialization
  My Action will collect machine resource usage (CPU/RAM/Disk) during a workflow job execution, we need to start perf recorder at the beginning of the job.
- Job cleanup
  My Action will dirty local workspace or machine environment during execution, we need to cleanup these changes at the end of the job.
  Ex: `actions/checkout@v2` will write `github.token` into local `.git/config` during execution, it has post job cleanup defined to undo the changes.

@@ -46,12 +46,12 @@ Container Action Example:
  post-if: 'success()' // Optional

Both `pre` and `post` will has default `pre-if/post-if` sets to `always()`.
Both `pre` and `post` will have default `pre-if/post-if` set to `always()`.
Setting `pre` to `always()` will make sure no matter what condition evaluate result the `main` gets at runtime, the `pre` has always run already.
`pre` executes in order of how the steps are defined.
`pre` will always be added to job steps list during job setup.
> Action referenced from local repository (`./my-action`) won't get `pre` setup correctly since the repository haven't checkout during job initialize.
> We can't use GitHub api to download the repository since there is a about 3 mins delay between `git push` and the new commit available to download using GitHub api.
> Action referenced from local repository (`./my-action`) won't get `pre` setup correctly since the repository haven't checked-out during job initialization.
> We can't use GitHub api to download the repository since there is about a 3 minute delay between `git push` and the new commit available to download using GitHub api.

`post` will be pushed into a `poststeps` stack lazily when the action's `pre` or `main` execution passed `if` condition check and about to run, you can't have an action that only contains a `post`, we will pop and run each `post` after all `pre` and `main` finished.
> Currently `post` works for both repository action (`org/repo@v1`) and local action (`./my-action`)
@@ -60,7 +60,7 @@ Valid action:
- only has `main`
- has `pre` and `main`
- has `main` and `post`
- has `pre`, `main` and `post`
- has `pre`, `main`, and `post`

Invalid action:
- only has `pre`
@@ -13,13 +13,13 @@ This is another version of [ADR275](https://github.com/actions/runner/pull/275)

## Decision

This ADR proposes that we add a `--labels` option to `config`, which could be used to add custom additional labels to the configured runner.
This ADR proposes that we add a `--labels` option to the `config`, which could be used to add custom additional labels to the configured runner.

For example, to add a single extra label the operator could run:
For example, to add a single additional label the operator could run:
```bash
./config.sh --labels mylabel
```
> Note: the current runner command line parsing and envvar override algorithm only supports a single argument (key).
> Note: the current runner command line parsing and envvar override algorithm only support a single argument (key).

This would add the label `mylabel` to the runner, and enable users to select the runner in their workflow using this label:
@@ -39,17 +39,17 @@ runs-on: [self-hosted, mylabel, anotherlabel]

It would not be possible to remove labels from an existing runner using `config.sh`, instead labels would have to be removed using the GitHub UI.

The labels argument will split on commas, trim and discard empty strings. That effectively means don't use commans in unattended config label names. Alternatively we could choose to escape commans but it's a nice to have.
The labels argument will split on commas, trim and discard empty strings. That effectively means don't use commas in unattended config label names. Alternatively, we could choose to escape commas but it's a nice to have.

## Replace

If an existing runner exists and the option to replace is chosen (interactively of via unattend as in this scenario), then the labels will be replaced / overwritten (not merged).
If an existing runner exists and the option to replace is chosen (interactively or via unattended as in this scenario), then the labels will be replaced/overwritten (not merged).

## Overriding built-in labels

Note that it is possible to register "built-in" hosted labels like `ubuntu-latest` and is not considered an error. This is an effective way for the org / runner admin to dictate by policy through registration that this set of runners will be used without having to edit all the workflow files now and in the future.
Note that it is possible to register "built-in" hosted labels like `ubuntu-latest` and is not considered an error. This is an effective way for the org/runner admin to dictate by policy through registration that this set of runners will be used without having to edit all the workflow files now and in the future.

We will also not make other restrictions such as limiting explicitly adding os / arch labels and validating. We will assume that explicit labels were added for a reason and not restricting offers the most flexibility and future proofing / compat.
We will also not make other restrictions such as limiting explicitly adding os/arch labels and validating. We will assume that explicit labels were added for a reason and not restricting offers the most flexibility and future-proofing / compatibility.

## Consequences
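Because the argument splits on commas, several labels can be supplied in one registration; a hypothetical unattended invocation (URL and token are placeholders) could look like:

```bash
# Example only: register a runner with two custom labels in a single pass.
./config.sh --unattended \
  --url https://github.com/owner/repo \
  --token <registration-token> \
  --labels mylabel,anotherlabel
```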
@@ -8,17 +8,17 @@

Customers want to be able to compose actions from actions (ex: https://github.com/actions/runner/issues/438)

An important step towards meeting this goal is to build in functionality for actions where users can simply execute any number of steps.
An important step towards meeting this goal is to build functionality for actions where users can simply execute any number of steps.

### Guiding Principles

We don't want the workflow author to need to know how the internal workings of the action work. Users shouldn't know the internal workings of the composite action (for example, `default.shell` and `default.workingDir` should not be inherited from the workflow file to the action file). When deciding how to design certain parts of composite run steps, we want to think one logical step from the consumer.
We don't want the workflow author to need to know how the internal workings of the action work. Users shouldn't know the internal workings of the composite action (for example, `default.shell` and `default.workingDir` should not be inherited from the workflow file to the action file). When deciding how to design certain parts of composite run steps, we want to treat it as one logical step for the consumer.

A composite action is treated as **one** individual job step (this is known as encapsulation).

## Decision

**In this ADR, we only support running multiple run steps in an Action.** In doing so, we build in support for mapping and flowing the inputs, outputs, and env variables (ex: All nested steps should have access to its parents' input variables and nested steps can overwrite the input variables).
**In this ADR, we only support running multiple run steps in an Action.** In doing so, we build in support for mapping and flowing the inputs, outputs, and env variables (ex: All nested steps should have access to their parents' input variables and nested steps can overwrite the input variables).

### Composite Run Steps Features
This feature supports at the top action level:
@@ -92,7 +92,7 @@ We will not support "defaults" in a composite action.

### Shell and Working-directory

For each run step in a composite action, the action author can set the `shell` and `working-directory` attributes for that step. The shell attribute is **required** for each run step because the action author does not know what the workflow author is using for the operating system so we need to explicitly prevent unknown behavior by making sure that each run step has an explicit shell **set by the action author.** On the other hand, `working-directory` is optional. Moreover, the composite action author can map in values from the `inputs` for it's `shell` and `working-directory` attributes at the step level for an action.
For each run step in a composite action, the action author can set the `shell` and `working-directory` attributes for that step. The shell attribute is **required** for each run step because the action author does not know what the workflow author is using for the operating system so we need to explicitly prevent unknown behavior by making sure that each run step has an explicit shell **set by the action author.** On the other hand, `working-directory` is optional. Moreover, the composite action author can map in values from the `inputs` for its `shell` and `working-directory` attributes at the step level for an action.

For example,

@@ -218,9 +218,9 @@ Example Output:
random-number 43243

Each of the output variables from the composite action is viewable from the workflow file that uses the composite action. In other words, every child action output(s) is viewable only by its parent using dot notation (ex `steps.foo.outputs.random-number`).
Each of the output variables from the composite action is viewable from the workflow file that uses the composite action. In other words, every child's action output(s) are only viewable by its parent using dot notation (ex `steps.foo.outputs.random-number`).

Moreover, the output ids are only accessible within the scope where it was defined. Note that in the example above, in our `workflow.yml` file, it should not have access to output id (i.e. `random-id`). The reason why we are doing this is because we don't want to require the workflow author to know the internal workings of the composite action.
Moreover, the output ids are only accessible within the scope where it was defined. Note that in the example above, in our `workflow.yml` file, it should not have access to output id (i.e. `random-id`). The reason why we are doing this is that we don't want to require the workflow author to know the internal workings of the composite action.

### Context

@@ -237,9 +237,9 @@ In the Composite Action, you'll only be able to use `::set-env::` to set environ
We'll pass the secrets from the composite action's parents (ex: the workflow file) to the composite action. Secrets can be created in the composite action with the secrets context. In the actions yaml, we'll automatically mask the secret.

### If Condition
### If-Condition

** If and needs conditions will not be supported in the composite run steps feature. It will be supported later on in a new feature. **
** `If` and `needs` conditions will not be supported in the composite run steps feature. It will be supported later on in a new feature. **

Old reasoning:

@@ -248,7 +248,7 @@ Example `workflow.yml`:
```yaml
steps:
  - run: exit 1
  - uses: user/composite@v1 # <--- this will run, as it's marked as always runing
  - uses: user/composite@v1 # <--- this will run, as it's marked as always running
    if: always()
```

@@ -269,15 +269,15 @@ runs:
    shell: bash

**We will not support "if Condition" in a composite action for now. This functionality will be focused on in a future ADR.**
**We will not support "if-condition" in a composite action for now. This functionality will be focused on in a future ADR.**

See the paragraph below for a rudimentary approach (thank you to @cybojenix for the idea, example, and explanation for this approach):

The `if` statement in the parent (in the example above, this is the `workflow.yml`) shows whether or not we should run the composite action. So, our composite action will run since the `if` condition for running the composite action is `always()`.

**Note that the if condition on the parent does not propagate to the rest of its children though.**
**Note that the "if-condition" on the parent does not propagate to the rest of its children though.**

In the child action (in this example, this is the `action.yml`), it starts with a clean slate (in other words, no imposing if conditions). Similar to the logic in the paragraph above, `echo "I will run, as my current scope is succeeding"` will run since the `if` condition checks if the previous steps **within this composite action** has not failed. `run: echo "I will not run, as my current scope is now failing"` will not run since the previous step resulted in an error and by default, the if expression is set to `success()` if the if condition is not set for a step.
In the child action (in this example, this is the `action.yml`), it starts with a clean slate (in other words, no imposing if-conditions). Similar to the logic in the paragraph above, `echo "I will run, as my current scope is succeeding"` will run since the `if` condition checks if the previous steps **within this composite action** have not failed. `run: echo "I will not run, as my current scope is now failing"` will not run since the previous step resulted in an error and by default, the if expression is set to `success()` if the if-condition is not set for a step.

What if a step has `cancelled()`? We do the opposite of our approach above if `cancelled()` is used for any of our composite run steps. We will cancel any step that has this condition if the workflow is cancelled at all.
@@ -314,13 +314,13 @@ runs:

**We will not support "timeout-minutes" in a composite action for now. This functionality will be focused on in a future ADR.**

A composite action in its entirety is a job. You can set both timeout-minutes for the whole composite action or its steps as long as the the sum of the `timeout-minutes` for each composite action step that has the attribute `timeout-minutes` is less than or equals to `timeout-minutes` for the composite action. There is no default timeout-minutes for each composite action step.
A composite action in its entirety is a job. You can set both timeout-minutes for the whole composite action or its steps as long as the sum of the `timeout-minutes` for each composite action step that has the attribute `timeout-minutes` is less than or equals to `timeout-minutes` for the composite action. There is no default timeout-minutes for each composite action step.

If the time taken for any of the steps in combination or individually exceed the whole composite action `timeout-minutes` attribute, the whole job will fail (1). If an individual step exceeds its own `timeout-minutes` attribute but the total time that has been used including this step is below the overall composite action `timeout-minutes`, the individual step will fail but the rest of the steps will run based on their own `timeout-minutes` attribute (they will still abide by condition (1) though).
If the time taken for any of the steps in combination or individually exceeds the whole composite action `timeout-minutes` attribute, the whole job will fail (1). If an individual step exceeds its own `timeout-minutes` attribute but the total time that has been used including this step is below the overall composite action `timeout-minutes`, the individual step will fail but the rest of the steps will run based on their own `timeout-minutes` attribute (they will still abide by condition (1) though).

For reference, in the example above, if the composite step `foo1` takes 11 minutes to run, that step will fail but the rest of the steps, `foo1` and `foo2`, will proceed as long as their total runtime with the previous failed `foo1` action is less than the composite action's `timeout-minutes` (50 minutes). If the composite step `foo2` takes 51 minutes to run, it will cause the whole composite action job to fail. I
For reference, in the example above, if the composite step `foo1` takes 11 minutes to run, that step will fail but the rest of the steps, `foo1` and `foo2`, will proceed as long as their total runtime with the previous failed `foo1` action is less than the composite action's `timeout-minutes` (50 minutes). If the composite step `foo2` takes 51 minutes to run, it will cause the whole composite action job to fail.

The rationale behind this is that users can configure their steps with the `if` condition to conditionally set how steps rely on each other. Due to the additional capabilities that are offered with combining `timeout-minutes` and/or `if`, we wanted the `timeout-minutes` condition to be as dumb as possible and not effect other steps.
The rationale behind this is that users can configure their steps with the `if` condition to conditionally set how steps rely on each other. Due to the additional capabilities that are offered with combining `timeout-minutes` and/or `if`, we wanted the `timeout-minutes` condition to be as dumb as possible and not affect other steps.

[Usage limits still apply](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions?query=if%28%29#usage-limits)

@@ -361,7 +361,7 @@ For the composite action steps, it follows the same logic as above. In this exam
### Visualizing Composite Action in the GitHub Actions UI
We want all the composite action's steps to be condensed into the original composite action node.

Here is a visual represenation of the [first example](#Steps)
Here is a visual representation of the [first example](#Steps)

| composite_action_node |
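For reference, the `random-number` output shown in the example output above comes from an ordinary run step; a hypothetical step body (with `shell: bash`) that emits it using the workflow-command syntax of this era would be:

```bash
# Hypothetical composite run step body; the output name mirrors the ADR's example.
echo "::set-output name=random-number::$RANDOM"
```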
@@ -11,7 +11,7 @@ export RUNNER_CFG_PAT=yourPAT

## Create running as a service

**Scenario**: Run on a machine or VM (not container) which automates:
**Scenario**: Run on a machine or VM ([not container](#why-cant-i-use-a-container)) which automates:

- Resolving latest released runner
- Download and extract latest
@@ -23,12 +23,16 @@ export RUNNER_CFG_PAT=yourPAT

Run as a one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level)
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/create-latest-svc.sh | bash -s yourorg/yourrepo
curl -s https://raw.githubusercontent.com/actions/runner/main/scripts/create-latest-svc.sh | bash -s yourorg/yourrepo
```

### Why can't I use a container?

The runner is installed as a service using `systemd` and `systemctl`. Docker does not support `systemd` for service configuration on a container.

## Uninstall running as service

**Scenario**: Run on a machine or VM (not container) which automates:
**Scenario**: Run on a machine or VM ([not container](#why-cant-i-use-a-container)) which automates:

- Stops and uninstalls the systemd (linux) or Launchd (osx) service
- Acquires a removal token
@@ -38,7 +42,7 @@ curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/create

Repo level one liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level)
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/remove-svc.sh | bash -s yourorg/yourrepo
curl -s https://raw.githubusercontent.com/actions/runner/main/scripts/remove-svc.sh | bash -s yourorg/yourrepo
```

### Delete an offline runner
@@ -53,5 +57,5 @@ curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/remove

Repo level one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level) and replace runnername
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/delete.sh | bash -s yourorg/yourrepo runnername
curl -s https://raw.githubusercontent.com/actions/runner/main/scripts/delete.sh | bash -s yourorg/yourrepo runnername
```
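The same one-liners work at the org level by passing just the org name; a sketch (PAT and org name are placeholders):

```bash
# Example only: install and start the latest runner as a service at the org level.
export RUNNER_CFG_PAT=yourPAT
curl -s https://raw.githubusercontent.com/actions/runner/main/scripts/create-latest-svc.sh | bash -s yourorg
```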
docs/checks/actions.md (new file): 45 lines
# Actions Connection Check

## What is this check for?

Make sure the runner has access to the Actions service for GitHub.com or GitHub Enterprise Server.

- For GitHub.com
  - The runner needs to access https://api.github.com for downloading actions.
  - The runner needs to access https://vstoken.actions.githubusercontent.com/_apis/.../ for requesting an access token.
  - The runner needs to access https://pipelines.actions.githubusercontent.com/_apis/.../ for receiving workflow jobs.
- For GitHub Enterprise Server
  - The runner needs to access https://myGHES.com/api/v3 for downloading actions.
  - The runner needs to access https://myGHES.com/_services/vstoken/_apis/.../ for requesting an access token.
  - The runner needs to access https://myGHES.com/_services/pipelines/_apis/.../ for receiving workflow jobs.

## What is checked?

- DNS lookup for api.github.com or myGHES.com using dotnet
- Ping api.github.com or myGHES.com using dotnet
- Make an HTTP GET to https://api.github.com or https://myGHES.com/api/v3 using dotnet, and check that the response headers contain `X-GitHub-Request-Id`
---
- DNS lookup for vstoken.actions.githubusercontent.com using dotnet
- Ping vstoken.actions.githubusercontent.com using dotnet
- Make an HTTP GET to https://vstoken.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/vstoken/_apis/health using dotnet, and check that the response headers contain `x-vss-e2eid`
---
- DNS lookup for pipelines.actions.githubusercontent.com using dotnet
- Ping pipelines.actions.githubusercontent.com using dotnet
- Make an HTTP GET to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, and check that the response headers contain `x-vss-e2eid`
- Make an HTTP POST to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, and check that the response headers contain `x-vss-e2eid`
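A rough manual approximation of those probes with `curl` (not the runner's own implementation; for GitHub Enterprise Server substitute the `https://myGHES.com/...` URLs above):

```bash
# Check that the expected service headers come back from each endpoint.
curl -sS -D - -o /dev/null https://api.github.com | grep -i 'x-github-request-id'
curl -sS -D - -o /dev/null https://vstoken.actions.githubusercontent.com/_apis/health | grep -i 'x-vss-e2eid'
curl -sS -D - -o /dev/null https://pipelines.actions.githubusercontent.com/_apis/health | grep -i 'x-vss-e2eid'
```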
## How to fix the issue?

### 1. Check the common network issue

> Please check the [network doc](./network.md)

### 2. SSL certificate related issue

If you are seeing `System.Net.Http.HttpRequestException: The SSL connection could not be established, see inner exception.` in the log, it means the runner can't connect to the Actions service due to an SSL handshake failure.
> Please check the [SSL cert doc](./sslcert.md)

## Still not working?

Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
docs/checks/git.md (new file): 34 lines
# Git Connection Check

## What is this check for?

Make sure `git` can access GitHub.com or your GitHub Enterprise Server.

## What is checked?

The test is done by executing
```bash
# For GitHub.com
git ls-remote --exit-code https://github.com/actions/checkout HEAD

# For GitHub Enterprise Server
git ls-remote --exit-code https://ghes.me/actions/checkout HEAD
```

The test also sets the environment variables `GIT_TRACE=1` and `GIT_CURL_VERBOSE=1` before running `git ls-remote`; this makes `git` produce debug logs that help diagnose any potential issues.
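To reproduce the check by hand with the same debug output enabled (swap in your GHES repository URL where applicable):

```bash
# Same probe as above, with git's debug logging turned on.
GIT_TRACE=1 GIT_CURL_VERBOSE=1 git ls-remote --exit-code https://github.com/actions/checkout HEAD
```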
## How to fix the issue?

### 1. Check the common network issue

> Please check the [network doc](./network.md)

### 2. SSL certificate related issue

If you are seeing `SSL Certificate problem:` in the log, it means `git` can't connect to the GitHub server due to an SSL handshake failure.
> Please check the [SSL cert doc](./sslcert.md)

## Still not working?

Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
docs/checks/internet.md (new file): 26 lines
# Internet Connection Check

## What is this check for?

Make sure the runner has access to https://api.github.com

The runner needs to access https://api.github.com to download any actions from the marketplace.

Even if the runner is configured against GitHub Enterprise Server, it can still download actions from GitHub.com with [GitHub Connect](https://docs.github.com/en/enterprise-server@2.22/admin/github-actions/enabling-automatic-access-to-githubcom-actions-using-github-connect)

## What is checked?

- DNS lookup for api.github.com using dotnet
- Ping api.github.com using dotnet
- Make an HTTP GET to https://api.github.com using dotnet, and check that the response headers contain `X-GitHub-Request-Id`

## How to fix the issue?

### 1. Check the common network issue

> Please check the [network doc](./network.md)

## Still not working?

Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
docs/checks/network.md (new file): 32 lines
## Common Network Related Issues

### Common things that can cause the runner to not work properly

- A bug in the runner or the dotnet framework that prevents the runner from making HTTP requests in a certain network environment.

- A proxy/firewall that blocks certain HTTP methods, for example blocking all POST and PUT calls, which the runner uses to upload logs.

- A proxy/firewall that only allows requests with a certain user-agent to pass through, while the actions runner user-agent is not in the allow list.

- A proxy that tries to decrypt and examine HTTPS traffic for security purposes but causes the actions runner to fail the SSL handshake because the proxy's CA is not trusted.

- A proxy that tries to modify the HTTPS request (adding or changing some HTTP headers) and makes the request incompatible with the Actions Service (ASP.NET Core), e.g. [Nginx](https://github.com/dotnet/aspnetcore/issues/17081)

- Firewall rules that block the action runner from accessing certain hosts, e.g. `*.github.com`, `*.actions.githubusercontent.com`, etc.

### Identify and solve these problems

The key is to figure out where the problem is: in the network environment, or in the actions runner?

Using a 3rd party tool to make the same requests as the runner makes is a good starting point; a sketch follows the list below.

- Use `nslookup` to check DNS
- Use `ping` to check Ping
- Use `traceroute`, `tracepath`, or `tracert` to check the network route between the runner and the Actions service
- Use `curl -v` to check the network stack, good for verifying default certificate/proxy settings.
- Use `Invoke-WebRequest` from `pwsh` (`PowerShell Core`) to check the dotnet network stack, good for verifying bugs in the dotnet framework.
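A quick sketch of running those tools against the GitHub.com endpoints (substitute your GitHub Enterprise Server host as needed):

```bash
# Example only: compare a 3rd party tool's view of the network with the runner's.
nslookup api.github.com
ping -c 4 api.github.com
traceroute api.github.com                                    # or tracepath / tracert
curl -v https://api.github.com -o /dev/null
pwsh -Command "Invoke-WebRequest -Uri https://api.github.com" > /dev/null
```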
If the 3rd party tool is also experiencing the same error as the runner does, then you might want to contact your network administrator for help.

Otherwise, contact GitHub customer support or log an issue at https://github.com/actions/runner
docs/checks/nodejs.md (new file): 30 lines
# Node.js Connection Check

## What is this check for?

Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.

The runner carries its own copy of the node.js executable under `<runner_root>/externals/node12/`.

All JavaScript-based Actions are executed by the built-in `node` at `<runner_root>/externals/node12/`.

> Not the `node` from `$PATH`

## What is checked?

- Make an HTTPS GET to https://api.github.com or https://myGHES.com/api/v3 using node.js, and make sure it gets a 200 response code.
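A rough manual equivalent, assuming a Linux or macOS layout where the bundled interpreter lives at `<runner_root>/externals/node12/bin/node` (the `User-Agent` header is added only because api.github.com rejects requests without one):

```bash
# Example only: reproduce the check with the runner's bundled node, not the node from $PATH.
./externals/node12/bin/node -e '
  const https = require("https");
  https.get("https://api.github.com", { headers: { "User-Agent": "actions-runner-check" } },
    res => console.log("status:", res.statusCode)
  ).on("error", err => { console.error(err); process.exit(1); });'
```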
## How to fix the issue?

### 1. Check the common network issue

> Please check the [network doc](./network.md)

### 2. SSL certificate related issue

If you are seeing `Https request failed due to SSL cert issue` in the log, it means `node.js` can't connect to the GitHub server due to an SSL handshake failure.
> Please check the [SSL cert doc](./sslcert.md)

## Still not working?

Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
docs/checks/sslcert.md (new file): 89 lines
## SSL Certificate Related Issues

You might run into an SSL certificate error when your GitHub Enterprise Server is using a self-signed SSL server certificate or a web proxy within your network is decrypting HTTPS traffic for a security audit.

As long as your certificate is generated properly, most of the issues should be fixed once you trust the certificate properly on the runner machine.

> Different OSes might have extra requirements on SSL certificates,
> Ex: macOS requires `ExtendedKeyUsage` https://support.apple.com/en-us/HT210176

### Don't skip SSL cert validation

> !!! DO NOT SKIP SSL CERT VALIDATION !!!
> !!! IT IS A BAD SECURITY PRACTICE !!!

### Download SSL certificate chain

Depending on how your SSL server certificate is configured, you might need to download the whole certificate chain from a machine that has trusted the SSL certificate's CA.

- Approach 1: Download the certificate chain using a browser (Chrome, Firefox, IT), you can google for more examples, [here is what I found](https://medium.com/@menakajain/export-download-ssl-certificate-from-server-site-url-bcfc41ea46a2)

- Approach 2: Download the certificate chain using OpenSSL (see the sketch after this list), you can google for more examples, [here is what I found](https://superuser.com/a/176721)

- Approach 3: Ask your network administrator or the owner of the CA certificate to send you a copy of it
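A minimal OpenSSL sketch of Approach 2, assuming the certificate is served by a host named `myghes.example.com` (a placeholder):

```bash
# Example only: print the chain presented by the server and save every certificate into one .pem file.
openssl s_client -connect myghes.example.com:443 -showcerts </dev/null \
  | sed -n '/BEGIN CERTIFICATE/,/END CERTIFICATE/p' > chain.pem
```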
### Trust CA certificate for the Runner

The actions runner is a dotnet core application, which follows how dotnet loads SSL CA certificates on each OS.

You can find the full documentation [here](https://docs.microsoft.com/en-us/dotnet/standard/security/cross-platform-cryptography#x509store)

In short:
- Windows: Load from Windows certificate store.
- Linux: Load from OpenSSL CA cert bundle.
- macOS: Load from macOS KeyChain.

To let the runner trust your CA certificate, you will need to:
1. Save your SSL certificate chain, which includes the root CA and all intermediate CAs, into a `.pem` file.
2. Use `OpenSSL` to convert the `.pem` file to the proper format for each OS; here is a [doc with sample commands](https://www.sslshopper.com/ssl-converter.html)
3. Trust the CA on each OS:
   - Windows: https://docs.microsoft.com/en-us/skype-sdk/sdn/articles/installing-the-trusted-root-certificate
   - macOS:
   - Linux: Refer to the distribution documentation
     1. RedHat: https://www.redhat.com/sysadmin/ca-certificates-cli
     2. Ubuntu: http://manpages.ubuntu.com/manpages/focal/man8/update-ca-certificates.8.html
     3. Google search: "trust ca certificate on [linux distribution]"
4. If all approaches fail, set the environment variable `SSL_CERT_FILE` to the CA bundle `.pem` file from step 1.
> To verify the cert is installed properly on Linux, you can try `curl -v https://sitewithsslissue.com` and `pwsh -Command "Invoke-WebRequest -Uri https://sitewithsslissue.com"`

### Trust CA certificate for Git CLI

Git uses a different CA bundle file depending on your operating system.
- Git packages the CA bundle file within the Git installation on Windows
- Git uses the OpenSSL CA bundle file on Linux and macOS

You can check where Git looks for the CA file by running:
```bash
export GIT_CURL_VERBOSE=1
git ls-remote https://github.com/actions/runner HEAD
```

You should see something like:
```
* Couldn't find host github.com in the .netrc file; using defaults
*   Trying 140.82.114.4...
* TCP_NODELAY set
* Connected to github.com (140.82.114.4) port 443 (#0)
* ALPN, offering h2
* ALPN, offering http/1.1
* successfully set certificate verify locations:
*   CAfile: /etc/ssl/cert.pem
    CApath: none
* SSL connection using TLSv1.2 / ECDHE-RSA-AES128-GCM-SHA256
```
This tells me `/etc/ssl/cert.pem` is where it reads trusted CA certificates.

To let Git trust your CA certificate, you will need to:
1. Save your SSL certificate chain, which includes the root CA and all intermediate CAs, into a `.pem` file.
2. Set the `http.sslCAInfo` Git config or the `GIT_SSL_CAINFO` environment variable to the full path of the `.pem` file ([Git Doc](https://git-scm.com/docs/git-config#Documentation/git-config.txt-httpsslCAInfo))
> I would recommend using `http.sslCAInfo` since it can be scoped to certain hosts that need the extra trusted CA.
> Ex: `git config --global http.https://myghes.com/.sslCAInfo /extra/ca/cert.pem`
> This will make Git use the `/extra/ca/cert.pem` only when communicating with `https://myghes.com` and keep using the default CA bundle for other hosts.

### Trust CA certificate for Node.js

Node.js compiles in a snapshot of the Mozilla CA store that is fixed at release time for each version of Node.js.

To let Node.js trust your CA certificate, you will need to:
1. Save your SSL certificate chain, which includes the root CA and all intermediate CAs, into a `.pem` file.
2. Set the environment variable `NODE_EXTRA_CA_CERTS` to point to the file, e.g. `export NODE_EXTRA_CA_CERTS=/full/path/to/cacert.pem` or `set NODE_EXTRA_CA_CERTS=C:\full\path\to\cacert.pem`
@@ -19,12 +19,35 @@ We ask that before significant effort is put into code changes, that we have agr
|
||||
|
||||
An ADR is an Architectural Decision Record. This allows consensus on the direction forward and also serves as a record of the change and motivation. [Read more here](adrs/README.md)
|
||||
|
||||
## Development Life Cycle
|
||||
|
||||
### Required Dev Dependencies
|
||||
## Required Dev Dependencies
|
||||
|
||||
  Git for Windows and Linux [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
|
||||
|
||||
## Quickstart: Run a job from a real repository
|
||||
|
||||
If you just want to get from building the sourcecode to using it to execute an action, you will need:
|
||||
|
||||
- The url of your repository
|
||||
- A runner registration token. You can find it at `https://github.com/{your-repo}/settings/actions/runners/new`
|
||||
|
||||
|
||||
```bash
|
||||
git clone https://github.com/actions/runner
|
||||
cd runner/src
|
||||
./dev.(sh/cmd) layout # the runner that built from source is in {root}/_layout
|
||||
cd ../_layout
|
||||
./config.(sh/cmd) --url https://github.com/{your-repo} --token ABCABCABCABCABCABCABCABCABCAB # accept default name, labels and work folder
|
||||
./run.(sh/cmd)
|
||||
```
|
||||
|
||||
If you trigger a job now, you can see the runner execute it.
|
||||
|
||||
Tip: Make sure your job can run on this runner. The easiest way is to set `runs-on: self-hosted` in the workflow file.
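A minimal sketch of such a workflow (the file name and the echo step are illustrative, not part of this repository):

```bash
# assumption: run from the root of the repository the runner was registered against
mkdir -p .github/workflows
cat > .github/workflows/selfhosted-demo.yml <<'EOF'
name: selfhosted-demo
on: push
jobs:
  demo:
    runs-on: self-hosted   # routes the job to your newly registered runner
    steps:
      - run: echo "Hello from my self-hosted runner"
EOF
git add .github/workflows/selfhosted-demo.yml
git commit -m "Add self-hosted demo workflow" && git push   # the push triggers the job
```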

## Development Life Cycle
If you're using VS Code, you can follow [these](contribute/vscode.md) steps instead.

### To Build, Test, Layout

Navigate to the `src` directory and run the following command:

@@ -39,7 +62,7 @@ Navigate to the `src` directory and run the following command:

* `build` (`b`): Build everything and update runner layout folder
* `test` (`t`): Build runner binaries and run unit tests

Sample developer flow:
### Sample developer flow:

```bash
git clone https://github.com/actions/runner

@@ -51,25 +74,81 @@ cd ./src
./dev.(sh/cmd) test # run all unit tests before git commit/push
```

View logs:
Let's break that down.

### Clone repository:

```bash
git clone https://github.com/actions/runner
cd runner
```

If you want to push your changes to a remote, it is recommended you fork the repository and use that fork as your origin instead of `https://github.com/actions/runner`.

### Build Layout:

This command builds all projects, then copies them and other dependencies into a folder called `_layout`. The binaries in this folder are then used for running and debugging the runner.

```bash
cd ./src # execute the script from this folder
./dev.(sh/cmd) layout # the runner built from source is in {root}/_layout
```

If you make code changes after this point, use the argument `build` to build your code in the `src` folder to keep your `_layout` folder up to date.

```bash
cd ./src
./dev.(sh/cmd) build # {root}/_layout will get updated
```

### Test Layout:

This command runs the suite of unit tests in the project.

```bash
cd ./src
./dev.(sh/cmd) test # run all unit tests before git commit/push
```

### Configure Runner:

If you want to manually test your runner and run actions from a real repository, you'll have to configure it before running it.

```bash
cd runner/_layout
./config.(sh/cmd) # configure your custom runner
```

You will need the name of your repository and a runner registration token.
Check [Quickstart](##Quickstart:-Run-a-job-from-a-real-repository) if you don't know how to get this token.

These can also be passed down as arguments to `config.(sh/cmd)`:
```bash
cd runner/_layout
./config.(sh/cmd) --url https://github.com/{your-repo} --token ABCABCABCABCABCABCABCABCABCAB
```

### Run Runner

All that's left to do is to start the runner:
```bash
cd runner/_layout
./run.(sh/cmd) # run your custom runner
```

### View logs:

```bash
cd runner/_layout/_diag
ls
cat (Runner/Worker)_TIMESTAMP.log # view your log file
```

Run Runner:
```bash
cd runner/_layout
./run.sh # run your custom runner
```

### Editors
## Editors

[Using Visual Studio Code](https://code.visualstudio.com/)
[Using Visual Studio](https://code.visualstudio.com/docs)

### Styling
## Styling

We use the .NET Foundation and CoreCLR style guidelines [located here](
https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md)
52 docs/contribute/vscode.md Normal file
@@ -0,0 +1,52 @@

# Development Life Cycle using VS Code:

These examples use VS Code, but the idea should be similar across all IDEs as long as you attach to the same processes in the right folder.

## Configure

To successfully start the runner, you need to register it using a repository and a runner registration token.
Run `Configure` first to build the source code and set up the runner in `_layout`.
Once it's done creating `_layout`, it asks for the url of your repository and your token in the terminal.

Check [Quickstart](../contribute.md#quickstart-run-a-job-from-a-real-repository) if you don't know how to get this token.

## Debugging

Debugging the full lifecycle of a job can be tricky, because there are multiple processes involved.
All the configs below can be found in `.vscode/launch.json`.

## Debug the Listener

```json
{
    "name": "Run [build]",
    "type": "coreclr",
    "request": "launch",
    "preLaunchTask": "build runner layout", // use the config called "Run" to launch without rebuild
    "program": "${workspaceFolder}/_layout/bin/Runner.Listener",
    "args": [
        "run" // run without args to print usage
    ],
    "cwd": "${workspaceFolder}/src",
    "console": "integratedTerminal",
    "requireExactSource": false,
}
```

If you launch `Run` or `Run [build]`, it starts a process called `Runner.Listener`.
This process will receive any job queued on this repository if the job runs on matching labels (e.g. `runs-on: self-hosted`).
Once a job is received, `Runner.Listener` starts a new `Runner.Worker` process.
Since this is a different process, you can't use the same debugger session to debug it.
Instead, a parallel debugging session has to be started, using a different launch config.
Luckily, VS Code supports multiple parallel debugging sessions.

## Debug the Worker

Because the worker process is usually started by the listener instead of an IDE, debugging it from start to finish can be tricky.
For this reason, `Runner.Worker` can be configured to wait for a debugger to be attached before it begins any actual work.

Set the environment variable `GITHUB_ACTIONS_RUNNER_ATTACH_DEBUGGER` to `true` or `1` to enable this wait.
All worker processes will now wait 20 seconds before they start working on their task.

This gives enough time to attach a debugger by running `Debug Worker`.
If for some reason you have multiple workers running, run the launch config `Attach` instead.
Select `Runner.Worker` from the running processes when VS Code prompts for it.
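As a sketch of how that looks from a terminal (assuming you already built the layout and configured the runner as described in the contribution guide):

```bash
# make every newly spawned Runner.Worker pause ~20 seconds so a debugger can attach
export GITHUB_ACTIONS_RUNNER_ATTACH_DEBUGGER=1

cd _layout
./run.sh   # queue a job against this runner, then start "Debug Worker" in VS Code while the worker waits
```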

BIN docs/res/macOStrustCA.gif (new binary file, 14 MiB, not shown)

@@ -5,6 +5,12 @@

## Supported Versions

- macOS High Sierra (10.13) and later versions

## Apple Silicon M1

The runner is currently not supported on devices with an Apple M1 chip.
We are waiting for official .NET support. You can read more about the [current state of support here](https://github.com/orgs/dotnet/projects/18#card-56812463).
Current .NET project board about M1 support:
https://github.com/orgs/dotnet/projects/18#card-56812463

## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore30)

@@ -1,13 +1,22 @@

## Features
- Add labels in the script that registers the runner (#844)
- Add proxy support for container actions (#840)

- Add Job Message size to both Worker and Listener logs for debugging (#1100)
- Add notice annotation level (in addition to error and warning) and support more annotation fields (#1175)

## Bugs
- Unset GITHUB_ACTION_REPOSITORY and GITHUB_ACTION_REF for non-repo based actions #804

- Remove the `NODE_ICU_DATA` environment variable that may cause conflicts with node within the runner. (#1060)
- Handle cancelled jobs better to prevent orphaned processes (#1083)
- No longer fail to remove a `systemd` service with `svc.sh uninstall` if the script had previously been run from the wrong location (#1135)
- Send `SIGKILL` to the runner listener if it doesn't respond to `SIGINT` for 30 seconds
- Match runner group name when configuring even if there's only a single runner group

## Misc
- Crypto cleanup and enable usage of FIPS compliant crypto when required (#806)
- Count actions resolve failures as infra failures (#851)
- Fix automation links in documentation (#1089)
- Improve developer and first contributor experience by improving tooling for VS Code (#1101, #1117, #1119, #1132)
- Fix bug where linux users are not able to run remove-svc.sh as root (#1127)

## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.

@@ -45,7 +54,7 @@ curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>
tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
```

## Linux arm64 (Pre-release)
## Linux arm64

``` bash
# Create a folder
@@ -56,7 +65,7 @@ curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>
tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
```

## Linux arm (Pre-release)
## Linux arm

``` bash
# Create a folder
@@ -69,3 +78,13 @@ tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz

## Using your self hosted runner
For additional details about configuring, running, or shutting down the runner please check out our [product docs.](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/adding-self-hosted-runners)

## SHA-256 Checksums

The SHA-256 checksums for the packages included in this build are shown below:

- actions-runner-win-x64-<RUNNER_VERSION>.zip <!-- BEGIN SHA win-x64 --><WIN_X64_SHA><!-- END SHA win-x64 -->
- actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA osx-x64 --><OSX_X64_SHA><!-- END SHA osx-x64 -->
- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->
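
A sketch of how a downloaded package can be checked against the published value (substitute the real version and the checksum from the list above):

```bash
# assumption: the tarball was downloaded into the current directory
echo "<LINUX_X64_SHA>  actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz" | shasum -a 256 -c
# on Linux, sha256sum -c can be used instead of shasum -a 256 -c
```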

@@ -1 +1 @@
<Update to ./src/runnerversion when creating release>
2.279.0

@@ -73,4 +73,4 @@ if [ "${runner_plat}" == "linux" ]; then
fi
${prefix}./svc.sh stop
${prefix}./svc.sh uninstall
${prefix}./config.sh remove --token $REMOVE_TOKEN
./config.sh remove --token $REMOVE_TOKEN
589 src/Misc/dotnet-install.ps1 vendored
@@ -23,8 +23,6 @@
|
||||
Default: latest
|
||||
Represents a build version on specific channel. Possible values:
|
||||
- latest - most latest build on specific channel
|
||||
- coherent - most latest coherent build on specific channel
|
||||
coherent applies only to SDK downloads
|
||||
- 3-part version in a format A.B.C - represents specific version of build
|
||||
examples: 2.0.0-preview2-006120, 1.1.0
|
||||
.PARAMETER InstallDir
|
||||
@@ -122,24 +120,42 @@ $VersionRegEx="/\d+\.\d+[^/]+/"
|
||||
$OverrideNonVersionedFiles = !$SkipNonVersionedFiles
|
||||
|
||||
function Say($str) {
|
||||
try
|
||||
{
|
||||
try {
|
||||
Write-Host "dotnet-install: $str"
|
||||
}
|
||||
catch
|
||||
{
|
||||
catch {
|
||||
# Some platforms cannot utilize Write-Host (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: $str"
|
||||
}
|
||||
}
|
||||
|
||||
function Say-Warning($str) {
|
||||
try {
|
||||
Write-Warning "dotnet-install: $str"
|
||||
}
|
||||
catch {
|
||||
# Some platforms cannot utilize Write-Warning (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: Warning: $str"
|
||||
}
|
||||
}
|
||||
|
||||
# Writes a line with error style settings.
|
||||
# Use this function to show a human-readable comment along with an exception.
|
||||
function Say-Error($str) {
|
||||
try {
|
||||
# Write-Error is quite oververbose for the purpose of the function, let's write one line with error style settings.
|
||||
$Host.UI.WriteErrorLine("dotnet-install: $str")
|
||||
}
|
||||
catch {
|
||||
Write-Output "dotnet-install: Error: $str"
|
||||
}
|
||||
}
|
||||
|
||||
function Say-Verbose($str) {
|
||||
try
|
||||
{
|
||||
try {
|
||||
Write-Verbose "dotnet-install: $str"
|
||||
}
|
||||
catch
|
||||
{
|
||||
catch {
|
||||
# Some platforms cannot utilize Write-Verbose (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: $str"
|
||||
}
|
||||
@@ -156,7 +172,7 @@ function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [in
|
||||
|
||||
while ($true) {
|
||||
try {
|
||||
return $ScriptBlock.Invoke()
|
||||
return & $ScriptBlock
|
||||
}
|
||||
catch {
|
||||
$Attempts++
|
||||
@@ -270,18 +286,41 @@ function GetHTTPResponse([Uri] $Uri)
|
||||
# Default timeout for HttpClient is 100s. For a 50 MB download this assumes 500 KB/s average, any less will time out
|
||||
# 20 minutes allows it to work over much slower connections.
|
||||
$HttpClient.Timeout = New-TimeSpan -Minutes 20
|
||||
$Response = $HttpClient.GetAsync("${Uri}${FeedCredential}").Result
|
||||
if (($Response -eq $null) -or (-not ($Response.IsSuccessStatusCode))) {
|
||||
# The feed credential is potentially sensitive info. Do not log FeedCredential to console output.
|
||||
$ErrorMsg = "Failed to download $Uri."
|
||||
if ($Response -ne $null) {
|
||||
$ErrorMsg += " $Response"
|
||||
$Task = $HttpClient.GetAsync("${Uri}${FeedCredential}").ConfigureAwait("false");
|
||||
$Response = $Task.GetAwaiter().GetResult();
|
||||
|
||||
if (($null -eq $Response) -or (-not ($Response.IsSuccessStatusCode))) {
|
||||
# The feed credential is potentially sensitive info. Do not log FeedCredential to console output.
|
||||
$DownloadException = [System.Exception] "Unable to download $Uri."
|
||||
|
||||
if ($null -ne $Response) {
|
||||
$DownloadException.Data["StatusCode"] = [int] $Response.StatusCode
|
||||
$DownloadException.Data["ErrorMessage"] = "Unable to download $Uri. Returned HTTP status code: " + $DownloadException.Data["StatusCode"]
|
||||
}
|
||||
|
||||
throw $ErrorMsg
|
||||
throw $DownloadException
|
||||
}
|
||||
|
||||
return $Response
|
||||
return $Response
|
||||
}
|
||||
catch [System.Net.Http.HttpRequestException] {
|
||||
$DownloadException = [System.Exception] "Unable to download $Uri."
|
||||
|
||||
# Pick up the exception message and inner exceptions' messages if they exist
|
||||
$CurrentException = $PSItem.Exception
|
||||
$ErrorMsg = $CurrentException.Message + "`r`n"
|
||||
while ($CurrentException.InnerException) {
|
||||
$CurrentException = $CurrentException.InnerException
|
||||
$ErrorMsg += $CurrentException.Message + "`r`n"
|
||||
}
|
||||
|
||||
# Check if there is an issue concerning TLS.
|
||||
if ($ErrorMsg -like "*SSL/TLS*") {
|
||||
$ErrorMsg += "Ensure that TLS 1.2 or higher is enabled to use this script.`r`n"
|
||||
}
|
||||
|
||||
$DownloadException.Data["ErrorMessage"] = $ErrorMsg
|
||||
throw $DownloadException
|
||||
}
|
||||
finally {
|
||||
if ($HttpClient -ne $null) {
|
||||
@@ -291,7 +330,7 @@ function GetHTTPResponse([Uri] $Uri)
|
||||
})
|
||||
}
|
||||
|
||||
function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Coherent) {
|
||||
function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$VersionFileUrl = $null
|
||||
@@ -301,17 +340,11 @@ function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Co
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$VersionFileUrl = "$UncachedFeed/aspnetcore/Runtime/$Channel/latest.version"
|
||||
}
|
||||
# Currently, the WindowsDesktop runtime is manufactured with the .Net core runtime
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
|
||||
$VersionFileUrl = "$UncachedFeed/WindowsDesktop/$Channel/latest.version"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
if ($Coherent) {
|
||||
$VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.coherent.version"
|
||||
}
|
||||
else {
|
||||
$VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version"
|
||||
}
|
||||
$VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
@@ -320,7 +353,8 @@ function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Co
|
||||
$Response = GetHTTPResponse -Uri $VersionFileUrl
|
||||
}
|
||||
catch {
|
||||
throw "Could not resolve version information."
|
||||
Say-Error "Could not resolve version information."
|
||||
throw
|
||||
}
|
||||
$StringContent = $Response.Content.ReadAsStringAsync().Result
|
||||
|
||||
@@ -346,7 +380,8 @@ function Parse-Jsonfile-For-Version([string]$JSonFile) {
|
||||
$JSonContent = Get-Content($JSonFile) -Raw | ConvertFrom-Json | Select-Object -expand "sdk" -ErrorAction SilentlyContinue
|
||||
}
|
||||
catch {
|
||||
throw "Json file unreadable: '$JSonFile'"
|
||||
Say-Error "Json file unreadable: '$JSonFile'"
|
||||
throw
|
||||
}
|
||||
if ($JSonContent) {
|
||||
try {
|
||||
@@ -359,7 +394,8 @@ function Parse-Jsonfile-For-Version([string]$JSonFile) {
|
||||
}
|
||||
}
|
||||
catch {
|
||||
throw "Unable to parse the SDK node in '$JSonFile'"
|
||||
Say-Error "Unable to parse the SDK node in '$JSonFile'"
|
||||
throw
|
||||
}
|
||||
}
|
||||
else {
|
||||
@@ -375,16 +411,12 @@ function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel,
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if (-not $JSonFile) {
|
||||
switch ($Version.ToLower()) {
|
||||
{ $_ -eq "latest" } {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $False
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
{ $_ -eq "coherent" } {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $True
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
default { return $Version }
|
||||
if ($Version.ToLower() -eq "latest") {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
else {
|
||||
return $Version
|
||||
}
|
||||
}
|
||||
else {
|
||||
@@ -405,7 +437,16 @@ function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string
|
||||
$PayloadURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/aspnetcore-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
# The windows desktop runtime is part of the core runtime layout prior to 5.0
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
if ($SpecificVersion -match '^(\d+)\.(.*)$')
|
||||
{
|
||||
$majorVersion = [int]$Matches[1]
|
||||
if ($majorVersion -ge 5)
|
||||
{
|
||||
$PayloadURL = "$AzureFeed/WindowsDesktop/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
}
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-sdk-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
@@ -447,7 +488,16 @@ function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion) {
|
||||
$ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
# The windows desktop runtime is part of the core runtime layout prior to 5.0
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
|
||||
if ($SpecificVersion -match '^(\d+)\.(.*)')
|
||||
{
|
||||
$majorVersion = [int]$Matches[1]
|
||||
if ($majorVersion -ge 5)
|
||||
{
|
||||
$ProductVersionTxtURL = "$AzureFeed/WindowsDesktop/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
}
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/productVersion.txt"
|
||||
@@ -619,6 +669,23 @@ function DownloadFile($Source, [string]$OutPath) {
|
||||
}
|
||||
}
|
||||
|
||||
function SafeRemoveFile($Path) {
|
||||
try {
|
||||
if (Test-Path $Path) {
|
||||
Remove-Item $Path
|
||||
Say-Verbose "The temporary file `"$Path`" was removed."
|
||||
}
|
||||
else
|
||||
{
|
||||
Say-Verbose "The temporary file `"$Path`" does not exist, therefore is not removed."
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
Say-Warning "Failed to remove the temporary file: `"$Path`", remove it manually."
|
||||
}
|
||||
}
|
||||
|
||||
function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolderRelativePath) {
|
||||
$BinPath = Get-Absolute-Path $(Join-Path -Path $InstallRoot -ChildPath $BinFolderRelativePath)
|
||||
if (-Not $NoPath) {
|
||||
@@ -673,7 +740,7 @@ if ($DryRun) {
|
||||
Say "NOTE: Due to finding a version manifest with this runtime, it would actually install with version '$EffectiveVersion'"
|
||||
}
|
||||
|
||||
exit 0
|
||||
return
|
||||
}
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
@@ -707,7 +774,7 @@ $isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -Relat
|
||||
if ($isAssetInstalled) {
|
||||
Say "$assetName version $SpecificVersion is already installed."
|
||||
Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
|
||||
exit 0
|
||||
return
|
||||
}
|
||||
|
||||
New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null
|
||||
@@ -715,30 +782,69 @@ New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null
|
||||
$installDrive = $((Get-Item $InstallRoot).PSDrive.Name);
|
||||
$diskInfo = Get-PSDrive -Name $installDrive
|
||||
if ($diskInfo.Free / 1MB -le 100) {
|
||||
Say "There is not enough disk space on drive ${installDrive}:"
|
||||
exit 0
|
||||
throw "There is not enough disk space on drive ${installDrive}:"
|
||||
}
|
||||
|
||||
$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
|
||||
Say-Verbose "Zip path: $ZipPath"
|
||||
|
||||
$DownloadFailed = $false
|
||||
Say "Downloading link: $DownloadLink"
|
||||
|
||||
$PrimaryDownloadStatusCode = 0
|
||||
$LegacyDownloadStatusCode = 0
|
||||
|
||||
$PrimaryDownloadFailedMsg = ""
|
||||
$LegacyDownloadFailedMsg = ""
|
||||
|
||||
Say "Downloading primary link $DownloadLink"
|
||||
try {
|
||||
DownloadFile -Source $DownloadLink -OutPath $ZipPath
|
||||
}
|
||||
catch {
|
||||
Say "Cannot download: $DownloadLink"
|
||||
if ($PSItem.Exception.Data.Contains("StatusCode")) {
|
||||
$PrimaryDownloadStatusCode = $PSItem.Exception.Data["StatusCode"]
|
||||
}
|
||||
|
||||
if ($PSItem.Exception.Data.Contains("ErrorMessage")) {
|
||||
$PrimaryDownloadFailedMsg = $PSItem.Exception.Data["ErrorMessage"]
|
||||
} else {
|
||||
$PrimaryDownloadFailedMsg = $PSItem.Exception.Message
|
||||
}
|
||||
|
||||
if ($PrimaryDownloadStatusCode -eq 404) {
|
||||
Say "The resource at $DownloadLink is not available."
|
||||
} else {
|
||||
Say $PSItem.Exception.Message
|
||||
}
|
||||
|
||||
SafeRemoveFile -Path $ZipPath
|
||||
|
||||
if ($LegacyDownloadLink) {
|
||||
$DownloadLink = $LegacyDownloadLink
|
||||
$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
|
||||
Say-Verbose "Legacy zip path: $ZipPath"
|
||||
Say "Downloading legacy link: $DownloadLink"
|
||||
Say "Downloading legacy link $DownloadLink"
|
||||
try {
|
||||
DownloadFile -Source $DownloadLink -OutPath $ZipPath
|
||||
}
|
||||
catch {
|
||||
Say "Cannot download: $DownloadLink"
|
||||
if ($PSItem.Exception.Data.Contains("StatusCode")) {
|
||||
$LegacyDownloadStatusCode = $PSItem.Exception.Data["StatusCode"]
|
||||
}
|
||||
|
||||
if ($PSItem.Exception.Data.Contains("ErrorMessage")) {
|
||||
$LegacyDownloadFailedMsg = $PSItem.Exception.Data["ErrorMessage"]
|
||||
} else {
|
||||
$LegacyDownloadFailedMsg = $PSItem.Exception.Message
|
||||
}
|
||||
|
||||
if ($LegacyDownloadStatusCode -eq 404) {
|
||||
Say "The resource at $DownloadLink is not available."
|
||||
} else {
|
||||
Say $PSItem.Exception.Message
|
||||
}
|
||||
|
||||
SafeRemoveFile -Path $ZipPath
|
||||
$DownloadFailed = $true
|
||||
}
|
||||
}
|
||||
@@ -748,7 +854,19 @@ catch {
|
||||
}
|
||||
|
||||
if ($DownloadFailed) {
|
||||
throw "Could not find/download: `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
|
||||
if (($PrimaryDownloadStatusCode -eq 404) -and ((-not $LegacyDownloadLink) -or ($LegacyDownloadStatusCode -eq 404))) {
|
||||
throw "Could not find `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
|
||||
} else {
|
||||
# 404-NotFound is an expected response if it goes from only one of the links, do not show that error.
|
||||
# If primary path is available (not 404-NotFound) then show the primary error else show the legacy error.
|
||||
if ($PrimaryDownloadStatusCode -ne 404) {
|
||||
throw "Could not download `"$assetName`" with version = $SpecificVersion`r`n$PrimaryDownloadFailedMsg"
|
||||
}
|
||||
if (($LegacyDownloadLink) -and ($LegacyDownloadStatusCode -ne 404)) {
|
||||
throw "Could not download `"$assetName`" with version = $SpecificVersion`r`n$LegacyDownloadFailedMsg"
|
||||
}
|
||||
throw "Could not download `"$assetName`" with version = $SpecificVersion"
|
||||
}
|
||||
}
|
||||
|
||||
Say "Extracting zip from $DownloadLink"
|
||||
@@ -770,207 +888,208 @@ if (!$isAssetInstalled) {
|
||||
$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
|
||||
}
|
||||
|
||||
# Version verification failed. More likely something is wrong either with the downloaded content or with the verification algorithm.
|
||||
if (!$isAssetInstalled) {
|
||||
Say-Error "Failed to verify the version of installed `"$assetName`".`nInstallation source: $DownloadLink.`nInstallation location: $InstallRoot.`nReport the bug at https://github.com/dotnet/install-scripts/issues."
|
||||
throw "`"$assetName`" with version = $SpecificVersion failed to install with an unknown error."
|
||||
}
|
||||
|
||||
Remove-Item $ZipPath
|
||||
SafeRemoveFile -Path $ZipPath
|
||||
|
||||
Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
|
||||
|
||||
Say "Note that the script does not resolve dependencies during installation."
|
||||
Say "To check the list of dependencies, go to https://docs.microsoft.com/dotnet/core/install/windows#dependencies"
|
||||
Say "Installation finished"
|
||||
exit 0
|
||||
# SIG # Begin signature block
# (updated Authenticode signature data, base64-encoded)
# SIG # End signature block
246 src/Misc/dotnet-install.sh vendored
@@ -40,7 +40,7 @@ if [ -t 1 ] && command -v tput > /dev/null; then
fi

say_warning() {
printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}"
printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}" >&3
}

say_err() {
@@ -183,6 +183,9 @@ get_current_os_name() {
elif is_musl_based_distro; then
echo "linux-musl"
return 0
elif [ "$linux_platform_name" = "linux-musl" ]; then
echo "linux-musl"
return 0
else
echo "linux"
return 0
@@ -296,11 +299,11 @@ get_machine_architecture() {
if command -v uname > /dev/null; then
CPUName=$(uname -m)
case $CPUName in
armv7l)
armv*l)
echo "arm"
return 0
;;
aarch64)
aarch64|arm64)
echo "arm64"
return 0
;;
@@ -341,6 +344,30 @@ get_normalized_architecture_from_architecture() {
return 1
}

# args:
# user_defined_os - $1
get_normalized_os() {
eval $invocation

local osname="$(to_lowercase "$1")"
if [ ! -z "$osname" ]; then
case "$osname" in
osx | freebsd | rhel.6 | linux-musl | linux)
echo "$osname"
return 0
;;
*)
say_err "'$user_defined_os' is not a supported value for --os option, supported values are: osx, linux, linux-musl, freebsd, rhel.6. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues."
return 1
;;
esac
else
osname="$(get_current_os_name)" || return 1
fi
echo "$osname"
return 0
}

# The version text returned from the feeds is a 1-line or 2-line string:
# For the SDK and the dotnet runtime (2 lines):
# Line 1: # commit_hash
@@ -382,14 +409,12 @@ is_dotnet_package_installed() {
# azure_feed - $1
# channel - $2
# normalized_architecture - $3
# coherent - $4
get_latest_version_info() {
eval $invocation

local azure_feed="$1"
local channel="$2"
local normalized_architecture="$3"
local coherent="$4"

local version_file_url=null
if [[ "$runtime" == "dotnet" ]]; then
@@ -397,11 +422,7 @@ get_latest_version_info() {
elif [[ "$runtime" == "aspnetcore" ]]; then
version_file_url="$uncached_feed/aspnetcore/Runtime/$channel/latest.version"
elif [ -z "$runtime" ]; then
if [ "$coherent" = true ]; then
version_file_url="$uncached_feed/Sdk/$channel/latest.coherent.version"
else
version_file_url="$uncached_feed/Sdk/$channel/latest.version"
fi
version_file_url="$uncached_feed/Sdk/$channel/latest.version"
else
say_err "Invalid value for \$runtime"
return 1
@@ -468,26 +489,16 @@ get_specific_version_from_version() {
local json_file="$5"

if [ -z "$json_file" ]; then
case "$version" in
latest)
local version_info
version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1
say_verbose "get_specific_version_from_version: version_info=$version_info"
echo "$version_info" | get_version_from_version_info
return 0
;;
coherent)
local version_info
version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" true)" || return 1
say_verbose "get_specific_version_from_version: version_info=$version_info"
echo "$version_info" | get_version_from_version_info
return 0
;;
*)
echo "$version"
return 0
;;
esac
if [[ "$version" == "latest" ]]; then
local version_info
version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1
say_verbose "get_specific_version_from_version: version_info=$version_info"
echo "$version_info" | get_version_from_version_info
return 0
else
echo "$version"
return 0
fi
else
local version_info
version_info="$(parse_jsonfile_for_version "$json_file")" || return 1
@@ -501,6 +512,7 @@ get_specific_version_from_version() {
# channel - $2
# normalized_architecture - $3
# specific_version - $4
# normalized_os - $5
construct_download_link() {
eval $invocation

@@ -509,9 +521,7 @@ construct_download_link() {
local normalized_architecture="$3"
local specific_version="${4//[$'\t\r\n']}"
local specific_product_version="$(get_specific_product_version "$1" "$4")"

local osname
osname="$(get_current_os_name)" || return 1
local osname="$5"

local download_link=null
if [[ "$runtime" == "dotnet" ]]; then
@@ -532,7 +542,7 @@ construct_download_link() {
# azure_feed - $1
# specific_version - $2
get_specific_product_version() {
# If we find a 'productVersion.txt' at the root of any folder, we'll use its contents
# If we find a 'productVersion.txt' at the root of any folder, we'll use its contents
# to resolve the version of what's in the folder, superseding the specified version.
eval $invocation

@@ -692,11 +702,31 @@ extract_dotnet_package() {
find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files"

rm -rf "$temp_out_path"
rm -f "$zip_path" && say_verbose "Temporary zip file $zip_path was removed"

if [ "$failed" = true ]; then
say_err "Extraction failed"
return 1
fi
return 0
}

get_http_header_curl() {
eval $invocation
local remote_path="$1"
remote_path_with_credential="${remote_path}${feed_credential}"
curl_options="-I -sSL --retry 5 --retry-delay 2 --connect-timeout 15 "
curl $curl_options "$remote_path_with_credential" || return 1
return 0
}

get_http_header_wget() {
eval $invocation
local remote_path="$1"
remote_path_with_credential="${remote_path}${feed_credential}"
wget_options="-q -S --spider --tries 5 --waitretry 2 --connect-timeout 15 "
wget $wget_options "$remote_path_with_credential" 2>&1 || return 1
return 0
}

# args:
@@ -714,13 +744,30 @@ download() {
fi

local failed=false
if machine_has "curl"; then
downloadcurl "$remote_path" "$out_path" || failed=true
elif machine_has "wget"; then
downloadwget "$remote_path" "$out_path" || failed=true
else
failed=true
fi
local attempts=0
while [ $attempts -lt 3 ]; do
attempts=$((attempts+1))
failed=false
if machine_has "curl"; then
downloadcurl "$remote_path" "$out_path" || failed=true
elif machine_has "wget"; then
downloadwget "$remote_path" "$out_path" || failed=true
else
say_err "Missing dependency: neither curl nor wget was found."
exit 1
fi

if [ "$failed" = false ] || [ $attempts -ge 3 ] || { [ ! -z $http_code ] && [ $http_code = "404" ]; }; then
break
fi

say "Download attempt #$attempts has failed: $http_code $download_error_msg"
say "Attempt #$((attempts+1)) will start in $((attempts*10)) seconds."
sleep $((attempts*20))
done


if [ "$failed" = true ]; then
say_verbose "Download failed: $remote_path"
return 1
@@ -728,44 +775,60 @@ download() {
return 0
}

# Updates global variables $http_code and $download_error_msg
downloadcurl() {
eval $invocation
unset http_code
unset download_error_msg
local remote_path="$1"
local out_path="${2:-}"

# Append feed_credential as late as possible before calling curl to avoid logging feed_credential
remote_path="${remote_path}${feed_credential}"

local remote_path_with_credential="${remote_path}${feed_credential}"
local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
local failed=false
if [ -z "$out_path" ]; then
curl $curl_options "$remote_path" || failed=true
curl $curl_options "$remote_path_with_credential" || failed=true
else
curl $curl_options -o "$out_path" "$remote_path" || failed=true
curl $curl_options -o "$out_path" "$remote_path_with_credential" || failed=true
fi
if [ "$failed" = true ]; then
say_verbose "Curl download failed"
local response=$(get_http_header_curl $remote_path_with_credential)
http_code=$( echo "$response" | awk '/^HTTP/{print $2}' | tail -1 )
download_error_msg="Unable to download $remote_path."
if [[ $http_code != 2* ]]; then
download_error_msg+=" Returned HTTP status code: $http_code."
fi
say_verbose "$download_error_msg"
return 1
fi
return 0
}


# Updates global variables $http_code and $download_error_msg
downloadwget() {
eval $invocation
unset http_code
unset download_error_msg
local remote_path="$1"
local out_path="${2:-}"

# Append feed_credential as late as possible before calling wget to avoid logging feed_credential
remote_path="${remote_path}${feed_credential}"
local remote_path_with_credential="${remote_path}${feed_credential}"
local wget_options="--tries 20 --waitretry 2 --connect-timeout 15 "
local failed=false
if [ -z "$out_path" ]; then
wget -q $wget_options -O - "$remote_path" || failed=true
wget -q $wget_options -O - "$remote_path_with_credential" || failed=true
else
wget $wget_options -O "$out_path" "$remote_path" || failed=true
wget $wget_options -O "$out_path" "$remote_path_with_credential" || failed=true
fi
if [ "$failed" = true ]; then
say_verbose "Wget download failed"
local response=$(get_http_header_wget $remote_path_with_credential)
http_code=$( echo "$response" | awk '/^ HTTP/{print $2}' | tail -1 )
download_error_msg="Unable to download $remote_path."
if [[ $http_code != 2* ]]; then
download_error_msg+=" Returned HTTP status code: $http_code."
fi
say_verbose "$download_error_msg"
return 1
fi
return 0
@@ -778,6 +841,9 @@ calculate_vars() {
normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")"
say_verbose "normalized_architecture=$normalized_architecture"

normalized_os="$(get_normalized_os "$user_defined_os")"
say_verbose "normalized_os=$normalized_os"

specific_version="$(get_specific_version_from_version "$azure_feed" "$channel" "$normalized_architecture" "$version" "$json_file")"
specific_product_version="$(get_specific_product_version "$azure_feed" "$specific_version")"
say_verbose "specific_version=$specific_version"
@@ -786,7 +852,7 @@ calculate_vars() {
return 1
fi

download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")"
download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version" "$normalized_os")"
say_verbose "Constructed primary named payload URL: $download_link"

legacy_download_link="$(construct_legacy_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false
@@ -831,38 +897,74 @@ install_dotnet() {
zip_path="$(mktemp "$temporary_file_template")"
say_verbose "Zip path: $zip_path"

say "Downloading link: $download_link"

# Failures are normal in the non-legacy case for ultimately legacy downloads.
# Do not output to stderr, since output to stderr is considered an error.
say "Downloading primary link $download_link"

# The download function will set variables $http_code and $download_error_msg in case of failure.
download "$download_link" "$zip_path" 2>&1 || download_failed=true

# if the download fails, download the legacy_download_link
if [ "$download_failed" = true ]; then
say "Cannot download: $download_link"

primary_path_http_code="$http_code"; primary_path_download_error_msg="$download_error_msg"
case $primary_path_http_code in
404)
say "The resource at $download_link is not available."
;;
*)
say "$primary_path_download_error_msg"
;;
esac
rm -f "$zip_path" 2>&1 && say_verbose "Temporary zip file $zip_path was removed"
if [ "$valid_legacy_download_link" = true ]; then
download_failed=false
download_link="$legacy_download_link"
zip_path="$(mktemp "$temporary_file_template")"
say_verbose "Legacy zip path: $zip_path"
say "Downloading legacy link: $download_link"

say "Downloading legacy link $download_link"

# The download function will set variables $http_code and $download_error_msg in case of failure.
download "$download_link" "$zip_path" 2>&1 || download_failed=true

if [ "$download_failed" = true ]; then
say "Cannot download: $download_link"
legacy_path_http_code="$http_code"; legacy_path_download_error_msg="$download_error_msg"
case $legacy_path_http_code in
404)
say "The resource at $download_link is not available."
;;
*)
say "$legacy_path_download_error_msg"
;;
esac
rm -f "$zip_path" 2>&1 && say_verbose "Temporary zip file $zip_path was removed"
fi
fi
fi

if [ "$download_failed" = true ]; then
say_err "Could not find/download: \`$asset_name\` with version = $specific_version"
say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
if [[ "$primary_path_http_code" = "404" && ( "$valid_legacy_download_link" = false || "$legacy_path_http_code" = "404") ]]; then
say_err "Could not find \`$asset_name\` with version = $specific_version"
say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
else
say_err "Could not download: \`$asset_name\` with version = $specific_version"
# 404-NotFound is an expected response if it goes from only one of the links, do not show that error.
# If primary path is available (not 404-NotFound) then show the primary error else show the legacy error.
if [ "$primary_path_http_code" != "404" ]; then
say_err "$primary_path_download_error_msg"
return 1
fi
if [[ "$valid_legacy_download_link" = true && "$legacy_path_http_code" != "404" ]]; then
say_err "$legacy_path_download_error_msg"
return 1
fi
fi
return 1
fi

say "Extracting zip from $download_link"
extract_dotnet_package "$zip_path" "$install_root"
extract_dotnet_package "$zip_path" "$install_root" || return 1

# Check if the SDK version is installed; if not, fail the installation.
# if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
@@ -883,6 +985,8 @@ install_dotnet() {
return 0
fi

# Version verification failed. More likely something is wrong either with the downloaded content or with the verification algorithm.
say_err "Failed to verify the version of installed \`$asset_name\`.\nInstallation source: $download_link.\nInstallation location: $install_root.\nReport the bug at https://github.com/dotnet/install-scripts/issues."
say_err "\`$asset_name\` with version = $specific_product_version failed to install with an unknown error."
return 1
}
@@ -909,6 +1013,7 @@ runtime=""
runtime_id=""
override_non_versioned_files=true
non_dynamic_parameters=""
user_defined_os=""

while [ $# -ne 0 ]
do
@@ -930,6 +1035,10 @@ do
shift
architecture="$1"
;;
--os|-[Oo][SS])
shift
user_defined_os="$1"
;;
--shared-runtime|-[Ss]hared[Rr]untime)
say_warning "The --shared-runtime flag is obsolete and may be removed in a future version of this script. The recommended usage is to specify '--runtime dotnet'."
if [ -z "$runtime" ]; then
@@ -981,6 +1090,7 @@ do
shift
runtime_id="$1"
non_dynamic_parameters+=" $name "\""$1"\"""
say_warning "Use of --runtime-id is obsolete and should be limited to the versions below 2.1. To override architecture, use --architecture option instead. To override OS, use --os option instead."
;;
--jsonfile|-[Jj][Ss]on[Ff]ile)
shift
@@ -1013,8 +1123,6 @@ do
echo " -Version"
echo " Possible values:"
echo " - latest - most latest build on specific channel"
echo " - coherent - most latest coherent build on specific channel"
echo " coherent applies only to SDK downloads"
echo " - 3-part version in a format A.B.C - represents specific version of build"
echo " examples: 2.0.0-preview2-006120; 1.1.0"
echo " -i,--install-dir <DIR> Install under specified location (see Install Location below)"
@@ -1022,6 +1130,11 @@ do
echo " --architecture <ARCHITECTURE> Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`."
echo " --arch,-Architecture,-Arch"
echo " Possible values: x64, arm, and arm64"
echo " --os <system> Specifies operating system to be used when selecting the installer."
echo " Overrides the OS determination approach used by the script. Supported values: osx, linux, linux-musl, freebsd, rhel.6."
echo " In case any other value is provided, the platform will be determined by the script based on machine configuration."
echo " Not supported for legacy links. Use --runtime-id to specify platform for legacy links."
echo " Refer to: https://aka.ms/dotnet-os-lifecycle for more information."
echo " --runtime <RUNTIME> Installs a shared runtime only, without the SDK."
echo " -Runtime"
echo " Possible values:"
@@ -1038,14 +1151,15 @@ do
echo " --no-cdn,-NoCdn Disable downloading from the Azure CDN, and use the uncached feed directly."
echo " --jsonfile <JSONFILE> Determines the SDK version from a user specified global.json file."
echo " Note: global.json must have a value for 'SDK:Version'"
echo " --runtime-id Installs the .NET Tools for the given platform (use linux-x64 for portable linux)."
echo " -RuntimeId"
echo " -?,--?,-h,--help,-Help Shows this help message"
echo ""
echo "Obsolete parameters:"
echo " --shared-runtime The recommended alternative is '--runtime dotnet'."
echo " This parameter is obsolete and may be removed in a future version of this script."
echo " Installs just the shared runtime bits, not the entire SDK."
echo " --runtime-id Installs the .NET Tools for the given platform (use linux-x64 for portable linux)."
echo " -RuntimeId" The parameter is obsolete and may be removed in a future version of this script. Should be used only for versions below 2.1.
echo " For primary links to override OS or/and architecture, use --os and --architecture option instead."
echo ""
echo "Install Location:"
echo " Location is chosen in following order:"
@@ -1082,7 +1196,7 @@ if [ "$dry_run" = true ]; then
if [ "$valid_legacy_download_link" = true ]; then
say "Legacy named payload URL: $legacy_download_link"
fi
repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"""
repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"" --os "\""$normalized_os"\"""
if [[ "$runtime" == "dotnet" ]]; then
repeatable_command+=" --runtime "\""dotnet"\"""
elif [[ "$runtime" == "aspnetcore" ]]; then
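The updated download() above retries a failed transfer up to three times with a growing delay, stopping early on success or on a 404. A minimal standalone sketch of that retry pattern follows; it is not part of the vendored script, and fetch_once is a hypothetical stand-in for downloadcurl/downloadwget.

#!/usr/bin/env bash
# Sketch only: retry a download up to 3 times with an increasing delay.
fetch_once() {
    # Hypothetical downloader; the real script chooses curl or wget.
    curl -sSL -f --create-dirs -o "$2" "$1"
}

download_with_retries() {
    local url="$1" out="$2"
    local attempts=0 failed=false
    while [ $attempts -lt 3 ]; do
        attempts=$((attempts+1))
        failed=false
        fetch_once "$url" "$out" || failed=true
        # Stop on success, after the last attempt, or when the server reported 404
        # (http_code may be unset here; the real script derives it from a HEAD request).
        if [ "$failed" = false ] || [ $attempts -ge 3 ] || [ "${http_code:-}" = "404" ]; then
            break
        fi
        echo "Attempt #$attempts failed; retrying in $((attempts*20)) seconds."
        sleep $((attempts*20))
    done
    [ "$failed" = false ]
}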
@@ -16,11 +16,11 @@ if (supported.indexOf(process.platform) == -1) {
var stopping = false;
var listener = null;

var runService = function() {
var runService = function () {
var listenerExePath = path.join(__dirname, '../bin/Runner.Listener');
var interactive = process.argv[2] === "interactive";

if(!stopping) {
if (!stopping) {
try {
if (interactive) {
console.log('Starting Runner listener interactively');
@@ -30,8 +30,8 @@ var runService = function() {
listener = childProcess.spawn(listenerExePath, ['run', '--startuptype', 'service'], { env: process.env });
}

console.log('Started listener process');

console.log(`Started listener process, pid: ${listener.pid}`);

listener.stdout.on('data', (data) => {
process.stdout.write(data.toString('utf8'));
});
@@ -40,6 +40,10 @@ var runService = function() {
process.stdout.write(data.toString('utf8'));
});

listener.on("error", (err) => {
console.log(`Runner listener fail to start with error ${err.message}`);
});

listener.on('close', (code) => {
console.log(`Runner listener exited with error code ${code}`);

@@ -56,13 +60,13 @@ var runService = function() {
} else {
console.log('Runner listener exit with undefined return code, re-launch runner in 5 seconds.');
}

if(!stopping) {

if (!stopping) {
setTimeout(runService, 5000);
}
});

} catch(ex) {
} catch (ex) {
console.log(ex);
}
}
@@ -71,14 +75,15 @@ var runService = function() {
runService();
console.log('Started running service');

var gracefulShutdown = function(code) {
var gracefulShutdown = function (code) {
console.log('Shutting down runner listener');
stopping = true;
if (listener) {
console.log('Sending SIGINT to runner listener to stop');
listener.kill('SIGINT');

// TODO wait for 30 seconds and send a SIGKILL
console.log('Sending SIGKILL to runner listener');
setTimeout(() => listener.kill('SIGKILL'), 30000);
}
}
115
src/Misc/layoutbin/checkScripts/downloadCert.js
Normal file
@@ -0,0 +1,115 @@
|
||||
const https = require('https')
|
||||
const fs = require('fs')
|
||||
const http = require('http')
|
||||
const hostname = process.env['HOSTNAME'] || ''
|
||||
const port = process.env['PORT'] || ''
|
||||
const path = process.env['PATH'] || ''
|
||||
const pat = process.env['PAT'] || ''
|
||||
const proxyHost = process.env['PROXYHOST'] || ''
|
||||
const proxyPort = process.env['PROXYPORT'] || ''
|
||||
const proxyUsername = process.env['PROXYUSERNAME'] || ''
|
||||
const proxyPassword = process.env['PROXYPASSWORD'] || ''
|
||||
|
||||
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = '0'
|
||||
|
||||
if (proxyHost === '') {
|
||||
const options = {
|
||||
hostname: hostname,
|
||||
port: port,
|
||||
path: path,
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'User-Agent': 'GitHubActionsRunnerCheck/1.0',
|
||||
'Authorization': `token ${pat}`
|
||||
},
|
||||
}
|
||||
const req = https.request(options, res => {
|
||||
console.log(`statusCode: ${res.statusCode}`)
|
||||
console.log(`headers: ${JSON.stringify(res.headers)}`)
|
||||
let cert = socket.getPeerCertificate(true)
|
||||
let certPEM = ''
|
||||
let fingerprints = {}
|
||||
while (cert != null && fingerprints[cert.fingerprint] != '1') {
|
||||
fingerprints[cert.fingerprint] = '1'
|
||||
certPEM = certPEM + '-----BEGIN CERTIFICATE-----\n'
|
||||
let certEncoded = cert.raw.toString('base64')
|
||||
for (let i = 0; i < certEncoded.length; i++) {
|
||||
certPEM = certPEM + certEncoded[i]
|
||||
if (i != certEncoded.length - 1 && (i + 1) % 64 == 0) {
|
||||
certPEM = certPEM + '\n'
|
||||
}
|
||||
}
|
||||
certPEM = certPEM + '\n-----END CERTIFICATE-----\n'
|
||||
cert = cert.issuerCertificate
|
||||
}
|
||||
console.log(certPEM)
|
||||
fs.writeFileSync('./download_ca_cert.pem', certPEM)
|
||||
res.on('data', d => {
|
||||
process.stdout.write(d)
|
||||
})
|
||||
})
|
||||
req.on('error', error => {
|
||||
console.error(error)
|
||||
})
|
||||
req.end()
|
||||
}
|
||||
else {
|
||||
const auth = 'Basic ' + Buffer.from(proxyUsername + ':' + proxyPassword).toString('base64')
|
||||
|
||||
const options = {
|
||||
host: proxyHost,
|
||||
port: proxyPort,
|
||||
method: 'CONNECT',
|
||||
path: `${hostname}:${port}`,
|
||||
}
|
||||
|
||||
if (proxyUsername != '' || proxyPassword != '') {
|
||||
options.headers = {
|
||||
'Proxy-Authorization': auth,
|
||||
}
|
||||
}
|
||||
|
||||
http.request(options).on('connect', (res, socket) => {
|
||||
if (res.statusCode != 200) {
|
||||
throw new Error(`Proxy returns code: ${res.statusCode}`)
|
||||
}
|
||||
|
||||
https.get({
|
||||
host: hostname,
|
||||
port: port,
|
||||
socket: socket,
|
||||
agent: false,
|
||||
path: '/',
|
||||
headers: {
|
||||
'User-Agent': 'GitHubActionsRunnerCheck/1.0',
|
||||
'Authorization': `token ${pat}`
|
||||
}
|
||||
}, (res) => {
|
||||
let cert = res.socket.getPeerCertificate(true)
|
||||
let certPEM = ''
|
||||
let fingerprints = {}
|
||||
while (cert != null && fingerprints[cert.fingerprint] != '1') {
|
||||
fingerprints[cert.fingerprint] = '1'
|
||||
certPEM = certPEM + '-----BEGIN CERTIFICATE-----\n'
|
||||
let certEncoded = cert.raw.toString('base64')
|
||||
for (let i = 0; i < certEncoded.length; i++) {
|
||||
certPEM = certPEM + certEncoded[i]
|
||||
if (i != certEncoded.length - 1 && (i + 1) % 64 == 0) {
|
||||
certPEM = certPEM + '\n'
|
||||
}
|
||||
}
|
||||
certPEM = certPEM + '\n-----END CERTIFICATE-----\n'
|
||||
cert = cert.issuerCertificate
|
||||
}
|
||||
console.log(certPEM)
|
||||
fs.writeFileSync('./download_ca_cert.pem', certPEM)
|
||||
console.log(`statusCode: ${res.statusCode}`)
|
||||
console.log(`headers: ${JSON.stringify(res.headers)}`)
|
||||
res.on('data', d => {
|
||||
process.stdout.write(d)
|
||||
})
|
||||
})
|
||||
}).on('error', (err) => {
|
||||
console.error('error', err)
|
||||
}).end()
|
||||
}
|
||||
75
src/Misc/layoutbin/checkScripts/makeWebRequest.js
Normal file
@@ -0,0 +1,75 @@
|
||||
const https = require('https')
|
||||
const http = require('http')
|
||||
const hostname = process.env['HOSTNAME'] || ''
|
||||
const port = process.env['PORT'] || ''
|
||||
const path = process.env['PATH'] || ''
|
||||
const pat = process.env['PAT'] || ''
|
||||
const proxyHost = process.env['PROXYHOST'] || ''
|
||||
const proxyPort = process.env['PROXYPORT'] || ''
|
||||
const proxyUsername = process.env['PROXYUSERNAME'] || ''
|
||||
const proxyPassword = process.env['PROXYPASSWORD'] || ''
|
||||
|
||||
if (proxyHost === '') {
|
||||
const options = {
|
||||
hostname: hostname,
|
||||
port: port,
|
||||
path: path,
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'User-Agent': 'GitHubActionsRunnerCheck/1.0',
|
||||
'Authorization': `token ${pat}`,
|
||||
}
|
||||
}
|
||||
const req = https.request(options, res => {
|
||||
console.log(`statusCode: ${res.statusCode}`)
|
||||
console.log(`headers: ${JSON.stringify(res.headers)}`)
|
||||
|
||||
res.on('data', d => {
|
||||
process.stdout.write(d)
|
||||
})
|
||||
})
|
||||
req.on('error', error => {
|
||||
console.error(error)
|
||||
})
|
||||
req.end()
|
||||
}
|
||||
else {
|
||||
const proxyAuth = 'Basic ' + Buffer.from(proxyUsername + ':' + proxyPassword).toString('base64')
|
||||
const options = {
|
||||
hostname: proxyHost,
|
||||
port: proxyPort,
|
||||
method: 'CONNECT',
|
||||
path: `${hostname}:${port}`
|
||||
}
|
||||
|
||||
if (proxyUsername != '' || proxyPassword != '') {
|
||||
options.headers = {
|
||||
'Proxy-Authorization': proxyAuth,
|
||||
}
|
||||
}
|
||||
http.request(options).on('connect', (res, socket) => {
|
||||
if (res.statusCode != 200) {
|
||||
throw new Error(`Proxy returns code: ${res.statusCode}`)
|
||||
}
|
||||
https.get({
|
||||
host: hostname,
|
||||
port: port,
|
||||
socket: socket,
|
||||
agent: false,
|
||||
path: path,
|
||||
headers: {
|
||||
'User-Agent': 'GitHubActionsRunnerCheck/1.0',
|
||||
'Authorization': `token ${pat}`,
|
||||
}
|
||||
}, (res) => {
|
||||
console.log(`statusCode: ${res.statusCode}`)
|
||||
console.log(`headers: ${JSON.stringify(res.headers)}`)
|
||||
|
||||
res.on('data', d => {
|
||||
process.stdout.write(d)
|
||||
})
|
||||
})
|
||||
}).on('error', (err) => {
|
||||
console.error('error', err)
|
||||
}).end()
|
||||
}
|
||||
@@ -106,25 +106,37 @@ function stop()

function uninstall()
{
stop
systemctl disable ${SVC_NAME} || failed "failed to disable ${SVC_NAME}"
rm "${UNIT_PATH}" || failed "failed to delete ${UNIT_PATH}"
if service_exists; then
stop
systemctl disable ${SVC_NAME} || failed "failed to disable ${SVC_NAME}"
rm "${UNIT_PATH}" || failed "failed to delete ${UNIT_PATH}"
else
echo "Service ${SVC_NAME} is not installed"
fi
if [ -f "${CONFIG_PATH}" ]; then
rm "${CONFIG_PATH}" || failed "failed to delete ${CONFIG_PATH}"
fi
systemctl daemon-reload || failed "failed to reload daemons"
}

function service_exists() {
if [ -f "${UNIT_PATH}" ]; then
return 0
else
return 1
fi
}

function status()
{
if [ -f "${UNIT_PATH}" ]; then
if service_exists; then
echo
echo "${UNIT_PATH}"
else
echo
echo "not installed"
echo
return
exit 1
fi

systemctl --no-pager status ${SVC_NAME}
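The svc.sh change above makes uninstall conditional on the systemd unit file actually existing. A simplified sketch of that guard, assuming SVC_NAME and UNIT_PATH are set the way the real script sets them, and omitting its stop/failed helpers:

# Sketch only: guard uninstall on the unit file being present.
SVC_NAME="actions.runner.example.service"        # hypothetical value
UNIT_PATH="/etc/systemd/system/${SVC_NAME}"      # hypothetical value

service_exists() {
    [ -f "${UNIT_PATH}" ]
}

uninstall_if_present() {
    if service_exists; then
        systemctl stop "${SVC_NAME}"
        systemctl disable "${SVC_NAME}"
        rm "${UNIT_PATH}"
        systemctl daemon-reload
    else
        echo "Service ${SVC_NAME} is not installed"
    fi
}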
@@ -6,6 +6,7 @@ varCheckList=(
'ANT_HOME'
'M2_HOME'
'ANDROID_HOME'
'ANDROID_SDK_ROOT'
'GRADLE_HOME'
'NVM_BIN'
'NVM_PATH'
@@ -26,25 +26,23 @@ if [[ "$1" == "localRun" ]]; then
else
"$DIR"/bin/Runner.Listener run $*

# Return code 4 means the run once runner received an update message.
# Sleep 5 seconds to wait for the update process finish and run the runner again.
# Return code 3 means the run once runner received an update message.
# Sleep 5 seconds to wait for the update process finish
returnCode=$?
if [[ $returnCode == 4 ]]; then
if [[ $returnCode == 3 ]]; then
if [ ! -x "$(command -v sleep)" ]; then
if [ ! -x "$(command -v ping)" ]; then
COUNT="0"
while [[ $COUNT != 5000 ]]; do
echo "SLEEP" >nul
echo "SLEEP" > /dev/null
COUNT=$[$COUNT+1]
done
else
ping -n 5 127.0.0.1 >nul
ping -c 5 127.0.0.1 > /dev/null
fi
else
sleep 5 >nul
sleep 5
fi

"$DIR"/bin/Runner.Listener run $*
else
exit $returnCode
fi
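Consolidated, the run.sh change above means: exit code 3 from Runner.Listener signals that a run-once runner received an update, so the script waits about five seconds for the self-update to finish and starts the listener once more. A minimal sketch of that flow, assuming DIR points at the runner root as in the real run.sh:

# Sketch only: wait-and-relaunch handling for the run-once update exit code.
run_listener_once() {
    "$DIR"/bin/Runner.Listener run "$@"
}

run_listener_once "$@"
returnCode=$?
if [ "$returnCode" = 3 ]; then
    if command -v sleep > /dev/null; then
        sleep 5
    else
        ping -c 5 127.0.0.1 > /dev/null   # fallback delay when sleep is unavailable
    fi
    run_listener_once "$@"
else
    exit "$returnCode"
fi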
@@ -41,6 +41,8 @@ namespace GitHub.Runner.Common
public static string PluginTracePrefix = "##[plugin.trace]";
public static readonly int RunnerDownloadRetryMaxAttempts = 3;

public static readonly int CompositeActionsMaxDepth = 9;

// This enum is embedded within the Constants class to make it easier to reference and avoid
// ambiguous type reference with System.Runtime.InteropServices.OSPlatform and System.Runtime.InteropServices.Architecture
public enum OSPlatform
@@ -99,9 +101,11 @@ namespace GitHub.Runner.Common

// Secret args. Must be added to the "Secrets" getter as well.
public static readonly string Token = "token";
public static readonly string PAT = "pat";
public static readonly string WindowsLogonPassword = "windowslogonpassword";
public static string[] Secrets => new[]
{
PAT,
Token,
WindowsLogonPassword,
};
@@ -119,6 +123,7 @@ namespace GitHub.Runner.Common
//validFlags array as well present in the CommandSettings.cs
public static class Flags
{
public static readonly string Check = "check";
public static readonly string Commit = "commit";
public static readonly string Help = "help";
public static readonly string Replace = "replace";
@@ -138,8 +143,14 @@ namespace GitHub.Runner.Common
public const int RunOnceRunnerUpdating = 4;
}

public static class Features
{
public static readonly string DiskSpaceWarning = "runner.diskspace.warning";
}

public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
public static readonly string WorkerCrash = "WORKER_CRASH";
public static readonly string LowDiskSpace = "LOW_DISK_SPACE";
public static readonly string UnsupportedCommand = "UNSUPPORTED_COMMAND";
public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/";
}
@@ -51,6 +51,7 @@ namespace GitHub.Runner.Common
Add<T>(extensions, "GitHub.Runner.Worker.RemoveMatcherCommandExtension, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.WarningCommandExtension, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.ErrorCommandExtension, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.NoticeCommandExtension, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.DebugCommandExtension, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.GroupCommandExtension, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.EndGroupCommandExtension, Runner.Worker");
@@ -60,6 +61,12 @@ namespace GitHub.Runner.Common
Add<T>(extensions, "GitHub.Runner.Worker.AddPathFileCommand, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.SetEnvFileCommand, Runner.Worker");
break;
case "GitHub.Runner.Listener.Check.ICheckExtension":
Add<T>(extensions, "GitHub.Runner.Listener.Check.InternetCheck, Runner.Listener");
Add<T>(extensions, "GitHub.Runner.Listener.Check.ActionsCheck, Runner.Listener");
Add<T>(extensions, "GitHub.Runner.Listener.Check.GitCheck, Runner.Listener");
Add<T>(extensions, "GitHub.Runner.Listener.Check.NodeJsCheck, Runner.Listener");
break;
default:
// This should never happen.
throw new NotSupportedException($"Unexpected extension type: '{typeof(T).FullName}'");
@@ -84,6 +84,7 @@ namespace GitHub.Runner.Common
this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift1);
this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift2);
this.SecretMasker.AddValueEncoder(ValueEncoders.CommandLineArgumentEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.ExpressionStringEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.UriDataEscape);
@@ -544,6 +544,11 @@ namespace GitHub.Runner.Common
timelineRecord.WarningCount = rec.WarningCount;
}

if (rec.NoticeCount != null && rec.NoticeCount > 0)
{
timelineRecord.NoticeCount = rec.NoticeCount;
}

if (rec.Issues.Count > 0)
{
timelineRecord.Issues.Clear();
@@ -101,7 +101,7 @@ namespace GitHub.Runner.Common
EndPage();
_byteCount = 0;
_dataFileName = Path.Combine(_pagesFolder, $"{_timelineId}_{_timelineRecordId}_{++_pageCount}.log");
_pageData = new FileStream(_dataFileName, FileMode.CreateNew);
_pageData = new FileStream(_dataFileName, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite);
_pageWriter = new StreamWriter(_pageData, System.Text.Encoding.UTF8);
}
@@ -3,6 +3,7 @@ using System.IO;
using System.IO.Pipes;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Sdk;

namespace GitHub.Runner.Common
{
@@ -68,6 +69,7 @@ namespace GitHub.Runner.Common

public async Task SendAsync(MessageType messageType, string body, CancellationToken cancellationToken)
{
Trace.Info($"Sending message of length {body.Length}, with hash '{IOUtil.GetSha256Hash(body)}'");
await _writeStream.WriteInt32Async((int)messageType, cancellationToken);
await _writeStream.WriteStringAsync(body, cancellationToken);
}
@@ -77,6 +79,7 @@ namespace GitHub.Runner.Common
WorkerMessage result = new WorkerMessage(MessageType.NotInitialized, string.Empty);
result.MessageType = (MessageType)await _readStream.ReadInt32Async(cancellationToken);
result.Body = await _readStream.ReadStringAsync(cancellationToken);
Trace.Info($"Receiving message of length {result.Body.Length}, with hash '{IOUtil.GetSha256Hash(result.Body)}'");
return result;
}
@@ -45,8 +45,8 @@ namespace GitHub.Runner.Common
Task<TaskAgentJobRequest> FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken);

// agent package
Task<List<PackageMetadata>> GetPackagesAsync(string packageType, string platform, int top, CancellationToken cancellationToken);
Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, CancellationToken cancellationToken);
Task<List<PackageMetadata>> GetPackagesAsync(string packageType, string platform, int top, bool includeToken, CancellationToken cancellationToken);
Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken);

// agent update
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState);
@@ -317,16 +317,16 @@ namespace GitHub.Runner.Common
//-----------------------------------------------------------------
// Agent Package
//-----------------------------------------------------------------
public Task<List<PackageMetadata>> GetPackagesAsync(string packageType, string platform, int top, CancellationToken cancellationToken)
public Task<List<PackageMetadata>> GetPackagesAsync(string packageType, string platform, int top, bool includeToken, CancellationToken cancellationToken)
{
CheckConnection(RunnerConnectionType.Generic);
return _genericTaskAgentClient.GetPackagesAsync(packageType, platform, top, cancellationToken: cancellationToken);
return _genericTaskAgentClient.GetPackagesAsync(packageType, platform, top, includeToken, cancellationToken: cancellationToken);
}

public Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, CancellationToken cancellationToken)
public Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken)
{
CheckConnection(RunnerConnectionType.Generic);
return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, cancellationToken: cancellationToken);
return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken);
}

public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState)
93
src/Runner.Listener/Checks/ActionsCheck.cs
Normal file
@@ -0,0 +1,93 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public sealed class ActionsCheck : RunnerService, ICheckExtension
|
||||
{
|
||||
private string _logFile = null;
|
||||
|
||||
public int Order => 2;
|
||||
|
||||
public string CheckName => "GitHub Actions Connection";
|
||||
|
||||
public string CheckDescription => "Check if the Actions runner has access to the GitHub Actions service.";
|
||||
|
||||
public string CheckLog => _logFile;
|
||||
|
||||
public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks/actions.md";
|
||||
|
||||
public Type ExtensionType => typeof(ICheckExtension);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), StringUtil.Format("{0}_{1:yyyyMMdd-HHmmss}-utc.log", nameof(ActionsCheck), DateTime.UtcNow));
|
||||
}
|
||||
|
||||
// runner access to actions service
|
||||
public async Task<bool> RunCheck(string url, string pat)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
|
||||
|
||||
var checkTasks = new List<Task<CheckResult>>();
|
||||
string githubApiUrl = null;
|
||||
string actionsTokenServiceUrl = null;
|
||||
string actionsPipelinesServiceUrl = null;
|
||||
var urlBuilder = new UriBuilder(url);
|
||||
if (UrlUtil.IsHostedServer(urlBuilder))
|
||||
{
|
||||
urlBuilder.Host = $"api.{urlBuilder.Host}";
|
||||
urlBuilder.Path = "";
|
||||
githubApiUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
actionsTokenServiceUrl = "https://vstoken.actions.githubusercontent.com/_apis/health";
|
||||
actionsPipelinesServiceUrl = "https://pipelines.actions.githubusercontent.com/_apis/health";
|
||||
}
|
||||
else
|
||||
{
|
||||
urlBuilder.Path = "api/v3";
|
||||
githubApiUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
urlBuilder.Path = "_services/vstoken/_apis/health";
|
||||
actionsTokenServiceUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
urlBuilder.Path = "_services/pipelines/_apis/health";
|
||||
actionsPipelinesServiceUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
}
|
||||
|
||||
// check github api
|
||||
checkTasks.Add(CheckUtil.CheckDns(githubApiUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(githubApiUrl));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(githubApiUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
|
||||
|
||||
// check actions token service
|
||||
checkTasks.Add(CheckUtil.CheckDns(actionsTokenServiceUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(actionsTokenServiceUrl));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(actionsTokenServiceUrl, pat, expectedHeader: "x-vss-e2eid"));
|
||||
|
||||
// check actions pipelines service
|
||||
checkTasks.Add(CheckUtil.CheckDns(actionsPipelinesServiceUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(actionsPipelinesServiceUrl));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(actionsPipelinesServiceUrl, pat, expectedHeader: "x-vss-e2eid"));
|
||||
|
||||
// check HTTP POST to actions pipelines service
|
||||
checkTasks.Add(HostContext.CheckHttpsPostRequests(actionsPipelinesServiceUrl, pat, expectedHeader: "x-vss-e2eid"));
|
||||
|
||||
var result = true;
|
||||
while (checkTasks.Count > 0)
|
||||
{
|
||||
var finishedCheckTask = await Task.WhenAny<CheckResult>(checkTasks);
|
||||
var finishedCheck = await finishedCheckTask;
|
||||
result = result && finishedCheck.Pass;
|
||||
await File.AppendAllLinesAsync(_logFile, finishedCheck.Logs);
|
||||
checkTasks.Remove(finishedCheckTask);
|
||||
}
|
||||
|
||||
await Task.WhenAll(checkTasks);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
417
src/Runner.Listener/Checks/CheckUtil.cs
Normal file
@@ -0,0 +1,417 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics.Tracing;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Net.NetworkInformation;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public static class CheckUtil
|
||||
{
|
||||
public static List<string> WarnLog(this IHostContext hostContext)
|
||||
{
|
||||
var logs = new List<string>();
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** !!! WARNING !!! ");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** DO NOT share the log in public place! The log may contains secrets in plain text. ");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** !!! WARNING !!! ");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
return logs;
|
||||
}
|
||||
|
||||
public static List<string> CheckProxy(this IHostContext hostContext)
|
||||
{
|
||||
var logs = new List<string>();
|
||||
if (!string.IsNullOrEmpty(hostContext.WebProxy.HttpProxyAddress) ||
|
||||
!string.IsNullOrEmpty(hostContext.WebProxy.HttpsProxyAddress))
|
||||
{
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** Runner is behind web proxy {hostContext.WebProxy.HttpsProxyAddress ?? hostContext.WebProxy.HttpProxyAddress} ");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return logs;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> CheckDns(string targetUrl)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
var url = new Uri(targetUrl);
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Try DNS lookup for {url.Host} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
IPHostEntry host = await Dns.GetHostEntryAsync(url.Host);
|
||||
foreach (var address in host.AddressList)
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Resolved DNS for {url.Host} to '{address}'");
|
||||
}
|
||||
|
||||
result.Pass = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Resolved DNS for {url.Host} failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> CheckPing(string targetUrl)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
var url = new Uri(targetUrl);
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Try ping {url.Host} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
using (var ping = new Ping())
|
||||
{
|
||||
var reply = await ping.SendPingAsync(url.Host);
|
||||
if (reply.Status == IPStatus.Success)
|
||||
{
|
||||
result.Pass = true;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Ping {url.Host} ({reply.Address}) succeed within to '{reply.RoundtripTime} ms'");
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Ping {url.Host} ({reply.Address}) failed with '{reply.Status}'");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Ping api.github.com failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> CheckHttpsGetRequests(this IHostContext hostContext, string url, string pat, string expectedHeader)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Send HTTPS Request (GET) to {url} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
using (var _ = new HttpEventSourceListener(result.Logs))
|
||||
using (var httpClientHandler = hostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(hostContext.UserAgents);
|
||||
if (!string.IsNullOrEmpty(pat))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("token", pat);
|
||||
}
|
||||
|
||||
var response = await httpClient.GetAsync(url);
|
||||
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http status code: {response.StatusCode}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http response headers: {response.Headers}");
|
||||
|
||||
var responseContent = await response.Content.ReadAsStringAsync();
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http response body: {responseContent}");
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
if (response.Headers.Contains(expectedHeader))
|
||||
{
|
||||
result.Pass = true;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'GET' to {url} succeed");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'GET' to {url} succeed but doesn't have expected HTTP response Header '{expectedHeader}'.");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'GET' to {url} failed with {response.StatusCode}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Https request 'GET' to {url} failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> CheckHttpsPostRequests(this IHostContext hostContext, string url, string pat, string expectedHeader)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Send HTTPS Request (POST) to {url} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
using (var _ = new HttpEventSourceListener(result.Logs))
|
||||
using (var httpClientHandler = hostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(hostContext.UserAgents);
|
||||
if (!string.IsNullOrEmpty(pat))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("token", pat);
|
||||
}
|
||||
|
||||
// Send empty JSON '{}' to service
|
||||
var response = await httpClient.PostAsJsonAsync<Dictionary<string, string>>(url, new Dictionary<string, string>());
|
||||
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http status code: {response.StatusCode}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http response headers: {response.Headers}");
|
||||
|
||||
var responseContent = await response.Content.ReadAsStringAsync();
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http response body: {responseContent}");
|
||||
if (response.Headers.Contains(expectedHeader))
|
||||
{
|
||||
result.Pass = true;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'POST' to {url} has expected HTTP response header");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'POST' to {url} doesn't have expected HTTP response Header '{expectedHeader}'.");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Https request 'POST' to {url} failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> DownloadExtraCA(this IHostContext hostContext, string url, string pat)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Download SSL Certificate from {url} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
|
||||
var uri = new Uri(url);
|
||||
var env = new Dictionary<string, string>()
|
||||
{
|
||||
{ "HOSTNAME", uri.Host },
|
||||
{ "PORT", uri.IsDefaultPort ? (uri.Scheme.ToLowerInvariant() == "https" ? "443" : "80") : uri.Port.ToString() },
|
||||
{ "PATH", uri.AbsolutePath },
|
||||
{ "PAT", pat }
|
||||
};
|
||||
|
||||
var proxy = hostContext.WebProxy.GetProxy(uri);
|
||||
if (proxy != null)
|
||||
{
|
||||
env["PROXYHOST"] = proxy.Host;
|
||||
env["PROXYPORT"] = proxy.IsDefaultPort ? (proxy.Scheme.ToLowerInvariant() == "https" ? "443" : "80") : proxy.Port.ToString();
|
||||
if (hostContext.WebProxy.HttpProxyUsername != null ||
|
||||
hostContext.WebProxy.HttpsProxyUsername != null)
|
||||
{
|
||||
env["PROXYUSERNAME"] = hostContext.WebProxy.HttpProxyUsername ?? hostContext.WebProxy.HttpsProxyUsername;
|
||||
env["PROXYPASSWORD"] = hostContext.WebProxy.HttpProxyPassword ?? hostContext.WebProxy.HttpsProxyPassword;
|
||||
}
|
||||
else
|
||||
{
|
||||
env["PROXYUSERNAME"] = "";
|
||||
env["PROXYPASSWORD"] = "";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
env["PROXYHOST"] = "";
|
||||
env["PROXYPORT"] = "";
|
||||
env["PROXYUSERNAME"] = "";
|
||||
env["PROXYPASSWORD"] = "";
|
||||
}
|
||||
|
||||
using (var processInvoker = hostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} [STDOUT] {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} [STDERR] {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
var downloadCertScript = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "downloadCert");
|
||||
var node12 = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{downloadCertScript}\"' ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
|
||||
await processInvoker.ExecuteAsync(
|
||||
hostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
node12,
|
||||
$"\"{downloadCertScript}\"",
|
||||
env,
|
||||
true,
|
||||
CancellationToken.None);
|
||||
}
|
||||
|
||||
result.Pass = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Download SSL Certificate from '{url}' failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
// EventSource listener for dotnet debug trace for HTTP and SSL
|
||||
public sealed class HttpEventSourceListener : EventListener
|
||||
{
|
||||
private readonly List<string> _logs;
|
||||
private readonly object _lock = new object();
|
||||
private readonly Dictionary<string, HashSet<string>> _ignoredEvent = new Dictionary<string, HashSet<string>>
|
||||
{
|
||||
{
|
||||
"Microsoft-System-Net-Http",
|
||||
new HashSet<string>
|
||||
{
|
||||
"Info",
|
||||
"Associate",
|
||||
"Enter",
|
||||
"Exit"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Microsoft-System-Net-Security",
|
||||
new HashSet<string>
|
||||
{
|
||||
"Enter",
|
||||
"Exit",
|
||||
"Info",
|
||||
"DumpBuffer",
|
||||
"SslStreamCtor",
|
||||
"SecureChannelCtor",
|
||||
"NoDelegateNoClientCert",
|
||||
"CertsAfterFiltering",
|
||||
"UsingCachedCredential",
|
||||
"SspiSelectedCipherSuite"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public HttpEventSourceListener(List<string> logs)
|
||||
{
|
||||
_logs = logs;
|
||||
if (Environment.GetEnvironmentVariable("ACTIONS_RUNNER_TRACE_ALL_HTTP_EVENT") == "1")
|
||||
{
|
||||
_ignoredEvent.Clear();
|
||||
}
|
||||
}
|
||||
|
||||
protected override void OnEventSourceCreated(EventSource eventSource)
|
||||
{
|
||||
base.OnEventSourceCreated(eventSource);
|
||||
|
||||
if (eventSource.Name == "Microsoft-System-Net-Http" ||
|
||||
eventSource.Name == "Microsoft-System-Net-Security")
|
||||
{
|
||||
EnableEvents(eventSource, EventLevel.Verbose, EventKeywords.All);
|
||||
}
|
||||
}
|
||||
|
||||
protected override void OnEventWritten(EventWrittenEventArgs eventData)
|
||||
{
|
||||
base.OnEventWritten(eventData);
|
||||
lock (_lock)
|
||||
{
|
||||
if (_ignoredEvent.TryGetValue(eventData.EventSource.Name, out var ignored) &&
|
||||
ignored.Contains(eventData.EventName))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
_logs.Add($"{DateTime.UtcNow.ToString("O")} [START {eventData.EventSource.Name} - {eventData.EventName}]");
|
||||
_logs.AddRange(eventData.Payload.Select(x => string.Join(Environment.NewLine, x.ToString().Split(Environment.NewLine).Select(y => $"{DateTime.UtcNow.ToString("O")} {y}"))));
|
||||
_logs.Add($"{DateTime.UtcNow.ToString("O")} [END {eventData.EventSource.Name} - {eventData.EventName}]");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
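The helpers above are what the check extensions below build on. As a rough, hedged sketch (illustrative only, not part of this change; the real checks create the HttpClient through IHostContext.CreateHttpClientHandler() so proxy and certificate settings are honored), wrapping an HTTP call in HttpEventSourceListener is what routes the low-level System.Net traces into the same CheckResult log the check writes to its diagnostic log file:

// Minimal sketch, assuming a plain HttpClient and the header the internet check below looks for.
var result = new CheckResult();
using (var _ = new HttpEventSourceListener(result.Logs))   // captures Microsoft-System-Net-Http/Security events
using (var httpClient = new HttpClient())
{
    var response = await httpClient.GetAsync("https://api.github.com");
    result.Pass = response.Headers.Contains("X-GitHub-Request-Id");
}
// Setting ACTIONS_RUNNER_TRACE_ALL_HTTP_EVENT=1 keeps even the normally ignored verbose events.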
171
src/Runner.Listener/Checks/GitCheck.cs
Normal file
@@ -0,0 +1,171 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public sealed class GitCheck : RunnerService, ICheckExtension
|
||||
{
|
||||
private string _logFile = null;
|
||||
private string _gitPath = null;
|
||||
|
||||
public int Order => 3;
|
||||
|
||||
public string CheckName => "Git Certificate/Proxy Validation";
|
||||
|
||||
public string CheckDescription => "Check if the Git CLI can access GitHub.com or GitHub Enterprise Server.";
|
||||
|
||||
public string CheckLog => _logFile;
|
||||
|
||||
public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks/git.md";
|
||||
|
||||
public Type ExtensionType => typeof(ICheckExtension);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), StringUtil.Format("{0}_{1:yyyyMMdd-HHmmss}-utc.log", nameof(GitCheck), DateTime.UtcNow));
|
||||
_gitPath = WhichUtil.Which("git");
|
||||
}
|
||||
|
||||
// git access to ghes/gh
|
||||
public async Task<bool> RunCheck(string url, string pat)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
|
||||
|
||||
if (string.IsNullOrEmpty(_gitPath))
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow.ToString("O")} Can't verify git with GitHub.com or GitHub Enterprise Server since git is not installed." });
|
||||
return false;
|
||||
}
|
||||
|
||||
var checkGit = await CheckGit(url, pat);
|
||||
var result = checkGit.Pass;
|
||||
await File.AppendAllLinesAsync(_logFile, checkGit.Logs);
|
||||
|
||||
// try fix SSL error by providing extra CA certificate.
|
||||
if (checkGit.SslError)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow.ToString("O")} Try fix SSL error by providing extra CA certificate." });
|
||||
var downloadCert = await HostContext.DownloadExtraCA(url, pat);
|
||||
await File.AppendAllLinesAsync(_logFile, downloadCert.Logs);
|
||||
|
||||
if (downloadCert.Pass)
|
||||
{
|
||||
var recheckGit = await CheckGit(url, pat, extraCA: true);
|
||||
await File.AppendAllLinesAsync(_logFile, recheckGit.Logs);
|
||||
if (recheckGit.Pass)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow.ToString("O")} Fixed SSL error by providing extra CA certs." });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private async Task<CheckResult> CheckGit(string url, string pat, bool extraCA = false)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Validate server cert and proxy configuration with Git ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
var repoUrlBuilder = new UriBuilder(url);
|
||||
repoUrlBuilder.Path = "actions/checkout";
|
||||
repoUrlBuilder.UserName = "gh";
|
||||
repoUrlBuilder.Password = pat;
|
||||
|
||||
var gitProxy = "";
|
||||
var proxy = HostContext.WebProxy.GetProxy(repoUrlBuilder.Uri);
|
||||
if (proxy != null)
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Runner is behind http proxy '{proxy.AbsoluteUri}'");
|
||||
if (HostContext.WebProxy.HttpProxyUsername != null ||
|
||||
HostContext.WebProxy.HttpsProxyUsername != null)
|
||||
{
|
||||
var proxyUrlWithCred = UrlUtil.GetCredentialEmbeddedUrl(
|
||||
proxy,
|
||||
HostContext.WebProxy.HttpProxyUsername ?? HostContext.WebProxy.HttpsProxyUsername,
|
||||
HostContext.WebProxy.HttpProxyPassword ?? HostContext.WebProxy.HttpsProxyPassword);
|
||||
gitProxy = $"-c http.proxy={proxyUrlWithCred}";
|
||||
}
|
||||
else
|
||||
{
|
||||
gitProxy = $"-c http.proxy={proxy.AbsoluteUri}";
|
||||
}
|
||||
}
|
||||
|
||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
var gitArgs = $"{gitProxy} ls-remote --exit-code {repoUrlBuilder.Uri.AbsoluteUri} HEAD";
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run 'git {gitArgs}' ");
|
||||
|
||||
var env = new Dictionary<string, string>
|
||||
{
|
||||
{ "GIT_TRACE", "1" },
|
||||
{ "GIT_CURL_VERBOSE", "1" }
|
||||
};
|
||||
|
||||
if (extraCA)
|
||||
{
|
||||
env["GIT_SSL_CAINFO"] = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), "download_ca_cert.pem");
|
||||
}
|
||||
|
||||
await processInvoker.ExecuteAsync(
|
||||
HostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
_gitPath,
|
||||
gitArgs,
|
||||
env,
|
||||
true,
|
||||
CancellationToken.None);
|
||||
}
|
||||
|
||||
result.Pass = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** git ls-remote failed with error: {ex}");
|
||||
if (result.Logs.Any(x => x.Contains("SSL Certificate problem", StringComparison.OrdinalIgnoreCase)))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** git ls-remote failed due to SSL cert issue.");
|
||||
result.SslError = true;
|
||||
}
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
30
src/Runner.Listener/Checks/ICheckExtension.cs
Normal file
@@ -0,0 +1,30 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public interface ICheckExtension : IExtension
|
||||
{
|
||||
int Order { get; }
|
||||
string CheckName { get; }
|
||||
string CheckDescription { get; }
|
||||
string CheckLog { get; }
|
||||
string HelpLink { get; }
|
||||
Task<bool> RunCheck(string url, string pat);
|
||||
}
|
||||
|
||||
public class CheckResult
|
||||
{
|
||||
public CheckResult()
|
||||
{
|
||||
Logs = new List<string>();
|
||||
}
|
||||
|
||||
public bool Pass { get; set; }
|
||||
|
||||
public bool SslError { get; set; }
|
||||
|
||||
public List<string> Logs { get; set; }
|
||||
}
|
||||
}
|
||||
59
src/Runner.Listener/Checks/InternetCheck.cs
Normal file
@@ -0,0 +1,59 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public sealed class InternetCheck : RunnerService, ICheckExtension
|
||||
{
|
||||
private string _logFile = null;
|
||||
|
||||
public int Order => 1;
|
||||
|
||||
public string CheckName => "Internet Connection";
|
||||
|
||||
public string CheckDescription => "Check if the Actions runner has internet access.";
|
||||
|
||||
public string CheckLog => _logFile;
|
||||
|
||||
public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks/internet.md";
|
||||
|
||||
public Type ExtensionType => typeof(ICheckExtension);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), StringUtil.Format("{0}_{1:yyyyMMdd-HHmmss}-utc.log", nameof(InternetCheck), DateTime.UtcNow));
|
||||
}
|
||||
|
||||
// check runner access to api.github.com
|
||||
public async Task<bool> RunCheck(string url, string pat)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
|
||||
|
||||
var checkTasks = new List<Task<CheckResult>>();
|
||||
checkTasks.Add(CheckUtil.CheckDns("https://api.github.com"));
|
||||
checkTasks.Add(CheckUtil.CheckPing("https://api.github.com"));
|
||||
|
||||
// We don't need to pass a PAT since it might be a token for GHES.
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests("https://api.github.com", pat: null, expectedHeader: "X-GitHub-Request-Id"));
|
||||
|
||||
var result = true;
|
||||
while (checkTasks.Count > 0)
|
||||
{
|
||||
var finishedCheckTask = await Task.WhenAny<CheckResult>(checkTasks);
|
||||
var finishedCheck = await finishedCheckTask;
|
||||
result = result && finishedCheck.Pass;
|
||||
await File.AppendAllLinesAsync(_logFile, finishedCheck.Logs);
|
||||
checkTasks.Remove(finishedCheckTask);
|
||||
}
|
||||
|
||||
await Task.WhenAll(checkTasks);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
181
src/Runner.Listener/Checks/NodeJsCheck.cs
Normal file
@@ -0,0 +1,181 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public sealed class NodeJsCheck : RunnerService, ICheckExtension
|
||||
{
|
||||
private string _logFile = null;
|
||||
|
||||
public int Order => 4;
|
||||
|
||||
public string CheckName => "Node.js Certificate/Proxy Validation";
|
||||
|
||||
public string CheckDescription => "Check if Node.js has access to GitHub.com or GitHub Enterprise Server.";
|
||||
|
||||
public string CheckLog => _logFile;
|
||||
|
||||
public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks/nodejs.md";
|
||||
|
||||
public Type ExtensionType => typeof(ICheckExtension);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), StringUtil.Format("{0}_{1:yyyyMMdd-HHmmss}-utc.log", nameof(NodeJsCheck), DateTime.UtcNow));
|
||||
}
|
||||
|
||||
// node access to ghes/gh
|
||||
public async Task<bool> RunCheck(string url, string pat)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
|
||||
|
||||
// Request to github.com or ghes server
|
||||
var urlBuilder = new UriBuilder(url);
|
||||
if (UrlUtil.IsHostedServer(urlBuilder))
|
||||
{
|
||||
urlBuilder.Host = $"api.{urlBuilder.Host}";
|
||||
urlBuilder.Path = "";
|
||||
}
|
||||
else
|
||||
{
|
||||
urlBuilder.Path = "api/v3";
|
||||
}
|
||||
|
||||
var checkNode = await CheckNodeJs(urlBuilder.Uri.AbsoluteUri, pat);
|
||||
var result = checkNode.Pass;
|
||||
await File.AppendAllLinesAsync(_logFile, checkNode.Logs);
|
||||
|
||||
// try fix SSL error by providing extra CA certificate.
|
||||
if (checkNode.SslError)
|
||||
{
|
||||
var downloadCert = await HostContext.DownloadExtraCA(urlBuilder.Uri.AbsoluteUri, pat);
|
||||
await File.AppendAllLinesAsync(_logFile, downloadCert.Logs);
|
||||
|
||||
if (downloadCert.Pass)
|
||||
{
|
||||
var recheckNode = await CheckNodeJs(urlBuilder.Uri.AbsoluteUri, pat, extraCA: true);
|
||||
await File.AppendAllLinesAsync(_logFile, recheckNode.Logs);
|
||||
if (recheckNode.Pass)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow.ToString("O")} Fixed SSL error by providing extra CA certs." });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private async Task<CheckResult> CheckNodeJs(string url, string pat, bool extraCA = false)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Make Http request to {url} using node.js ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
|
||||
// Request to github.com or ghes server
|
||||
Uri requestUrl = new Uri(url);
|
||||
var env = new Dictionary<string, string>()
|
||||
{
|
||||
{ "HOSTNAME", requestUrl.Host },
|
||||
{ "PORT", requestUrl.IsDefaultPort ? (requestUrl.Scheme.ToLowerInvariant() == "https" ? "443" : "80") : requestUrl.Port.ToString() },
|
||||
{ "PATH", requestUrl.AbsolutePath },
|
||||
{ "PAT", pat }
|
||||
};
|
||||
|
||||
var proxy = HostContext.WebProxy.GetProxy(requestUrl);
|
||||
if (proxy != null)
|
||||
{
|
||||
env["PROXYHOST"] = proxy.Host;
|
||||
env["PROXYPORT"] = proxy.IsDefaultPort ? (proxy.Scheme.ToLowerInvariant() == "https" ? "443" : "80") : proxy.Port.ToString();
|
||||
if (HostContext.WebProxy.HttpProxyUsername != null ||
|
||||
HostContext.WebProxy.HttpsProxyUsername != null)
|
||||
{
|
||||
env["PROXYUSERNAME"] = HostContext.WebProxy.HttpProxyUsername ?? HostContext.WebProxy.HttpsProxyUsername;
|
||||
env["PROXYPASSWORD"] = HostContext.WebProxy.HttpProxyPassword ?? HostContext.WebProxy.HttpsProxyPassword;
|
||||
}
|
||||
else
|
||||
{
|
||||
env["PROXYUSERNAME"] = "";
|
||||
env["PROXYPASSWORD"] = "";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
env["PROXYHOST"] = "";
|
||||
env["PROXYPORT"] = "";
|
||||
env["PROXYUSERNAME"] = "";
|
||||
env["PROXYPASSWORD"] = "";
|
||||
}
|
||||
|
||||
if (extraCA)
|
||||
{
|
||||
env["NODE_EXTRA_CA_CERTS"] = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), "download_ca_cert.pem");
|
||||
}
|
||||
|
||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} [STDOUT] {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} [STDERR] {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
var makeWebRequestScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "makeWebRequest.js");
|
||||
var node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{makeWebRequestScript}\"' ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
|
||||
await processInvoker.ExecuteAsync(
|
||||
HostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
node12,
|
||||
$"\"{makeWebRequestScript}\"",
|
||||
env,
|
||||
true,
|
||||
CancellationToken.None);
|
||||
}
|
||||
|
||||
result.Pass = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Make https request to {url} using node.js failed with error: {ex}");
|
||||
if (result.Logs.Any(x => x.Contains("UNABLE_TO_VERIFY_LEAF_SIGNATURE") ||
|
||||
x.Contains("UNABLE_TO_GET_ISSUER_CERT_LOCALLY") ||
|
||||
x.Contains("SELF_SIGNED_CERT_IN_CHAIN")))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Https request failed due to SSL cert issue.");
|
||||
result.SslError = true;
|
||||
}
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -27,6 +27,7 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
private readonly string[] validFlags =
|
||||
{
|
||||
Constants.Runner.CommandLine.Flags.Check,
|
||||
Constants.Runner.CommandLine.Flags.Commit,
|
||||
Constants.Runner.CommandLine.Flags.Help,
|
||||
Constants.Runner.CommandLine.Flags.Replace,
|
||||
@@ -42,6 +43,7 @@ namespace GitHub.Runner.Listener
|
||||
Constants.Runner.CommandLine.Args.Labels,
|
||||
Constants.Runner.CommandLine.Args.MonitorSocketAddress,
|
||||
Constants.Runner.CommandLine.Args.Name,
|
||||
Constants.Runner.CommandLine.Args.PAT,
|
||||
Constants.Runner.CommandLine.Args.RunnerGroup,
|
||||
Constants.Runner.CommandLine.Args.StartupType,
|
||||
Constants.Runner.CommandLine.Args.Token,
|
||||
@@ -59,6 +61,7 @@ namespace GitHub.Runner.Listener
|
||||
public bool Warmup => TestCommand(Constants.Runner.CommandLine.Commands.Warmup);
|
||||
|
||||
// Flags.
|
||||
public bool Check => TestFlag(Constants.Runner.CommandLine.Flags.Check);
|
||||
public bool Commit => TestFlag(Constants.Runner.CommandLine.Flags.Commit);
|
||||
public bool Help => TestFlag(Constants.Runner.CommandLine.Flags.Help);
|
||||
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
|
||||
@@ -187,6 +190,22 @@ namespace GitHub.Runner.Listener
|
||||
validator: Validators.NonEmptyValidator);
|
||||
}
|
||||
|
||||
public string GetGitHubPersonalAccessToken(bool required = false)
|
||||
{
|
||||
if (required)
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
name: Constants.Runner.CommandLine.Args.PAT,
|
||||
description: "What is your GitHub personal access token?",
|
||||
defaultValue: string.Empty,
|
||||
validator: Validators.NonEmptyValidator);
|
||||
}
|
||||
else
|
||||
{
|
||||
return GetArg(name: Constants.Runner.CommandLine.Args.PAT);
|
||||
}
|
||||
}
|
||||
|
||||
public string GetRunnerRegisterToken()
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
|
||||
@@ -4,7 +4,6 @@ using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.OAuth;
|
||||
using GitHub.Services.WebApi;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
@@ -12,6 +11,7 @@ using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace GitHub.Runner.Listener.Configuration
|
||||
@@ -53,7 +53,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
Trace.Info(nameof(LoadSettings));
|
||||
if (!IsConfigured())
|
||||
{
|
||||
throw new InvalidOperationException("Not configured");
|
||||
throw new InvalidOperationException("Not configured. Run config.(sh/cmd) to configure the runner.");
|
||||
}
|
||||
|
||||
RunnerSettings settings = _store.GetSettings();
|
||||
@@ -107,8 +107,8 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
else
|
||||
{
|
||||
runnerSettings.GitHubUrl = inputUrl;
|
||||
var githubToken = command.GetRunnerRegisterToken();
|
||||
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken, Constants.RunnerEvent.Register);
|
||||
var registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
|
||||
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
|
||||
runnerSettings.ServerUrl = authResult.TenantUrl;
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
@@ -117,7 +117,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
try
|
||||
{
|
||||
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
|
||||
runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));
|
||||
runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || UrlUtil.IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));
|
||||
|
||||
// Warn if the Actions server url and GHES server url have different hosts
|
||||
if (!runnerSettings.IsHostedServer)
|
||||
@@ -165,7 +165,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
List<TaskAgentPool> agentPools = await _runnerServer.GetAgentPoolsAsync();
|
||||
TaskAgentPool defaultPool = agentPools?.Where(x => x.IsInternal).FirstOrDefault();
|
||||
|
||||
if (agentPools?.Where(x => !x.IsHosted).Count() > 1)
|
||||
if (agentPools?.Where(x => !x.IsHosted).Count() > 0)
|
||||
{
|
||||
poolName = command.GetRunnerGroupName(defaultPool?.Name);
|
||||
_term.WriteLine();
|
||||
@@ -374,8 +374,8 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
else
|
||||
{
|
||||
var githubToken = command.GetRunnerDeletionToken();
|
||||
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken, Constants.RunnerEvent.Remove);
|
||||
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
|
||||
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove);
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
}
|
||||
@@ -509,18 +509,107 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
return agent;
|
||||
}
|
||||
|
||||
private bool IsHostedServer(UriBuilder gitHubUrl)
|
||||
private async Task<string> GetRunnerTokenAsync(CommandSettings command, string githubUrl, string tokenType)
|
||||
{
|
||||
return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
|
||||
var githubPAT = command.GetGitHubPersonalAccessToken();
|
||||
var runnerToken = string.Empty;
|
||||
if (!string.IsNullOrEmpty(githubPAT))
|
||||
{
|
||||
Trace.Info($"Retriving runner {tokenType} token using GitHub PAT.");
|
||||
var jitToken = await GetJITRunnerTokenAsync(githubUrl, githubPAT, tokenType);
|
||||
Trace.Info($"Retrived runner {tokenType} token is good to {jitToken.ExpiresAt}.");
|
||||
HostContext.SecretMasker.AddValue(jitToken.Token);
|
||||
runnerToken = jitToken.Token;
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(runnerToken))
|
||||
{
|
||||
if (string.Equals("registration", tokenType, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
runnerToken = command.GetRunnerRegisterToken();
|
||||
}
|
||||
else
|
||||
{
|
||||
runnerToken = command.GetRunnerDeletionToken();
|
||||
}
|
||||
}
|
||||
|
||||
return runnerToken;
|
||||
}
|
||||
|
||||
private async Task<GitHubRunnerRegisterToken> GetJITRunnerTokenAsync(string githubUrl, string githubToken, string tokenType)
|
||||
{
|
||||
var githubApiUrl = "";
|
||||
var gitHubUrlBuilder = new UriBuilder(githubUrl);
|
||||
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
|
||||
if (path.Length == 1)
|
||||
{
|
||||
// org runner
|
||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runners/{tokenType}-token";
|
||||
}
|
||||
else
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runners/{tokenType}-token";
|
||||
}
|
||||
}
|
||||
else if (path.Length == 2)
|
||||
{
|
||||
// repo or enterprise runner.
|
||||
var repoScope = "repos/";
|
||||
if (string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
repoScope = "";
|
||||
}
|
||||
|
||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{repoScope}{path[0]}/{path[1]}/actions/runners/{tokenType}-token";
|
||||
}
|
||||
else
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{repoScope}{path[0]}/{path[1]}/actions/runners/{tokenType}-token";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ArgumentException($"'{githubUrl}' should point to an org or repository.");
|
||||
}
|
||||
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
|
||||
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var jsonResponse = await response.Content.ReadAsStringAsync();
|
||||
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var errorResponse = await response.Content.ReadAsStringAsync();
|
||||
_term.WriteError(errorResponse);
|
||||
response.EnsureSuccessStatusCode();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
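For reference, a hedged sketch of the endpoint shapes this routing produces (the org, repo, and host names below are placeholders, not values from this change; {tokenType} is "registration" or "remove" as passed in by GetRunnerTokenAsync):

// https://github.com/my-org                          (hosted, org runner)
//   -> POST https://api.github.com/orgs/my-org/actions/runners/registration-token
// https://ghes.example.com/my-org/my-repo             (GHES, repo runner)
//   -> POST https://ghes.example.com/api/v3/repos/my-org/my-repo/actions/runners/registration-token
// https://github.com/enterprises/my-enterprise        (hosted, enterprise runner; the "repos/" scope is dropped)
//   -> POST https://api.github.com/enterprises/my-enterprise/actions/runners/registration-token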
|
||||
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
|
||||
{
|
||||
var githubApiUrl = "";
|
||||
var gitHubUrlBuilder = new UriBuilder(githubUrl);
|
||||
if (IsHostedServer(gitHubUrlBuilder))
|
||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runner-registration";
|
||||
}
|
||||
|
||||
@@ -71,6 +71,16 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
}
|
||||
|
||||
[DataContract]
|
||||
public sealed class GitHubRunnerRegisterToken
|
||||
{
|
||||
[DataMember(Name = "token")]
|
||||
public string Token { get; set; }
|
||||
|
||||
[DataMember(Name = "expires_at")]
|
||||
public string ExpiresAt { get; set; }
|
||||
}
|
||||
|
||||
[DataContract]
|
||||
public sealed class GitHubAuthResult
|
||||
{
|
||||
|
||||
@@ -87,7 +87,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
public string GetUniqueRunnerGroupName()
|
||||
{
|
||||
return RunnerServiceLocalGroupPrefix + IOUtil.GetPathHash(HostContext.GetDirectory(WellKnownDirectory.Bin)).Substring(0, 5);
|
||||
return RunnerServiceLocalGroupPrefix + IOUtil.GetSha256Hash(HostContext.GetDirectory(WellKnownDirectory.Bin)).Substring(0, 5);
|
||||
}
|
||||
|
||||
public bool LocalGroupExists(string groupName)
|
||||
|
||||
@@ -27,11 +27,11 @@ namespace GitHub.Runner.Listener
|
||||
Task ShutdownAsync();
|
||||
}
|
||||
|
||||
// This implementation of IDobDispatcher is not thread safe.
|
||||
// It is base on the fact that the current design of runner is dequeue
|
||||
// and process one message from message queue everytime.
|
||||
// In addition, it only execute one job every time,
|
||||
// and server will not send another job while this one is still running.
|
||||
// This implementation of IJobDispatcher is not thread safe.
|
||||
// It is based on the fact that the current design of the runner is a dequeue
|
||||
// and processes one message from the message queue at a time.
|
||||
// In addition, it only executes one job every time,
|
||||
// and the server will not send another job while this one is still running.
|
||||
public sealed class JobDispatcher : RunnerService, IJobDispatcher
|
||||
{
|
||||
private readonly Lazy<Dictionary<long, TaskResult>> _localRunJobResult = new Lazy<Dictionary<long, TaskResult>>();
|
||||
@@ -43,8 +43,8 @@ namespace GitHub.Runner.Listener
|
||||
private readonly Queue<Guid> _jobDispatchedQueue = new Queue<Guid>();
|
||||
private readonly ConcurrentDictionary<Guid, WorkerDispatcher> _jobInfos = new ConcurrentDictionary<Guid, WorkerDispatcher>();
|
||||
|
||||
//allow up to 30sec for any data to be transmitted over the process channel
|
||||
//timeout limit can be overwrite by environment GITHUB_ACTIONS_RUNNER_CHANNEL_TIMEOUT
|
||||
// allow up to 30sec for any data to be transmitted over the process channel
|
||||
// timeout limit can be overwritten by environment GITHUB_ACTIONS_RUNNER_CHANNEL_TIMEOUT
|
||||
private TimeSpan _channelTimeout;
|
||||
|
||||
private TaskCompletionSource<bool> _runOnceJobCompleted = new TaskCompletionSource<bool>();
|
||||
@@ -64,7 +64,7 @@ namespace GitHub.Runner.Listener
|
||||
channelTimeoutSeconds = 30;
|
||||
}
|
||||
|
||||
// _channelTimeout should in range [30, 300] seconds
|
||||
// _channelTimeout should be in range [30, 300] seconds
|
||||
_channelTimeout = TimeSpan.FromSeconds(Math.Min(Math.Max(channelTimeoutSeconds, 30), 300));
|
||||
Trace.Info($"Set runner/worker IPC timeout to {_channelTimeout.TotalSeconds} seconds.");
|
||||
}
|
||||
@@ -230,16 +230,27 @@ namespace GitHub.Runner.Listener
|
||||
return;
|
||||
}
|
||||
|
||||
// base on the current design, server will only send one job for a given runner everytime.
|
||||
// if the runner received a new job request while a previous job request is still running, this typically indicate two situations
|
||||
// 1. an runner bug cause server and runner mismatch on the state of the job request, ex. runner not renew jobrequest properly but think it still own the job reqest, however server already abandon the jobrequest.
|
||||
// 2. a server bug or design change that allow server send more than one job request to an given runner that haven't finish previous job request.
|
||||
// based on the current design, the server will only send one job to a given runner at a time.
// if the runner receives a new job request while a previous job request is still running, this typically indicates one of two situations
// 1. a runner bug caused a server and runner mismatch on the state of the job request, e.g. the runner didn't renew the job request
//    properly but thinks it still owns the job request; however, the server has already abandoned the job request.
// 2. a server bug or design change that allowed the server to send more than one job request to a given runner that hasn't finished
//    a previous job request.
|
||||
var runnerServer = HostContext.GetService<IRunnerServer>();
|
||||
TaskAgentJobRequest request = null;
|
||||
try
|
||||
{
|
||||
request = await runnerServer.GetAgentRequestAsync(_poolId, jobDispatch.RequestId, CancellationToken.None);
|
||||
}
|
||||
catch (TaskAgentJobNotFoundException ex)
|
||||
{
|
||||
Trace.Error($"Catch job-not-found exception while checking jobrequest {jobDispatch.JobId} status. Cancel running worker right away.");
|
||||
Trace.Error(ex);
|
||||
jobDispatch.WorkerCancellationTokenSource.Cancel();
|
||||
// make sure worker process exits before we return, otherwise we might leave an orphan worker process behind.
|
||||
await jobDispatch.WorkerDispatch;
|
||||
return;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// we can't even query for the jobrequest from server, something totally busted, stop runner/worker.
|
||||
@@ -247,7 +258,7 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Error(ex);
|
||||
|
||||
jobDispatch.WorkerCancellationTokenSource.Cancel();
|
||||
// make sure worker process exit before we rethrow, otherwise we might leave orphan worker process behind.
|
||||
// make sure the worker process exits before we rethrow, otherwise we might leave orphan worker process behind.
|
||||
await jobDispatch.WorkerDispatch;
|
||||
|
||||
// rethrow original exception
|
||||
@@ -256,8 +267,8 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
if (request.Result != null)
|
||||
{
|
||||
// job request has been finished, the server already has result.
|
||||
// this means runner is busted since it still running that request.
|
||||
// job request has been finished, the server already has the result.
|
||||
// this means the runner is busted since it is still running that request.
|
||||
// cancel the zombie worker, run next job request.
|
||||
Trace.Error($"Received job request while previous job {jobDispatch.JobId} still running on worker. Cancel the previous job since the job request have been finished on server side with result: {request.Result.Value}.");
|
||||
jobDispatch.WorkerCancellationTokenSource.Cancel();
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
@@ -11,6 +10,8 @@ using System.Reflection;
|
||||
using System.Runtime.CompilerServices;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Linq;
|
||||
using GitHub.Runner.Listener.Check;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
@@ -72,6 +73,46 @@ namespace GitHub.Runner.Listener
|
||||
return Constants.Runner.ReturnCode.Success;
|
||||
}
|
||||
|
||||
if (command.Check)
|
||||
{
|
||||
var url = command.GetUrl();
|
||||
var pat = command.GetGitHubPersonalAccessToken(required: true);
|
||||
var checkExtensions = HostContext.GetService<IExtensionManager>().GetExtensions<ICheckExtension>();
|
||||
var sortedChecks = checkExtensions.OrderBy(x => x.Order);
|
||||
foreach (var check in sortedChecks)
|
||||
{
|
||||
_term.WriteLine($"**********************************************************************************************************************");
|
||||
_term.WriteLine($"** Check: {check.CheckName}");
|
||||
_term.WriteLine($"** Description: {check.CheckDescription}");
|
||||
_term.WriteLine($"**********************************************************************************************************************");
|
||||
var result = await check.RunCheck(url, pat);
|
||||
if (!result)
|
||||
{
|
||||
_term.WriteLine($"** **");
|
||||
_term.WriteLine($"** F A I L **");
|
||||
_term.WriteLine($"** **");
|
||||
_term.WriteLine($"**********************************************************************************************************************");
|
||||
_term.WriteLine($"** Log: {check.CheckLog}");
|
||||
_term.WriteLine($"** Help Doc: {check.HelpLink}");
|
||||
_term.WriteLine($"**********************************************************************************************************************");
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteLine($"** **");
|
||||
_term.WriteLine($"** P A S S **");
|
||||
_term.WriteLine($"** **");
|
||||
_term.WriteLine($"**********************************************************************************************************************");
|
||||
_term.WriteLine($"** Log: {check.CheckLog}");
|
||||
_term.WriteLine($"**********************************************************************************************************************");
|
||||
}
|
||||
|
||||
_term.WriteLine();
|
||||
_term.WriteLine();
|
||||
}
|
||||
|
||||
return Constants.Runner.ReturnCode.Success;
|
||||
}
|
||||
|
||||
// Configure runner prompt for args if not supplied
|
||||
// Unattended configure mode will not prompt for args if not supplied and error on any missing or invalid value.
|
||||
if (command.Configure)
|
||||
@@ -460,6 +501,7 @@ Options:
|
||||
--help Prints the help for each command
|
||||
--version Prints the runner version
|
||||
--commit Prints the runner commit
|
||||
--check Check the runner's network connectivity with GitHub server
|
||||
|
||||
Config Options:
|
||||
--unattended Disable interactive prompts for missing arguments. Defaults will be used for missing options
|
||||
@@ -469,7 +511,8 @@ Config Options:
|
||||
--runnergroup string Name of the runner group to add this runner to (defaults to the default runner group)
|
||||
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
|
||||
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
|
||||
--replace Replace any existing runner with the same name (default false)");
|
||||
--replace Replace any existing runner with the same name (default false)
|
||||
--pat GitHub personal access token used for checking network connectivity when executing `.{separator}run.{ext} --check`");
|
||||
#if OS_WINDOWS
|
||||
_term.WriteLine($@" --runasservice Run the runner as a service");
|
||||
_term.WriteLine($@" --windowslogonaccount string Account to run the service as. Requires runasservice");
|
||||
@@ -477,6 +520,8 @@ Config Options:
|
||||
#endif
|
||||
_term.WriteLine($@"
|
||||
Examples:
|
||||
Check GitHub server network connectivity:
|
||||
.{separator}run.{ext} --check --url <url> --pat <pat>
|
||||
Configure a runner non-interactively:
|
||||
.{separator}config.{ext} --unattended --url <url> --token <token>
|
||||
Configure a runner non-interactively, replacing any existing runner with the same name:
|
||||
|
||||
@@ -8,7 +8,9 @@ using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Security.Cryptography;
|
||||
using GitHub.Services.WebApi;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
@@ -110,7 +112,7 @@ namespace GitHub.Runner.Listener
|
||||
// old server won't send target version as part of update message.
|
||||
if (string.IsNullOrEmpty(targetVersion))
|
||||
{
|
||||
var packages = await _runnerServer.GetPackagesAsync(_packageType, _platform, 1, token);
|
||||
var packages = await _runnerServer.GetPackagesAsync(_packageType, _platform, 1, true, token);
|
||||
if (packages == null || packages.Count == 0)
|
||||
{
|
||||
Trace.Info($"There is no package for {_packageType} and {_platform}.");
|
||||
@@ -121,7 +123,7 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
else
|
||||
{
|
||||
_targetPackage = await _runnerServer.GetPackageAsync(_packageType, _platform, targetVersion, token);
|
||||
_targetPackage = await _runnerServer.GetPackageAsync(_packageType, _platform, targetVersion, true, token);
|
||||
if (_targetPackage == null)
|
||||
{
|
||||
Trace.Info($"There is no package for {_packageType} and {_platform} with version {targetVersion}.");
|
||||
@@ -211,12 +213,22 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
//open zip stream in async mode
|
||||
using (HttpClient httpClient = new HttpClient(HostContext.CreateHttpClientHandler()))
|
||||
using (FileStream fs = new FileStream(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: 4096, useAsync: true))
|
||||
using (Stream result = await httpClient.GetStreamAsync(_targetPackage.DownloadUrl))
|
||||
{
|
||||
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
|
||||
await result.CopyToAsync(fs, 81920, downloadCts.Token);
|
||||
await fs.FlushAsync(downloadCts.Token);
|
||||
if (!string.IsNullOrEmpty(_targetPackage.Token))
|
||||
{
|
||||
Trace.Info($"Adding authorization token ({_targetPackage.Token.Length} chars)");
|
||||
httpClient.DefaultRequestHeaders.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", _targetPackage.Token);
|
||||
}
|
||||
|
||||
Trace.Info($"Downloading {_targetPackage.DownloadUrl}");
|
||||
|
||||
using (FileStream fs = new FileStream(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: 4096, useAsync: true))
|
||||
using (Stream result = await httpClient.GetStreamAsync(_targetPackage.DownloadUrl))
|
||||
{
|
||||
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
|
||||
await result.CopyToAsync(fs, 81920, downloadCts.Token);
|
||||
await fs.FlushAsync(downloadCts.Token);
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Download runner: finished download");
|
||||
@@ -246,6 +258,24 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
|
||||
// If we got this far, we know that we've successfully downloaded the runner package
|
||||
// Validate Hash Matches if it is provided
|
||||
using (FileStream stream = File.OpenRead(archiveFile))
|
||||
{
|
||||
if (!String.IsNullOrEmpty(_targetPackage.HashValue))
|
||||
{
|
||||
using (SHA256 sha256 = SHA256.Create())
|
||||
{
|
||||
byte[] srcHashBytes = await sha256.ComputeHashAsync(stream);
|
||||
var hash = PrimitiveExtensions.ConvertToHexString(srcHashBytes);
|
||||
if (hash != _targetPackage.HashValue)
|
||||
{
|
||||
// Hash did not match, we can't recover from this, just throw
|
||||
throw new Exception($"Computed runner hash {hash} did not match expected Runner Hash {_targetPackage.HashValue} for {_targetPackage.Filename}");
|
||||
}
|
||||
Trace.Info($"Validated Runner Hash matches {_targetPackage.Filename} : {_targetPackage.HashValue}");
|
||||
}
|
||||
}
|
||||
}
|
||||
if (archiveFile.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
ZipFile.ExtractToDirectory(archiveFile, latestRunnerDirectory);
|
||||
@@ -327,8 +357,13 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Copy any remaining .sh/.cmd files into runner root.");
|
||||
foreach (FileInfo file in new DirectoryInfo(latestRunnerDirectory).GetFiles() ?? new FileInfo[0])
|
||||
{
|
||||
// Copy and replace the file.
|
||||
file.CopyTo(Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), file.Name), true);
|
||||
string destination = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), file.Name);
|
||||
|
||||
// Removing the file instead of just trying to overwrite it works around permissions issues on linux.
|
||||
// https://github.com/actions/runner/issues/981
|
||||
Trace.Info($"Copy {file.FullName} to {destination}");
|
||||
IOUtil.DeleteFile(destination);
|
||||
file.CopyTo(destination, true);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
19
src/Runner.Sdk/BuildConstants.cs
Normal file
@@ -0,0 +1,19 @@
|
||||
namespace GitHub.Runner.Sdk
|
||||
{
|
||||
/***
|
||||
* WARNING: This file is automatically regenerated on layout so the runner can provide version/commit info (do not manually edit it).
|
||||
*/
|
||||
public static class BuildConstants
|
||||
{
|
||||
public static class Source
|
||||
{
|
||||
public static readonly string CommitHash = "N/A";
|
||||
}
|
||||
|
||||
public static class RunnerPackage
|
||||
{
|
||||
public static readonly string PackageName = "N/A";
|
||||
public static readonly string Version = "0";
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -47,7 +47,7 @@ namespace GitHub.Runner.Sdk
|
||||
return StringUtil.ConvertFromJson<T>(json);
|
||||
}
|
||||
|
||||
public static string GetPathHash(string path)
|
||||
public static string GetSha256Hash(string path)
|
||||
{
|
||||
string hashString = path.ToLowerInvariant();
|
||||
using (SHA256 sha256hash = SHA256.Create())
|
||||
|
||||
@@ -4,6 +4,13 @@ namespace GitHub.Runner.Sdk
|
||||
{
|
||||
public static class UrlUtil
|
||||
{
|
||||
public static bool IsHostedServer(UriBuilder gitHubUrl)
|
||||
{
|
||||
return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
public static Uri GetCredentialEmbeddedUrl(Uri baseUrl, string username, string password)
|
||||
{
|
||||
ArgUtil.NotNull(baseUrl, nameof(baseUrl));
|
||||
|
||||
@@ -115,11 +115,15 @@ namespace GitHub.Runner.Sdk
|
||||
}
|
||||
}
|
||||
|
||||
trace?.Info("Not found.");
|
||||
#if OS_WINDOWS
|
||||
trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'Path' environment variable.");
|
||||
#else
|
||||
trace?.Info($"{command}: command not found. Make sure '{command}' is installed and its location included in the 'PATH' environment variable.");
|
||||
#endif
|
||||
if (require)
|
||||
{
|
||||
throw new FileNotFoundException(
|
||||
message: $"File not found: '{command}'",
|
||||
message: $"{command}: command not found",
|
||||
fileName: command);
|
||||
}
|
||||
|
||||
|
||||
@@ -75,11 +75,19 @@ namespace GitHub.Runner.Worker
return false;
}

// process action command in serialize order.
if (!ActionCommandManager.EnhancedAnnotationsEnabled(context) && actionCommand.Command == "notice")
{
context.Debug($"Enhanced Annotations not enabled on the server: 'notice' command will not be processed.");
return false;
}

// Serialize order
lock (_commandSerializeLock)
{
// Currently stopped
if (_stopProcessCommand)
{
// Resume token
if (!string.IsNullOrEmpty(_stopToken) &&
string.Equals(actionCommand.Command, _stopToken, StringComparison.OrdinalIgnoreCase))
{
@@ -96,8 +104,10 @@ namespace GitHub.Runner.Worker
return false;
}
}
// Currently processing
else
{
// Stop command
if (string.Equals(actionCommand.Command, _stopCommand, StringComparison.OrdinalIgnoreCase))
{
context.Output(input);
@@ -107,6 +117,7 @@ namespace GitHub.Runner.Worker
_registeredCommands.Add(_stopToken);
return true;
}
// Found command
else if (_commandExtensions.TryGetValue(actionCommand.Command, out IActionCommandExtension extension))
{
if (context.EchoOnActionCommand && !extension.OmitEcho)
@@ -126,6 +137,7 @@ namespace GitHub.Runner.Worker
context.CommandResult = TaskResult.Failed;
}
}
// Command not found
else
{
context.Warning($"Can't find command extension for ##[{actionCommand.Command}.command].");
@@ -135,6 +147,10 @@ namespace GitHub.Runner.Worker

return true;
}

internal static bool EnhancedAnnotationsEnabled(IExecutionContext context) {
return context.Global.Variables.GetBoolean("DistributedTask.EnhancedAnnotations") ?? false;
}
}

public interface IActionCommandExtension : IExtension
@@ -492,6 +508,13 @@ namespace GitHub.Runner.Worker
public override string Command => "error";
}

public sealed class NoticeCommandExtension : IssueCommandExtension
{
public override IssueType Type => IssueType.Notice;

public override string Command => "notice";
}

public abstract class IssueCommandExtension : RunnerService, IActionCommandExtension
{
public abstract IssueType Type { get; }
@@ -506,6 +529,11 @@ namespace GitHub.Runner.Worker
command.Properties.TryGetValue(IssueCommandProperties.Line, out string line);
command.Properties.TryGetValue(IssueCommandProperties.Column, out string column);

if (!ActionCommandManager.EnhancedAnnotationsEnabled(context))
{
context.Debug("Enhanced Annotations not enabled on the server. The 'title', 'end_line', and 'end_column' fields are unsupported.");
}

Issue issue = new Issue()
{
Category = "General",
@@ -557,13 +585,73 @@ namespace GitHub.Runner.Worker
context.AddIssue(issue);
}

public static void ValidateLinesAndColumns(ActionCommand command, IExecutionContext context)
{
command.Properties.TryGetValue(IssueCommandProperties.Line, out string line);
command.Properties.TryGetValue(IssueCommandProperties.EndLine, out string endLine);
command.Properties.TryGetValue(IssueCommandProperties.Column, out string column);
command.Properties.TryGetValue(IssueCommandProperties.EndColumn, out string endColumn);

var hasStartLine = int.TryParse(line, out int lineNumber);
var hasEndLine = int.TryParse(endLine, out int endLineNumber);
var hasStartColumn = int.TryParse(column, out int columnNumber);
var hasEndColumn = int.TryParse(endColumn, out int endColumnNumber);
var hasColumn = hasStartColumn || hasEndColumn;

if (hasEndLine && !hasStartLine)
{
context.Debug($"Invalid {command.Command} command value. '{IssueCommandProperties.EndLine}' can only be set if '{IssueCommandProperties.Line}' is provided");
command.Properties[IssueCommandProperties.Line] = endLine;
hasStartLine = true;
line = endLine;
}

if (hasEndColumn && !hasStartColumn)
{
context.Debug($"Invalid {command.Command} command value. '{IssueCommandProperties.EndColumn}' can only be set if '{IssueCommandProperties.Column}' is provided");
command.Properties[IssueCommandProperties.Column] = endColumn;
hasStartColumn = true;
column = endColumn;
}

if (!hasStartLine && hasColumn)
{
context.Debug($"Invalid {command.Command} command value. '{IssueCommandProperties.Column}' and '{IssueCommandProperties.EndColumn}' can only be set if '{IssueCommandProperties.Line}' value is provided.");
command.Properties.Remove(IssueCommandProperties.Column);
command.Properties.Remove(IssueCommandProperties.EndColumn);
}

if (hasEndLine && line != endLine && hasColumn)
{
context.Debug($"Invalid {command.Command} command value. '{IssueCommandProperties.Column}' and '{IssueCommandProperties.EndColumn}' cannot be set if '{IssueCommandProperties.Line}' and '{IssueCommandProperties.EndLine}' are different values.");
command.Properties.Remove(IssueCommandProperties.Column);
command.Properties.Remove(IssueCommandProperties.EndColumn);
}

if (hasStartLine && hasEndLine && endLineNumber < lineNumber)
{
context.Debug($"Invalid {command.Command} command value. '{IssueCommandProperties.EndLine}' cannot be less than '{IssueCommandProperties.Line}'.");
command.Properties.Remove(IssueCommandProperties.Line);
command.Properties.Remove(IssueCommandProperties.EndLine);
}

if (hasStartColumn && hasEndColumn && endColumnNumber < columnNumber)
{
context.Debug($"Invalid {command.Command} command value. '{IssueCommandProperties.EndColumn}' cannot be less than '{IssueCommandProperties.Column}'.");
command.Properties.Remove(IssueCommandProperties.Column);
command.Properties.Remove(IssueCommandProperties.EndColumn);
}
}

private static class IssueCommandProperties
{
public const String File = "file";
public const String Line = "line";
public const String EndLine = "endLine";
public const String Column = "col";
public const String EndColumn = "endColumn";
public const String Title = "title";
}

}

public sealed class GroupCommandExtension : GroupingCommandExtension
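For reference, ValidateLinesAndColumns above normalizes annotation positions before the Issue is built. With illustrative values only, a workflow step that prints

echo "::error file=app.js,line=10,endLine=5,col=4::build failed"

would first lose 'col' (column data is dropped when 'line' and 'endLine' differ) and then lose 'line' and 'endLine' (because endLine is less than line), so the annotation is kept but without position data.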
@@ -53,30 +53,63 @@ namespace GitHub.Runner.Worker
public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers;
public async Task<PrepareResult> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps)
{
// Assert inputs
ArgUtil.NotNull(executionContext, nameof(executionContext));
ArgUtil.NotNull(steps, nameof(steps));

executionContext.Output("Prepare all required actions");
Dictionary<string, List<Guid>> imagesToPull = new Dictionary<string, List<Guid>>(StringComparer.OrdinalIgnoreCase);
Dictionary<string, List<Guid>> imagesToBuild = new Dictionary<string, List<Guid>>(StringComparer.OrdinalIgnoreCase);
Dictionary<string, ActionContainer> imagesToBuildInfo = new Dictionary<string, ActionContainer>(StringComparer.OrdinalIgnoreCase);
List<JobExtensionRunner> containerSetupSteps = new List<JobExtensionRunner>();
Dictionary<Guid, IActionRunner> preStepTracker = new Dictionary<Guid, IActionRunner>();
IEnumerable<Pipelines.ActionStep> actions = steps.OfType<Pipelines.ActionStep>();

// TODO: Deprecate the PREVIEW_ACTION_TOKEN
// Log even if we aren't using it to ensure users know.
if (!string.IsNullOrEmpty(executionContext.Global.Variables.Get("PREVIEW_ACTION_TOKEN")))
var state = new PrepareActionsState
{
executionContext.Warning("The 'PREVIEW_ACTION_TOKEN' secret is deprecated. Please remove it from the repository's secrets");
ImagesToBuild = new Dictionary<string, List<Guid>>(StringComparer.OrdinalIgnoreCase),
ImagesToPull = new Dictionary<string, List<Guid>>(StringComparer.OrdinalIgnoreCase),
ImagesToBuildInfo = new Dictionary<string, ActionContainer>(StringComparer.OrdinalIgnoreCase),
PreStepTracker = new Dictionary<Guid, IActionRunner>()
};
var containerSetupSteps = new List<JobExtensionRunner>();
IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);
IEnumerable<Pipelines.ActionStep> actions = steps.OfType<Pipelines.ActionStep>();
executionContext.Output("Prepare all required actions");
var result = await PrepareActionsRecursiveAsync(executionContext, state, actions, 0);
if (state.ImagesToPull.Count > 0)
{
foreach (var imageToPull in result.ImagesToPull)
{
Trace.Info($"{imageToPull.Value.Count} steps need to pull image '{imageToPull.Key}'");
containerSetupSteps.Add(new JobExtensionRunner(runAsync: this.PullActionContainerAsync,
condition: $"{PipelineTemplateConstants.Success}()",
displayName: $"Pull {imageToPull.Key}",
data: new ContainerSetupInfo(imageToPull.Value, imageToPull.Key)));
}
}

// Clear the cache (for self-hosted runners)
IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);
if (result.ImagesToBuild.Count > 0)
{
foreach (var imageToBuild in result.ImagesToBuild)
{
var setupInfo = result.ImagesToBuildInfo[imageToBuild.Key];
Trace.Info($"{imageToBuild.Value.Count} steps need to build image from '{setupInfo.Dockerfile}'");
containerSetupSteps.Add(new JobExtensionRunner(runAsync: this.BuildActionContainerAsync,
condition: $"{PipelineTemplateConstants.Success}()",
displayName: $"Build {setupInfo.ActionRepository}",
data: new ContainerSetupInfo(imageToBuild.Value, setupInfo.Dockerfile, setupInfo.WorkingDirectory)));
}
}

// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
var newActionMetadata = executionContext.Global.Variables.GetBoolean("DistributedTask.NewActionMetadata") ?? false;
#if !OS_LINUX
if (containerSetupSteps.Count > 0)
{
executionContext.Output("Container action is only supported on Linux, skip pull and build docker images.");
containerSetupSteps.Clear();
}
#endif
return new PrepareResult(containerSetupSteps, result.PreStepTracker);
}

private async Task<PrepareActionsState> PrepareActionsRecursiveAsync(IExecutionContext executionContext, PrepareActionsState state, IEnumerable<Pipelines.ActionStep> actions, Int32 depth = 0)
{
ArgUtil.NotNull(executionContext, nameof(executionContext));
if (depth > Constants.CompositeActionsMaxDepth)
{
throw new Exception($"Composite action depth exceeded max depth {Constants.CompositeActionsMaxDepth}");
}
var repositoryActions = new List<Pipelines.ActionStep>();

foreach (var action in actions)
@@ -88,66 +121,15 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNull(containerReference, nameof(containerReference));
ArgUtil.NotNullOrEmpty(containerReference.Image, nameof(containerReference.Image));

if (!imagesToPull.ContainsKey(containerReference.Image))
if (!state.ImagesToPull.ContainsKey(containerReference.Image))
{
imagesToPull[containerReference.Image] = new List<Guid>();
state.ImagesToPull[containerReference.Image] = new List<Guid>();
}

Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'");
imagesToPull[containerReference.Image].Add(action.Id);
state.ImagesToPull[containerReference.Image].Add(action.Id);
}
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && !newActionMetadata)
{
// only download the repository archive
await DownloadRepositoryActionAsync(executionContext, action);

// more preparation base on content in the repository (action.yml)
var setupInfo = PrepareRepositoryActionAsync(executionContext, action);
if (setupInfo != null)
{
if (!string.IsNullOrEmpty(setupInfo.Image))
{
if (!imagesToPull.ContainsKey(setupInfo.Image))
{
imagesToPull[setupInfo.Image] = new List<Guid>();
}

Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to pull image '{setupInfo.Image}'");
imagesToPull[setupInfo.Image].Add(action.Id);
}
else
{
ArgUtil.NotNullOrEmpty(setupInfo.ActionRepository, nameof(setupInfo.ActionRepository));

if (!imagesToBuild.ContainsKey(setupInfo.ActionRepository))
{
imagesToBuild[setupInfo.ActionRepository] = new List<Guid>();
}

Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to build image '{setupInfo.Dockerfile}'");
imagesToBuild[setupInfo.ActionRepository].Add(action.Id);
imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
}
}

var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
var definition = LoadAction(executionContext, action);
if (definition.Data.Execution.HasPre)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = action;
actionRunner.Stage = ActionRunStage.Pre;
actionRunner.Condition = definition.Data.Execution.InitCondition;

Trace.Info($"Add 'pre' execution for {action.Id}");
preStepTracker[action.Id] = actionRunner;
}
}
}
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && newActionMetadata)
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository)
{
repositoryActions.Add(action);
}
@@ -179,38 +161,42 @@ namespace GitHub.Runner.Worker
foreach (var action in repositoryActions)
{
var setupInfo = PrepareRepositoryActionAsync(executionContext, action);
if (setupInfo != null)
if (setupInfo != null && setupInfo.Container != null)
{
if (!string.IsNullOrEmpty(setupInfo.Image))
if (!string.IsNullOrEmpty(setupInfo.Container.Image))
{
if (!imagesToPull.ContainsKey(setupInfo.Image))
if (!state.ImagesToPull.ContainsKey(setupInfo.Container.Image))
{
imagesToPull[setupInfo.Image] = new List<Guid>();
state.ImagesToPull[setupInfo.Container.Image] = new List<Guid>();
}

Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to pull image '{setupInfo.Image}'");
imagesToPull[setupInfo.Image].Add(action.Id);
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.Container.ActionRepository}' needs to pull image '{setupInfo.Container.Image}'");
state.ImagesToPull[setupInfo.Container.Image].Add(action.Id);
}
else
{
ArgUtil.NotNullOrEmpty(setupInfo.ActionRepository, nameof(setupInfo.ActionRepository));
ArgUtil.NotNullOrEmpty(setupInfo.Container.ActionRepository, nameof(setupInfo.Container.ActionRepository));

if (!imagesToBuild.ContainsKey(setupInfo.ActionRepository))
if (!state.ImagesToBuild.ContainsKey(setupInfo.Container.ActionRepository))
{
imagesToBuild[setupInfo.ActionRepository] = new List<Guid>();
state.ImagesToBuild[setupInfo.Container.ActionRepository] = new List<Guid>();
}

Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to build image '{setupInfo.Dockerfile}'");
imagesToBuild[setupInfo.ActionRepository].Add(action.Id);
imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.Container.ActionRepository}' needs to build image '{setupInfo.Container.Dockerfile}'");
state.ImagesToBuild[setupInfo.Container.ActionRepository].Add(action.Id);
state.ImagesToBuildInfo[setupInfo.Container.ActionRepository] = setupInfo.Container;
}
}

else if(setupInfo != null && setupInfo.Steps != null && setupInfo.Steps.Count > 0)
{
state = await PrepareActionsRecursiveAsync(executionContext, state, setupInfo.Steps, depth + 1);
}
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
var definition = LoadAction(executionContext, action);
if (definition.Data.Execution.HasPre)
// TODO: Support pre's in composite actions
if (definition.Data.Execution.HasPre && depth < 1)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = action;
@@ -218,46 +204,13 @@ namespace GitHub.Runner.Worker
actionRunner.Condition = definition.Data.Execution.InitCondition;

Trace.Info($"Add 'pre' execution for {action.Id}");
preStepTracker[action.Id] = actionRunner;
state.PreStepTracker[action.Id] = actionRunner;
}
}
}
}

if (imagesToPull.Count > 0)
{
foreach (var imageToPull in imagesToPull)
{
Trace.Info($"{imageToPull.Value.Count} steps need to pull image '{imageToPull.Key}'");
containerSetupSteps.Add(new JobExtensionRunner(runAsync: this.PullActionContainerAsync,
condition: $"{PipelineTemplateConstants.Success}()",
displayName: $"Pull {imageToPull.Key}",
data: new ContainerSetupInfo(imageToPull.Value, imageToPull.Key)));
}
}

if (imagesToBuild.Count > 0)
{
foreach (var imageToBuild in imagesToBuild)
{
var setupInfo = imagesToBuildInfo[imageToBuild.Key];
Trace.Info($"{imageToBuild.Value.Count} steps need to build image from '{setupInfo.Dockerfile}'");
containerSetupSteps.Add(new JobExtensionRunner(runAsync: this.BuildActionContainerAsync,
condition: $"{PipelineTemplateConstants.Success}()",
displayName: $"Build {setupInfo.ActionRepository}",
data: new ContainerSetupInfo(imageToBuild.Value, setupInfo.Dockerfile, setupInfo.WorkingDirectory)));
}
}

#if !OS_LINUX
if (containerSetupSteps.Count > 0)
{
executionContext.Output("Container action is only supported on Linux, skip pull and build docker images.");
containerSetupSteps.Clear();
}
#endif

return new PrepareResult(containerSetupSteps, preStepTracker);
return state;
}

public Definition LoadAction(IExecutionContext executionContext, Pipelines.ActionStep action)
@@ -471,12 +424,12 @@ namespace GitHub.Runner.Worker
executionContext.Output($"##[group]Pull down action image '{setupInfo.Container.Image}'");

// Pull down docker image with retry up to 3 times
var dockerManger = HostContext.GetService<IDockerCommandManager>();
var dockerManager = HostContext.GetService<IDockerCommandManager>();
int retryCount = 0;
int pullExitCode = 0;
while (retryCount < 3)
{
pullExitCode = await dockerManger.DockerPull(executionContext, setupInfo.Container.Image);
pullExitCode = await dockerManager.DockerPull(executionContext, setupInfo.Container.Image);
if (pullExitCode == 0)
{
break;
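The image pull above retries up to three times and stops at the first zero exit code; the build path in the next hunk follows the same shape. A generic sketch of that pattern, with illustrative names only (not the runner's API):

using System;
using System.Threading.Tasks;

public static class RetrySketch
{
    // Run a docker call up to maxAttempts times, stopping at the first zero exit code.
    public static async Task<int> RunWithRetryAsync(Func<Task<int>> dockerCall, int maxAttempts = 3)
    {
        int exitCode = 0;
        for (int attempt = 0; attempt < maxAttempts; attempt++)
        {
            exitCode = await dockerCall();
            if (exitCode == 0)
            {
                break; // success, stop retrying
            }
        }
        return exitCode;
    }
}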
@@ -515,13 +468,13 @@ namespace GitHub.Runner.Worker
executionContext.Output($"##[group]Build container for action use: '{setupInfo.Container.Dockerfile}'.");

// Build docker image with retry up to 3 times
var dockerManger = HostContext.GetService<IDockerCommandManager>();
var dockerManager = HostContext.GetService<IDockerCommandManager>();
int retryCount = 0;
int buildExitCode = 0;
var imageName = $"{dockerManger.DockerInstanceLabel}:{Guid.NewGuid().ToString("N")}";
var imageName = $"{dockerManager.DockerInstanceLabel}:{Guid.NewGuid().ToString("N")}";
while (retryCount < 3)
{
buildExitCode = await dockerManger.DockerBuild(
buildExitCode = await dockerManager.DockerBuild(
executionContext,
setupInfo.Container.WorkingDirectory,
setupInfo.Container.Dockerfile,
@@ -594,7 +547,7 @@ namespace GitHub.Runner.Worker
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
break;
}
catch (Exception ex)
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is canceled.
{
if (attempt < 3)
{
@@ -609,7 +562,18 @@ namespace GitHub.Runner.Worker
}
else
{
throw new WebApi.FailedToResolveActionDownloadInfoException("Failed to resolve action download info.", ex);
// Some possible cases are:
// * Repo is rate limited
// * Repo or tag doesn't exist, or isn't public
if (ex is WebApi.UnresolvableActionDownloadInfoException)
{
throw;
}
else
{
// This exception will be traced as an infrastructure failure
throw new WebApi.FailedToResolveActionDownloadInfoException("Failed to resolve action download info.", ex);
}
}
}
}
@@ -636,90 +600,6 @@ namespace GitHub.Runner.Worker
return actionDownloadInfos.Actions;
}

// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
{
Trace.Entering();
ArgUtil.NotNull(executionContext, nameof(executionContext));

var repositoryReference = repositoryAction.Reference as Pipelines.RepositoryPathReference;
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));

if (string.Equals(repositoryReference.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
Trace.Info($"Repository action is in 'self' repository.");
return;
}

if (!string.Equals(repositoryReference.RepositoryType, Pipelines.RepositoryTypes.GitHub, StringComparison.OrdinalIgnoreCase))
{
throw new NotSupportedException(repositoryReference.RepositoryType);
}

ArgUtil.NotNullOrEmpty(repositoryReference.Name, nameof(repositoryReference.Name));
ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));

string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repositoryReference.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repositoryReference.Ref);
string watermarkFile = GetWatermarkFilePath(destDirectory);
if (File.Exists(watermarkFile))
{
executionContext.Debug($"Action '{repositoryReference.Name}@{repositoryReference.Ref}' already downloaded at '{destDirectory}'.");
return;
}
else
{
// make sure we get a clean folder ready to use.
IOUtil.DeleteDirectory(destDirectory, executionContext.CancellationToken);
Directory.CreateDirectory(destDirectory);
executionContext.Output($"Download action repository '{repositoryReference.Name}@{repositoryReference.Ref}'");
}

var configurationStore = HostContext.GetService<IConfigurationStore>();
var isHostedServer = configurationStore.GetSettings().IsHostedServer;
if (isHostedServer)
{
string apiUrl = GetApiUrl(executionContext);
string archiveLink = BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref);
var downloadDetails = new ActionDownloadDetails(archiveLink, ConfigureAuthorizationFromContext);
await DownloadRepositoryActionAsync(executionContext, downloadDetails, null, destDirectory);
return;
}
else
{
string apiUrl = GetApiUrl(executionContext);

// URLs to try:
var downloadAttempts = new List<ActionDownloadDetails> {
// A built-in action or an action the user has created, on their GHES instance
// Example: https://my-ghes/api/v3/repos/my-org/my-action/tarball/v1
new ActionDownloadDetails(
BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref),
ConfigureAuthorizationFromContext),

// The same action, on GitHub.com
// Example: https://api.github.com/repos/my-org/my-action/tarball/v1
new ActionDownloadDetails(
BuildLinkToActionArchive(_dotcomApiUrl, repositoryReference.Name, repositoryReference.Ref),
configureAuthorization: (e,h) => { /* no authorization for dotcom */ })
};

foreach (var downloadAttempt in downloadAttempts)
{
try
{
await DownloadRepositoryActionAsync(executionContext, downloadAttempt, null, destDirectory);
return;
}
catch (ActionNotFoundException)
{
Trace.Info($"Failed to find the action '{repositoryReference.Name}' at ref '{repositoryReference.Ref}' at {downloadAttempt.ArchiveLink}");
continue;
}
}
throw new ActionNotFoundException($"Failed to find the action '{repositoryReference.Name}' at ref '{repositoryReference.Ref}'. Paths attempted: {string.Join(", ", downloadAttempts.Select(d => d.ArchiveLink))}");
}
}

private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo)
{
Trace.Entering();
@@ -743,7 +623,7 @@ namespace GitHub.Runner.Worker
executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}'");
}

await DownloadRepositoryActionAsync(executionContext, null, downloadInfo, destDirectory);
await DownloadRepositoryActionAsync(executionContext, downloadInfo, destDirectory);
}

private string GetApiUrl(IExecutionContext executionContext)
@@ -766,8 +646,7 @@ namespace GitHub.Runner.Worker
#endif
}

// todo: Remove the parameter "actionDownloadDetails" when feature flag DistributedTask.NewActionMetadata is removed
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, ActionDownloadDetails actionDownloadDetails, WebApi.ActionDownloadInfo downloadInfo, string destDirectory)
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo, string destDirectory)
{
//download and extract action in a temp folder and rename it on success
string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid());
@@ -775,10 +654,10 @@ namespace GitHub.Runner.Worker

#if OS_WINDOWS
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.zip");
string link = downloadInfo?.ZipballUrl ?? actionDownloadDetails.ArchiveLink;
string link = downloadInfo?.ZipballUrl;
#else
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.tar.gz");
string link = downloadInfo?.TarballUrl ?? actionDownloadDetails.ArchiveLink;
string link = downloadInfo?.TarballUrl;
#endif

Trace.Info($"Save archive '{link}' into {archiveFile}.");
@@ -800,16 +679,7 @@ namespace GitHub.Runner.Worker
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
// Legacy
if (downloadInfo == null)
{
actionDownloadDetails.ConfigureAuthorization(executionContext, httpClient);
}
// FF DistributedTask.NewActionMetadata
else
{
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadInfo.Authentication?.Token);
}
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadInfo.Authentication?.Token);

httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
using (var response = await httpClient.GetAsync(link))
@@ -949,7 +819,6 @@ namespace GitHub.Runner.Worker
}
}

// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private void ConfigureAuthorizationFromContext(IExecutionContext executionContext, HttpClient httpClient)
{
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
@@ -975,7 +844,7 @@ namespace GitHub.Runner.Worker

private string GetWatermarkFilePath(string directory) => directory + ".completed";

private ActionContainer PrepareRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
private ActionSetupInfo PrepareRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
{
var repositoryReference = repositoryAction.Reference as Pipelines.RepositoryPathReference;
if (string.Equals(repositoryReference.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
@@ -983,8 +852,8 @@ namespace GitHub.Runner.Worker
Trace.Info($"Repository action is in 'self' repository.");
return null;
}

var setupInfo = new ActionContainer();
var setupInfo = new ActionSetupInfo();
var actionContainer = new ActionContainer();
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repositoryReference.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repositoryReference.Ref);
string actionEntryDirectory = destDirectory;
string dockerFileRelativePath = repositoryReference.Name;
@@ -993,11 +862,11 @@ namespace GitHub.Runner.Worker
{
actionEntryDirectory = Path.Combine(destDirectory, repositoryReference.Path);
dockerFileRelativePath = $"{dockerFileRelativePath}/{repositoryReference.Path}";
setupInfo.ActionRepository = $"{repositoryReference.Name}/{repositoryReference.Path}@{repositoryReference.Ref}";
actionContainer.ActionRepository = $"{repositoryReference.Name}/{repositoryReference.Path}@{repositoryReference.Ref}";
}
else
{
setupInfo.ActionRepository = $"{repositoryReference.Name}@{repositoryReference.Ref}";
actionContainer.ActionRepository = $"{repositoryReference.Name}@{repositoryReference.Ref}";
}

// find the docker file or action.yml file

@@ -1027,8 +896,9 @@ namespace GitHub.Runner.Worker
var dockerFileFullPath = Path.Combine(actionEntryDirectory, containerAction.Image);
executionContext.Debug($"Dockerfile for action: '{dockerFileFullPath}'.");

setupInfo.Dockerfile = dockerFileFullPath;
setupInfo.WorkingDirectory = destDirectory;
actionContainer.Dockerfile = dockerFileFullPath;
actionContainer.WorkingDirectory = destDirectory;
setupInfo.Container = actionContainer;
return setupInfo;
}
else if (containerAction.Image.StartsWith("docker://", StringComparison.OrdinalIgnoreCase))
@@ -1037,7 +907,8 @@ namespace GitHub.Runner.Worker

executionContext.Debug($"Container image for action: '{actionImage}'.");

setupInfo.Image = actionImage;
actionContainer.Image = actionImage;
setupInfo.Container = actionContainer;
return setupInfo;
}
else
@@ -1057,8 +928,21 @@ namespace GitHub.Runner.Worker
}
else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Composite)
{
Trace.Info($"Action composite: {(actionDefinitionData.Execution as CompositeActionExecutionData).Steps}, no more preparation.");
return null;
// TODO: we need to generate unique Id's for composite steps
Trace.Info($"Loading Composite steps");
var compositeAction = actionDefinitionData.Execution as CompositeActionExecutionData;
setupInfo.Steps = compositeAction.Steps;

foreach (var step in compositeAction.Steps)
{
step.Id = Guid.NewGuid();
if (string.IsNullOrEmpty(executionContext.Global.Variables.Get("DistributedTask.EnableCompositeActions")) && step.Reference.Type != Pipelines.ActionSourceType.Script)
{
throw new Exception("`uses:` keyword is not currently supported.");
}
}

return setupInfo;
}
else
{
@@ -1068,15 +952,17 @@ namespace GitHub.Runner.Worker
else if (File.Exists(dockerFile))
{
executionContext.Debug($"Dockerfile for action: '{dockerFile}'.");
setupInfo.Dockerfile = dockerFile;
setupInfo.WorkingDirectory = destDirectory;
actionContainer.Dockerfile = dockerFile;
actionContainer.WorkingDirectory = destDirectory;
setupInfo.Container = actionContainer;
return setupInfo;
}
else if (File.Exists(dockerFileLowerCase))
{
executionContext.Debug($"Dockerfile for action: '{dockerFileLowerCase}'.");
setupInfo.Dockerfile = dockerFileLowerCase;
setupInfo.WorkingDirectory = destDirectory;
actionContainer.Dockerfile = dockerFileLowerCase;
actionContainer.WorkingDirectory = destDirectory;
setupInfo.Container = actionContainer;
return setupInfo;
}
else
@@ -1129,20 +1015,6 @@ namespace GitHub.Runner.Worker
HostContext.SecretMasker.AddValue(base64EncodingToken);
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
}

// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private class ActionDownloadDetails
{
public string ArchiveLink { get; }

public Action<IExecutionContext, HttpClient> ConfigureAuthorization { get; }

public ActionDownloadDetails(string archiveLink, Action<IExecutionContext, HttpClient> configureAuthorization)
{
ArchiveLink = archiveLink;
ConfigureAuthorization = configureAuthorization;
}
}
}

public sealed class Definition
@@ -1292,4 +1164,18 @@ namespace GitHub.Runner.Worker
public string WorkingDirectory { get; set; }
public string ActionRepository { get; set; }
}

public class ActionSetupInfo
{
public ActionContainer Container { get; set; }
public List<Pipelines.ActionStep> Steps {get; set;}
}

public class PrepareActionsState
{
public Dictionary<string, List<Guid>> ImagesToPull;
public Dictionary<string, List<Guid>> ImagesToBuild;
public Dictionary<string, ActionContainer> ImagesToBuildInfo;
public Dictionary<Guid, IActionRunner> PreStepTracker;
}
}

@@ -311,7 +311,7 @@ namespace GitHub.Runner.Worker
var result = new TemplateContext
{
CancellationToken = CancellationToken.None,
Errors = new TemplateValidationErrors(10, 500),
Errors = new TemplateValidationErrors(10, int.MaxValue), // Don't truncate error messages otherwise we might not scrub secrets correctly
Memory = new TemplateMemory(
maxDepth: 100,
maxEvents: 1000000,

@@ -46,7 +46,7 @@ namespace GitHub.Runner.Worker.Container
{
base.Initialize(hostContext);
DockerPath = WhichUtil.Which("docker", true, Trace);
DockerInstanceLabel = IOUtil.GetPathHash(hostContext.GetDirectory(WellKnownDirectory.Root)).Substring(0, 6);
DockerInstanceLabel = IOUtil.GetSha256Hash(hostContext.GetDirectory(WellKnownDirectory.Root)).Substring(0, 6);
}

public async Task<DockerVersion> DockerVersion(IExecutionContext context)

@@ -24,12 +24,12 @@ namespace GitHub.Runner.Worker

public class ContainerOperationProvider : RunnerService, IContainerOperationProvider
{
private IDockerCommandManager _dockerManger;
private IDockerCommandManager _dockerManager;

public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_dockerManger = HostContext.GetService<IDockerCommandManager>();
_dockerManager = HostContext.GetService<IDockerCommandManager>();
}

public async Task StartContainersAsync(IExecutionContext executionContext, object data)
@@ -92,7 +92,7 @@ namespace GitHub.Runner.Worker

// Check docker client/server version
executionContext.Output("##[group]Checking docker version");
DockerVersion dockerVersion = await _dockerManger.DockerVersion(executionContext);
DockerVersion dockerVersion = await _dockerManager.DockerVersion(executionContext);
executionContext.Output("##[endgroup]");

ArgUtil.NotNull(dockerVersion.ServerVersion, nameof(dockerVersion.ServerVersion));
@@ -106,26 +106,26 @@ namespace GitHub.Runner.Worker

if (dockerVersion.ServerVersion < requiredDockerEngineAPIVersion)
{
throw new NotSupportedException($"Min required docker engine API server version is '{requiredDockerEngineAPIVersion}', your docker ('{_dockerManger.DockerPath}') server version is '{dockerVersion.ServerVersion}'");
throw new NotSupportedException($"Min required docker engine API server version is '{requiredDockerEngineAPIVersion}', your docker ('{_dockerManager.DockerPath}') server version is '{dockerVersion.ServerVersion}'");
}
if (dockerVersion.ClientVersion < requiredDockerEngineAPIVersion)
{
throw new NotSupportedException($"Min required docker engine API client version is '{requiredDockerEngineAPIVersion}', your docker ('{_dockerManger.DockerPath}') client version is '{dockerVersion.ClientVersion}'");
throw new NotSupportedException($"Min required docker engine API client version is '{requiredDockerEngineAPIVersion}', your docker ('{_dockerManager.DockerPath}') client version is '{dockerVersion.ClientVersion}'");
}

// Clean up containers left by previous runs
executionContext.Output("##[group]Clean up resources from previous jobs");
var staleContainers = await _dockerManger.DockerPS(executionContext, $"--all --quiet --no-trunc --filter \"label={_dockerManger.DockerInstanceLabel}\"");
var staleContainers = await _dockerManager.DockerPS(executionContext, $"--all --quiet --no-trunc --filter \"label={_dockerManager.DockerInstanceLabel}\"");
foreach (var staleContainer in staleContainers)
{
int containerRemoveExitCode = await _dockerManger.DockerRemove(executionContext, staleContainer);
int containerRemoveExitCode = await _dockerManager.DockerRemove(executionContext, staleContainer);
if (containerRemoveExitCode != 0)
{
executionContext.Warning($"Delete stale containers failed, docker rm fail with exit code {containerRemoveExitCode} for container {staleContainer}");
}
}

int networkPruneExitCode = await _dockerManger.DockerNetworkPrune(executionContext);
int networkPruneExitCode = await _dockerManager.DockerNetworkPrune(executionContext);
if (networkPruneExitCode != 0)
{
executionContext.Warning($"Delete stale container networks failed, docker network prune fail with exit code {networkPruneExitCode}");
@@ -198,8 +198,7 @@ namespace GitHub.Runner.Worker
}
}

// TODO: Add at a later date. This currently no local package registry to test with
// UpdateRegistryAuthForGitHubToken(executionContext, container);
UpdateRegistryAuthForGitHubToken(executionContext, container);

// Before pulling, generate client authentication if required
var configLocation = await ContainerRegistryLogin(executionContext, container);
@@ -209,7 +208,7 @@ namespace GitHub.Runner.Worker
int pullExitCode = 0;
while (retryCount < 3)
{
pullExitCode = await _dockerManger.DockerPull(executionContext, container.ContainerImage, configLocation);
pullExitCode = await _dockerManager.DockerPull(executionContext, container.ContainerImage, configLocation);
if (pullExitCode == 0)
{
break;
@@ -267,11 +266,11 @@ namespace GitHub.Runner.Worker
container.ContainerEntryPointArgs = "\"-f\" \"/dev/null\"";
}

container.ContainerId = await _dockerManger.DockerCreate(executionContext, container);
container.ContainerId = await _dockerManager.DockerCreate(executionContext, container);
ArgUtil.NotNullOrEmpty(container.ContainerId, nameof(container.ContainerId));

// Start container
int startExitCode = await _dockerManger.DockerStart(executionContext, container.ContainerId);
int startExitCode = await _dockerManager.DockerStart(executionContext, container.ContainerId);
if (startExitCode != 0)
{
throw new InvalidOperationException($"Docker start fail with exit code {startExitCode}");
@@ -280,12 +279,12 @@ namespace GitHub.Runner.Worker
try
{
// Make sure container is up and running
var psOutputs = await _dockerManger.DockerPS(executionContext, $"--all --filter id={container.ContainerId} --filter status=running --no-trunc --format \"{{{{.ID}}}} {{{{.Status}}}}\"");
var psOutputs = await _dockerManager.DockerPS(executionContext, $"--all --filter id={container.ContainerId} --filter status=running --no-trunc --format \"{{{{.ID}}}} {{{{.Status}}}}\"");
if (psOutputs.FirstOrDefault(x => !string.IsNullOrEmpty(x))?.StartsWith(container.ContainerId) != true)
{
// container is not up and running, pull docker log for this container.
await _dockerManger.DockerPS(executionContext, $"--all --filter id={container.ContainerId} --no-trunc --format \"{{{{.ID}}}} {{{{.Status}}}}\"");
int logsExitCode = await _dockerManger.DockerLogs(executionContext, container.ContainerId);
await _dockerManager.DockerPS(executionContext, $"--all --filter id={container.ContainerId} --no-trunc --format \"{{{{.ID}}}} {{{{.Status}}}}\"");
int logsExitCode = await _dockerManager.DockerLogs(executionContext, container.ContainerId);
if (logsExitCode != 0)
{
executionContext.Warning($"Docker logs fail with exit code {logsExitCode}");
@@ -310,7 +309,7 @@ namespace GitHub.Runner.Worker
["ports"] = new DictionaryContextData(),
["network"] = new StringContextData(container.ContainerNetwork)
};
container.AddPortMappings(await _dockerManger.DockerPort(executionContext, container.ContainerId));
container.AddPortMappings(await _dockerManager.DockerPort(executionContext, container.ContainerId));
foreach (var port in container.PortMappings)
{
(service["ports"] as DictionaryContextData)[port.ContainerPort] = new StringContextData(port.HostPort);
@@ -320,7 +319,7 @@ namespace GitHub.Runner.Worker
else
{
var configEnvFormat = "--format \"{{range .Config.Env}}{{println .}}{{end}}\"";
var containerEnv = await _dockerManger.DockerInspect(executionContext, container.ContainerId, configEnvFormat);
var containerEnv = await _dockerManager.DockerInspect(executionContext, container.ContainerId, configEnvFormat);
container.ContainerRuntimePath = DockerUtil.ParsePathFromConfigEnv(containerEnv);
executionContext.JobContext.Container["id"] = new StringContextData(container.ContainerId);
}
@@ -337,7 +336,7 @@ namespace GitHub.Runner.Worker
{
executionContext.Output($"Stop and remove container: {container.ContainerDisplayName}");

int rmExitCode = await _dockerManger.DockerRemove(executionContext, container.ContainerId);
int rmExitCode = await _dockerManager.DockerRemove(executionContext, container.ContainerId);
if (rmExitCode != 0)
{
executionContext.Warning($"Docker rm fail with exit code {rmExitCode}");
@@ -397,7 +396,7 @@ namespace GitHub.Runner.Worker
{
Trace.Entering();
ArgUtil.NotNull(executionContext, nameof(executionContext));
int networkExitCode = await _dockerManger.DockerNetworkCreate(executionContext, network);
int networkExitCode = await _dockerManager.DockerNetworkCreate(executionContext, network);
if (networkExitCode != 0)
{
throw new InvalidOperationException($"Docker network create failed with exit code {networkExitCode}");
@@ -412,7 +411,7 @@ namespace GitHub.Runner.Worker

executionContext.Output($"Remove container network: {network}");

int removeExitCode = await _dockerManger.DockerNetworkRemove(executionContext, network);
int removeExitCode = await _dockerManager.DockerNetworkRemove(executionContext, network);
if (removeExitCode != 0)
{
executionContext.Warning($"Docker network rm failed with exit code {removeExitCode}");
@@ -422,7 +421,7 @@ namespace GitHub.Runner.Worker
private async Task ContainerHealthcheck(IExecutionContext executionContext, ContainerInfo container)
{
string healthCheck = "--format=\"{{if .Config.Healthcheck}}{{print .State.Health.Status}}{{end}}\"";
string serviceHealth = (await _dockerManger.DockerInspect(context: executionContext, dockerObject: container.ContainerId, options: healthCheck)).FirstOrDefault();
string serviceHealth = (await _dockerManager.DockerInspect(context: executionContext, dockerObject: container.ContainerId, options: healthCheck)).FirstOrDefault();
if (string.IsNullOrEmpty(serviceHealth))
{
// Container has no HEALTHCHECK
@@ -434,7 +433,7 @@ namespace GitHub.Runner.Worker
TimeSpan backoff = BackoffTimerHelper.GetExponentialBackoff(retryCount, TimeSpan.FromSeconds(2), TimeSpan.FromSeconds(32), TimeSpan.FromSeconds(2));
executionContext.Output($"{container.ContainerNetworkAlias} service is starting, waiting {backoff.Seconds} seconds before checking again.");
await Task.Delay(backoff, executionContext.CancellationToken);
serviceHealth = (await _dockerManger.DockerInspect(context: executionContext, dockerObject: container.ContainerId, options: healthCheck)).FirstOrDefault();
serviceHealth = (await _dockerManager.DockerInspect(context: executionContext, dockerObject: container.ContainerId, options: healthCheck)).FirstOrDefault();
retryCount++;
}
if (string.Equals(serviceHealth, "healthy", StringComparison.OrdinalIgnoreCase))
@@ -463,7 +462,7 @@ namespace GitHub.Runner.Worker
{
throw new InvalidOperationException($"Failed to create directory to store registry client credentials: {e.Message}");
}
var loginExitCode = await _dockerManger.DockerLogin(
var loginExitCode = await _dockerManager.DockerLogin(
executionContext,
configLocation,
container.RegistryServer,
@@ -494,31 +493,14 @@ namespace GitHub.Runner.Worker

private void UpdateRegistryAuthForGitHubToken(IExecutionContext executionContext, ContainerInfo container)
{
var registryIsTokenCompatible = container.RegistryServer.Equals("docker.pkg.github.com", StringComparison.OrdinalIgnoreCase);
var registryIsTokenCompatible = container.RegistryServer.Equals("ghcr.io", StringComparison.OrdinalIgnoreCase) || container.RegistryServer.Equals("containers.pkg.github.com", StringComparison.OrdinalIgnoreCase);
if (!registryIsTokenCompatible)
{
return;
}

var registryMatchesWorkflow = false;

// REGISTRY/OWNER/REPO/IMAGE[:TAG]
var imageParts = container.ContainerImage.Split('/');
if (imageParts.Length != 4)
{
executionContext.Warning($"Could not identify owner and repo for container image {container.ContainerImage}. Skipping automatic token auth");
return;
}
var owner = imageParts[1];
var repo = imageParts[2];
var nwo = $"{owner}/{repo}";
if (nwo.Equals(executionContext.GetGitHubContext("repository"), StringComparison.OrdinalIgnoreCase))
{
registryMatchesWorkflow = true;
}

var registryCredentialsNotSupplied = string.IsNullOrEmpty(container.RegistryAuthUsername) && string.IsNullOrEmpty(container.RegistryAuthPassword);
if (registryCredentialsNotSupplied && registryMatchesWorkflow)
if (registryCredentialsNotSupplied)
{
container.RegistryAuthUsername = executionContext.GetGitHubContext("actor");
container.RegistryAuthPassword = executionContext.GetGitHubContext("token");
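Context for the hunk above: the automatic GITHUB_TOKEN login now applies to ghcr.io (and containers.pkg.github.com) whenever no explicit credentials are supplied, rather than only when the image's owner/repo matches the workflow repository. An illustrative sketch of the REGISTRY/OWNER/REPO/IMAGE[:TAG] split the removed check relied on (values made up, not taken from the diff):

var image = "ghcr.io/my-org/my-repo/app:latest";  // example image reference
var parts = image.Split('/');                      // ["ghcr.io", "my-org", "my-repo", "app:latest"]
var nwo = $"{parts[1]}/{parts[2]}";                // "my-org/my-repo", previously compared against the github.repository context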
@@ -61,14 +61,15 @@ namespace GitHub.Runner.Worker

bool EchoOnActionCommand { get; set; }

bool InsideComposite { get; }
bool IsEmbedded { get; }

ExecutionContext Root { get; }

// Initialize
void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token);
void CancelToken();
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool insideComposite = false, CancellationTokenSource cancellationTokenSource = null);
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null);
IExecutionContext CreateEmbeddedChild(string scopeName, string contextName);

// logging
long Write(string tag, string message);
@@ -99,7 +100,6 @@ namespace GitHub.Runner.Worker
// others
void ForceTaskComplete();
void RegisterPostJobStep(IStep step);
IStep CreateCompositeStep(string scopeName, IActionRunner step, DictionaryContextData inputsData, Dictionary<string, string> envData);
}

public sealed class ExecutionContext : RunnerService, IExecutionContext
@@ -157,7 +157,9 @@ namespace GitHub.Runner.Worker

public bool EchoOnActionCommand { get; set; }

public bool InsideComposite { get; private set; }
// An embedded execution context shares the same record ID, record name, and logger
// as its enclosing execution context.
public bool IsEmbedded { get; private set; }

public TaskResult? Result
{
@@ -243,6 +245,12 @@ namespace GitHub.Runner.Worker

public void RegisterPostJobStep(IStep step)
{
// TODO: Remove when we support composite post job steps
if (this.IsEmbedded)
{
throw new Exception("Composite actions do not currently support post steps");

}
if (step is IActionRunner actionRunner && !Root.StepsWithPostRegistered.Add(actionRunner.Action.Id))
{
Trace.Info($"'post' of '{actionRunner.DisplayName}' already push to post step stack.");
@@ -253,36 +261,7 @@ namespace GitHub.Runner.Worker
Root.PostJobSteps.Push(step);
}

/// <summary>
/// Helper function used in CompositeActionHandler::RunAsync to
/// add a child node, aka a step, to the current job to the Root.JobSteps based on the location.
/// </summary>
public IStep CreateCompositeStep(
string scopeName,
IActionRunner step,
DictionaryContextData inputsData,
Dictionary<string, string> envData)
{
step.ExecutionContext = Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, step.Action.ContextName, logger: _logger, insideComposite: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token));
step.ExecutionContext.ExpressionValues["inputs"] = inputsData;
step.ExecutionContext.ExpressionValues["steps"] = Global.StepsContext.GetScope(step.ExecutionContext.GetFullyQualifiedContextName());

// Add the composite action environment variables to each step.
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
foreach (var pair in envData)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
step.ExecutionContext.ExpressionValues["env"] = envContext;

return step;
}

public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool insideComposite = false, CancellationTokenSource cancellationTokenSource = null)
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null)
{
Trace.Entering();

@@ -329,11 +308,20 @@ namespace GitHub.Runner.Worker
child._logger.Setup(_mainTimelineId, recordId);
}

child.InsideComposite = insideComposite;
child.IsEmbedded = isEmbedded;

return child;
}

/// <summary>
/// An embedded execution context shares the same record ID, record name, logger,
/// and a linked cancellation token.
/// </summary>
public IExecutionContext CreateEmbeddedChild(string scopeName, string contextName)
{
return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, logger: _logger, isEmbedded: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token));
}

public void Start(string currentOperation = null)
{
_record.CurrentOperation = currentOperation ?? _record.CurrentOperation;
@@ -528,6 +516,24 @@ namespace GitHub.Runner.Worker
}

_record.WarningCount++;
}
else if (issue.Type == IssueType.Notice)
{

// tracking line number for each issue in log file
// log UI use this to navigate from issue to log
if (!string.IsNullOrEmpty(logMessage))
{
long logLineNumber = Write(WellKnownTags.Notice, logMessage);
issue.Data["logFileLineNumber"] = logLineNumber.ToString();
}

if (_record.NoticeCount < _maxIssueCount)
{
_record.Issues.Add(issue);
}

_record.NoticeCount++;
}

_jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record);
@@ -853,11 +859,16 @@ namespace GitHub.Runner.Worker
_record.State = TimelineRecordState.Pending;
_record.ErrorCount = 0;
_record.WarningCount = 0;
_record.NoticeCount = 0;

if (parentTimelineRecordId != null && parentTimelineRecordId.Value != Guid.Empty)
{
_record.ParentId = parentTimelineRecordId;
}
else if (parentTimelineRecordId == null)
{
_record.AgentPlatform = VarUtil.OS;
}

var configuration = HostContext.GetService<IConfigurationStore>();
_record.WorkerName = configuration.GetSettings().AgentName;
@@ -975,7 +986,10 @@ namespace GitHub.Runner.Worker
traceWriter = context.ToTemplateTraceWriter();
}
var schema = PipelineTemplateSchemaFactory.GetSchema();
return new PipelineTemplateEvaluator(traceWriter, schema, context.Global.FileTable);
return new PipelineTemplateEvaluator(traceWriter, schema, context.Global.FileTable)
{
MaxErrorMessageLength = int.MaxValue, // Don't truncate error messages otherwise we might not scrub secrets correctly
};
}

public static ObjectTemplating.ITraceWriter ToTemplateTraceWriter(this IExecutionContext context)
@@ -1017,6 +1031,7 @@ namespace GitHub.Runner.Worker
public static readonly string Command = "##[command]";
public static readonly string Error = "##[error]";
public static readonly string Warning = "##[warning]";
public static readonly string Notice = "##[notice]";
public static readonly string Debug = "##[debug]";
}
}

@@ -5,11 +5,15 @@ using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Expressions;
using Pipelines = GitHub.DistributedTask.Pipelines;


@@ -26,65 +30,60 @@ namespace GitHub.Runner.Worker.Handlers

public async Task RunAsync(ActionRunStage stage)
{
// Validate args.
// Validate args
Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
ArgUtil.NotNull(Inputs, nameof(Inputs));
ArgUtil.NotNull(Data.Steps, nameof(Data.Steps));

// Resolve action steps
var actionSteps = Data.Steps;

// Create Context Data to reuse for each composite action step
var inputsData = new DictionaryContextData();
foreach (var i in Inputs)
{
inputsData[i.Key] = new StringContextData(i.Value);
}

// Initialize Composite Steps List of Steps
var compositeSteps = new List<IStep>();

// Temporary hack until after M271-ish. After M271-ish the server will never send an empty
// context name. Generated context names start with "__"
var childScopeName = ExecutionContext.GetFullyQualifiedContextName();
if (string.IsNullOrEmpty(childScopeName))
{
childScopeName = $"__{Guid.NewGuid()}";
}

foreach (Pipelines.ActionStep actionStep in actionSteps)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = actionStep;
actionRunner.Stage = stage;
actionRunner.Condition = actionStep.Condition;

var step = ExecutionContext.CreateCompositeStep(childScopeName, actionRunner, inputsData, Environment);

// Shallow copy github context
var gitHubContext = step.ExecutionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(gitHubContext, nameof(gitHubContext));
gitHubContext = gitHubContext.ShallowCopy();
step.ExecutionContext.ExpressionValues["github"] = gitHubContext;

// Set GITHUB_ACTION_PATH
step.ExecutionContext.SetGitHubContext("action_path", ActionDirectory);

compositeSteps.Add(step);
}

try
{
// This is where we run each step.
await RunStepsAsync(compositeSteps);
// Inputs of the composite step
var inputsData = new DictionaryContextData();
foreach (var i in Inputs)
{
inputsData[i.Key] = new StringContextData(i.Value);
}

// Get the pointer of the correct "steps" object and pass it to the ExecutionContext so that we can process the outputs correctly
// Temporary hack until after M271-ish. After M271-ish the server will never send an empty
// context name. Generated context names start with "__"
var childScopeName = ExecutionContext.GetFullyQualifiedContextName();
if (string.IsNullOrEmpty(childScopeName))
{
childScopeName = $"__{Guid.NewGuid()}";
}

// Create embedded steps
var embeddedSteps = new List<IStep>();
foreach (Pipelines.ActionStep stepData in Data.Steps)
{
var step = HostContext.CreateService<IActionRunner>();
step.Action = stepData;
step.Stage = stage;
step.Condition = stepData.Condition;
|
||||
step.ExecutionContext = ExecutionContext.CreateEmbeddedChild(childScopeName, stepData.ContextName);
|
||||
step.ExecutionContext.ExpressionValues["inputs"] = inputsData;
|
||||
step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(childScopeName);
|
||||
|
||||
// Shallow copy github context
|
||||
var gitHubContext = step.ExecutionContext.ExpressionValues["github"] as GitHubContext;
|
||||
ArgUtil.NotNull(gitHubContext, nameof(gitHubContext));
|
||||
gitHubContext = gitHubContext.ShallowCopy();
|
||||
step.ExecutionContext.ExpressionValues["github"] = gitHubContext;
|
||||
|
||||
// Set GITHUB_ACTION_PATH
|
||||
step.ExecutionContext.SetGitHubContext("action_path", ActionDirectory);
|
||||
|
||||
embeddedSteps.Add(step);
|
||||
}
|
||||
|
||||
// Run embedded steps
|
||||
await RunStepsAsync(embeddedSteps);
|
||||
|
||||
// Set outputs
|
||||
ExecutionContext.ExpressionValues["inputs"] = inputsData;
|
||||
ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(ExecutionContext.GetFullyQualifiedContextName());
|
||||
|
||||
ProcessCompositeActionOutputs();
|
||||
|
||||
ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(childScopeName);
|
||||
ProcessOutputs();
|
||||
ExecutionContext.Global.StepsContext.ClearScope(childScopeName);
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -96,7 +95,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
}
|
||||
}
|
||||
|
||||
private void ProcessCompositeActionOutputs()
|
||||
private void ProcessOutputs()
|
||||
{
|
||||
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
|
||||
|
||||
@@ -113,69 +112,60 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
evaluateContext[pair.Key] = pair.Value;
|
||||
}
|
||||
|
||||
// Get the evluated composite outputs' values mapped to the outputs named
|
||||
// Evaluate outputs
|
||||
DictionaryContextData actionOutputs = actionManifestManager.EvaluateCompositeOutputs(ExecutionContext, Data.Outputs, evaluateContext);
|
||||
|
||||
// Set the outputs for the outputs object in the whole composite action
|
||||
// Each pair is structured like this
|
||||
// We ignore "description" for now
|
||||
// {
|
||||
// "the-output-name": {
|
||||
// "description": "",
|
||||
// "value": "the value"
|
||||
// },
|
||||
// ...
|
||||
// }
|
||||
// Set outputs
|
||||
//
|
||||
// Each pair is structured like:
|
||||
// {
|
||||
// "the-output-name": {
|
||||
// "description": "",
|
||||
// "value": "the value"
|
||||
// },
|
||||
// ...
|
||||
// }
|
||||
foreach (var pair in actionOutputs)
|
||||
{
|
||||
var outputsName = pair.Key;
|
||||
var outputsAttributes = pair.Value as DictionaryContextData;
|
||||
outputsAttributes.TryGetValue("value", out var val);
|
||||
|
||||
if (val != null)
|
||||
var outputName = pair.Key;
|
||||
var outputDefinition = pair.Value as DictionaryContextData;
|
||||
if (outputDefinition.TryGetValue("value", out var val))
|
||||
{
|
||||
var outputsValue = val as StringContextData;
|
||||
// Set output in the whole composite scope.
|
||||
if (!String.IsNullOrEmpty(outputsValue))
|
||||
{
|
||||
ExecutionContext.SetOutput(outputsName, outputsValue, out _);
|
||||
}
|
||||
else
|
||||
{
|
||||
ExecutionContext.SetOutput(outputsName, "", out _);
|
||||
}
|
||||
var outputValue = val.AssertString("output value");
|
||||
ExecutionContext.SetOutput(outputName, outputValue.Value, out _);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async Task RunStepsAsync(List<IStep> compositeSteps)
|
||||
private async Task RunStepsAsync(List<IStep> embeddedSteps)
|
||||
{
|
||||
ArgUtil.NotNull(compositeSteps, nameof(compositeSteps));
|
||||
ArgUtil.NotNull(embeddedSteps, nameof(embeddedSteps));
|
||||
|
||||
// The parent StepsRunner of the whole Composite Action Step handles the cancellation stuff already.
|
||||
foreach (IStep step in compositeSteps)
|
||||
foreach (IStep step in embeddedSteps)
|
||||
{
|
||||
Trace.Info($"Processing composite step: DisplayName='{step.DisplayName}'");
|
||||
Trace.Info($"Processing embedded step: DisplayName='{step.DisplayName}'");
|
||||
|
||||
step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(step.ExecutionContext.ScopeName);
|
||||
// Add Expression Functions
|
||||
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>(PipelineTemplateConstants.HashFiles, 1, byte.MaxValue));
|
||||
|
||||
// Populate env context for each step
|
||||
Trace.Info("Initialize Env context for step");
|
||||
// Initialize env context
|
||||
Trace.Info("Initialize Env context for embedded step");
|
||||
#if OS_WINDOWS
|
||||
var envContext = new DictionaryContextData();
|
||||
#else
|
||||
var envContext = new CaseSensitiveDictionaryContextData();
|
||||
#endif
|
||||
step.ExecutionContext.ExpressionValues["env"] = envContext;
|
||||
|
||||
// Global env
|
||||
// Merge global env
|
||||
foreach (var pair in ExecutionContext.Global.EnvironmentVariables)
|
||||
{
|
||||
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
|
||||
}
|
||||
|
||||
// Stomps over with outside step env
|
||||
if (step.ExecutionContext.ExpressionValues.TryGetValue("env", out var envContextData))
|
||||
// Merge composite-step env
|
||||
if (ExecutionContext.ExpressionValues.TryGetValue("env", out var envContextData))
|
||||
{
|
||||
#if OS_WINDOWS
|
||||
var dict = envContextData as DictionaryContextData;
|
||||
@@ -188,13 +178,11 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
}
|
||||
}
|
||||
|
||||
step.ExecutionContext.ExpressionValues["env"] = envContext;
|
||||
|
||||
var actionStep = step as IActionRunner;
|
||||
|
||||
try
|
||||
{
|
||||
// Evaluate and merge action's env block to env context
|
||||
// Evaluate and merge embedded-step env
|
||||
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
|
||||
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, Common.Util.VarUtil.EnvironmentVariableKeyComparer);
|
||||
foreach (var env in actionEnvironment)
|
||||
@@ -204,39 +192,28 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// fail the step since there is an evaluate error.
|
||||
Trace.Info("Caught exception in Composite Steps Runner from expression for step.env");
|
||||
// evaluateStepEnvFailed = true;
|
||||
// Evaluation error
|
||||
Trace.Info("Caught exception from expression for embedded step.env");
|
||||
step.ExecutionContext.Error(ex);
|
||||
step.ExecutionContext.Complete(TaskResult.Failed);
|
||||
}
|
||||
|
||||
await RunStepAsync(step);
|
||||
|
||||
// Directly after the step, check if the step has failed or cancelled
|
||||
// If so, return that to the output
|
||||
// Check failed or canceled
|
||||
if (step.ExecutionContext.Result == TaskResult.Failed || step.ExecutionContext.Result == TaskResult.Canceled)
|
||||
{
|
||||
ExecutionContext.Result = step.ExecutionContext.Result;
|
||||
break;
|
||||
}
|
||||
|
||||
// TODO: Add compat for other types of steps.
|
||||
}
|
||||
// Completion Status handled by StepsRunner for the whole Composite Action Step
|
||||
}
|
||||
|
||||
private async Task RunStepAsync(IStep step)
|
||||
{
|
||||
// Start the step.
|
||||
Trace.Info("Starting the step.");
|
||||
Trace.Info($"Starting: {step.DisplayName}");
|
||||
step.ExecutionContext.Debug($"Starting: {step.DisplayName}");
|
||||
|
||||
// TODO: Fix for Step Level Timeout Attributes for an individual Composite Run Step
|
||||
// For now, we are not going to support this for an individual composite run step
|
||||
|
||||
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
|
||||
|
||||
await Common.Util.EncodingUtil.SetEncoding(HostContext, Trace, step.ExecutionContext.CancellationToken);
|
||||
|
||||
try
|
||||
@@ -261,7 +238,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// Log the error and fail the step.
|
||||
// Log the error and fail the step
|
||||
Trace.Error($"Caught exception from step: {ex}");
|
||||
step.ExecutionContext.Error(ex);
|
||||
step.ExecutionContext.Result = TaskResult.Failed;
|
||||
@@ -274,9 +251,7 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
}
|
||||
|
||||
Trace.Info($"Step result: {step.ExecutionContext.Result}");
|
||||
|
||||
// Complete the step context.
|
||||
step.ExecutionContext.Debug($"Finishing: {step.DisplayName}");
|
||||
step.ExecutionContext.Debug($"Finished: {step.DisplayName}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using System;
@@ -37,7 +37,7 @@ namespace GitHub.Runner.Worker.Handlers
// Update the env dictionary.
AddInputsToEnvironment();

var dockerManger = HostContext.GetService<IDockerCommandManager>();
var dockerManager = HostContext.GetService<IDockerCommandManager>();

// container image haven't built/pull
if (Data.Image.StartsWith("docker://", StringComparison.OrdinalIgnoreCase))
@@ -52,8 +52,8 @@ namespace GitHub.Runner.Worker.Handlers

ExecutionContext.Output($"##[group]Building docker image");
ExecutionContext.Output($"Dockerfile for action: '{dockerFile}'.");
var imageName = $"{dockerManger.DockerInstanceLabel}:{ExecutionContext.Id.ToString("N")}";
var buildExitCode = await dockerManger.DockerBuild(
var imageName = $"{dockerManager.DockerInstanceLabel}:{ExecutionContext.Id.ToString("N")}";
var buildExitCode = await dockerManager.DockerBuild(
ExecutionContext,
ExecutionContext.GetGitHubContext("workspace"),
dockerFile,
@@ -209,7 +209,7 @@ namespace GitHub.Runner.Worker.Handlers
using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager, container))
using (var stderrManager = new OutputManager(ExecutionContext, ActionCommandManager, container))
{
var runExitCode = await dockerManger.DockerRun(ExecutionContext, container, stdoutManager.OnDataReceived, stderrManager.OnDataReceived);
var runExitCode = await dockerManager.DockerRun(ExecutionContext, container, stdoutManager.OnDataReceived, stderrManager.OnDataReceived);
ExecutionContext.Debug($"Docker Action run completed with exit code {runExitCode}");
if (runExitCode != 0)
{

@@ -97,6 +97,9 @@ namespace GitHub.Runner.Worker.Handlers
Encoding outputEncoding = null;
#endif

// Remove environment variable that may cause conflicts with the node within the runner.
Environment.Remove("NODE_ICU_DATA"); // https://github.com/actions/runner/issues/795

using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager))
using (var stderrManager = new OutputManager(ExecutionContext, ActionCommandManager))
{

@@ -210,6 +210,10 @@ namespace GitHub.Runner.Worker.Handlers
{
issueType = DTWebApi.IssueType.Warning;
}
else if (string.Equals(match.Severity, "notice", StringComparison.OrdinalIgnoreCase))
{
issueType = DTWebApi.IssueType.Notice;
}
else
{
_executionContext.Debug($"Skipped logging an issue for the matched line because the severity '{match.Severity}' is not supported.");

@@ -26,7 +26,7 @@ namespace GitHub.Runner.Worker.Handlers
// We don't want to display the internal workings if composite (similar/equivalent information can be found in debug)
void writeDetails(string message)
{
if (ExecutionContext.InsideComposite)
if (ExecutionContext.IsEmbedded)
{
ExecutionContext.Debug(message);
}
@@ -52,7 +52,7 @@ namespace GitHub.Runner.Worker.Handlers
firstLine = firstLine.Substring(0, firstNewLine);
}

writeDetails(ExecutionContext.InsideComposite ? $"Run {firstLine}" : $"##[group]Run {firstLine}");
writeDetails(ExecutionContext.IsEmbedded ? $"Run {firstLine}" : $"##[group]Run {firstLine}");
}
else
{
@@ -138,7 +138,7 @@ namespace GitHub.Runner.Worker.Handlers
}
}

writeDetails(ExecutionContext.InsideComposite ? "" : "##[endgroup]");
writeDetails(ExecutionContext.IsEmbedded ? "" : "##[endgroup]");
}

public async Task RunAsync(ActionRunStage stage)

@@ -1,10 +1,11 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Runtime.Serialization;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.Expressions2;
|
||||
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
@@ -41,6 +42,8 @@ namespace GitHub.Runner.Worker
|
||||
private readonly HashSet<string> _existingProcesses = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
private bool _processCleanup;
|
||||
private string _processLookupId = $"github_{Guid.NewGuid()}";
|
||||
private CancellationTokenSource _diskSpaceCheckToken = new CancellationTokenSource();
|
||||
private Task _diskSpaceCheckTask = null;
|
||||
|
||||
// Download all required actions.
|
||||
// Make sure all condition inputs are valid.
|
||||
@@ -119,6 +122,26 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var tokenPermissions = jobContext.Global.Variables.Get("system.github.token.permissions") ?? "";
|
||||
if (!string.IsNullOrEmpty(tokenPermissions))
|
||||
{
|
||||
context.Output($"##[group]GITHUB_TOKEN Permissions");
|
||||
var permissions = StringUtil.ConvertFromJson<Dictionary<string, string>>(tokenPermissions);
|
||||
foreach(KeyValuePair<string, string> entry in permissions)
|
||||
{
|
||||
context.Output($"{entry.Key}: {entry.Value}");
|
||||
}
|
||||
context.Output("##[endgroup]");
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
context.Output($"Fail to parse and display GITHUB_TOKEN permissions list: {ex.Message}");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
|
||||
var repoFullName = context.GetGitHubContext("repository");
|
||||
ArgUtil.NotNull(repoFullName, nameof(repoFullName));
|
||||
context.Debug($"Primary repository: {repoFullName}");
|
||||
@@ -325,6 +348,12 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
jobContext.Global.EnvironmentVariables.TryGetValue(Constants.Runner.Features.DiskSpaceWarning, out var enableWarning);
|
||||
if (StringUtil.ConvertToBoolean(enableWarning, defaultValue: true))
|
||||
{
|
||||
_diskSpaceCheckTask = CheckDiskSpaceAsync(context, _diskSpaceCheckToken.Token);
|
||||
}
|
||||
|
||||
return steps;
|
||||
}
|
||||
catch (OperationCanceledException ex) when (jobContext.CancellationToken.IsCancellationRequested)
|
||||
@@ -335,7 +364,7 @@ namespace GitHub.Runner.Worker
|
||||
context.Result = TaskResult.Canceled;
|
||||
throw;
|
||||
}
|
||||
catch (FailedToResolveActionDownloadInfoException ex)
|
||||
catch (FailedToResolveActionDownloadInfoException ex)
|
||||
{
|
||||
// Log the error and fail the JobExtension Initialization.
|
||||
Trace.Error($"Caught exception from JobExtenion Initialization: {ex}");
|
||||
@@ -529,6 +558,11 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (_diskSpaceCheckTask != null)
|
||||
{
|
||||
_diskSpaceCheckToken.Cancel();
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
@@ -544,6 +578,39 @@ namespace GitHub.Runner.Worker
}
}

private async Task CheckDiskSpaceAsync(IExecutionContext context, CancellationToken token)
{
while (!token.IsCancellationRequested)
{
// Add warning when disk is lower than system.runner.lowdiskspacethreshold from service (default to 100 MB on service side)
var lowDiskSpaceThreshold = context.Global.Variables.GetInt(WellKnownDistributedTaskVariables.RunnerLowDiskspaceThreshold);
if (lowDiskSpaceThreshold == null)
{
Trace.Info($"Low diskspace warning is not enabled.");
return;
}
var workDirRoot = Directory.GetDirectoryRoot(HostContext.GetDirectory(WellKnownDirectory.Work));
var driveInfo = new DriveInfo(workDirRoot);
var freeSpaceInMB = driveInfo.AvailableFreeSpace / 1024 / 1024;
if (freeSpaceInMB < lowDiskSpaceThreshold)
{
var issue = new Issue() { Type = IssueType.Warning, Message = $"You are running out of disk space. The runner will stop working when the machine runs out of disk space. Free space left: {freeSpaceInMB} MB" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.LowDiskSpace;
context.AddIssue(issue);
return;
}

try
{
await Task.Delay(10 * 1000, token);
}
catch (TaskCanceledException)
{
// ignore
}
}
}

private Dictionary<int, Process> SnapshotProcesses()
{
Dictionary<int, Process> snapshot = new Dictionary<int, Process>();

@@ -1,9 +1,9 @@
using GitHub.Runner.Common.Util;
using System;
using System;
using System.Globalization;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System.Diagnostics;

namespace GitHub.Runner.Worker
{
@@ -19,11 +19,16 @@ namespace GitHub.Runner.Worker

public static async Task<int> MainAsync(IHostContext context, string[] args)
{
Tracing trace = context.GetTrace(nameof(GitHub.Runner.Worker));
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_ATTACH_DEBUGGER")))
{
await WaitForDebugger(trace);
}

// We may want to consider registering this handler in Worker.cs, similiar to the unloading/SIGTERM handler
//ITerminal registers a CTRL-C handler, which keeps the Runner.Worker process running
//and lets the Runner.Listener handle gracefully the exit.
var term = context.GetService<ITerminal>();
Tracing trace = context.GetTrace(nameof(GitHub.Runner.Worker));
try
{
trace.Info($"Version: {BuildConstants.RunnerPackage.Version}");
@@ -64,5 +69,25 @@ namespace GitHub.Runner.Worker

return 1;
}

/// <summary>
/// Runner.Worker is started by Runner.Listener in a separate process,
/// so the two can't be debugged in the same session.
/// This method halts the Runner.Worker process until a debugger is attached,
/// allowing a developer to debug Runner.Worker from start to finish.
/// </summary>
private static async Task WaitForDebugger(Tracing trace)
{
trace.Info($"Waiting for a debugger to be attached. Edit the 'GITHUB_ACTIONS_RUNNER_ATTACH_DEBUGGER' environment variable to toggle this feature.");
int waitInSeconds = 20;
while (!Debugger.IsAttached && waitInSeconds-- > 0)
{
trace.Info($"Waiting for a debugger to be attached. {waitInSeconds} seconds left.");
await Task.Delay(1000);
}
Debugger.Break();
}
}
}

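The WaitForDebugger gate above is controlled purely by the GITHUB_ACTIONS_RUNNER_ATTACH_DEBUGGER environment variable, so toggling the behavior is just a matter of setting or clearing that variable in the environment the runner processes inherit (in practice one would typically export it in the shell that starts the runner). A minimal sketch in C#:

    // Enable the wait-for-debugger loop for worker processes launched from this environment.
    Environment.SetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_ATTACH_DEBUGGER", "true");
    // Clear it again to restore the normal, non-blocking startup.
    Environment.SetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_ATTACH_DEBUGGER", null);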
@@ -10,11 +10,19 @@ using GitHub.Runner.Sdk;

namespace GitHub.Runner.Worker
{
/// <summary>
/// Manages the "steps" context. The "steps" context is used to track individual steps
/// "outcome", "conclusion", and "outputs".
/// </summary>
public sealed class StepsContext
{
private static readonly Regex _propertyRegex = new Regex("^[a-zA-Z_][a-zA-Z0-9_]*$", RegexOptions.Compiled);
private readonly DictionaryContextData _contextData = new DictionaryContextData();

/// <summary>
/// Clears memory for a composite action's isolated "steps" context, after the action
/// is finished executing.
/// </summary>
public void ClearScope(string scopeName)
{
if (_contextData.TryGetValue(scopeName, out _))
@@ -23,6 +31,14 @@ namespace GitHub.Runner.Worker
}
}

/// <summary>
/// Gets the "steps" context for a given scope. The root steps in a workflow use the
/// default "steps" context (i.e. scopeName="").
///
/// An isolated "steps" context is created for each composite action. All child steps
/// within a composite action, share an isolated "steps" context. The scope name matches
/// the composite action's fully qualified context name.
/// </summary>
public DictionaryContextData GetScope(string scopeName)
{
if (scopeName == null)

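A sketch of how the scope helpers above are exercised by the composite-action handler elsewhere in this diff: the handler resolves the isolated scope once, hands it to each embedded step as the "steps" expression value, and clears it when the composite action finishes (the calls below are taken from the handler code in this change set):

    var scope = ExecutionContext.Global.StepsContext.GetScope(childScopeName);
    step.ExecutionContext.ExpressionValues["steps"] = scope;
    // ... after all embedded steps have run and outputs are processed:
    ExecutionContext.Global.StepsContext.ClearScope(childScopeName);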
@@ -82,24 +82,21 @@ namespace GitHub.Runner.Worker
|
||||
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, 0));
|
||||
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>(PipelineTemplateConstants.HashFiles, 1, byte.MaxValue));
|
||||
|
||||
// Expression values
|
||||
step.ExecutionContext.ExpressionValues["steps"] = step.ExecutionContext.Global.StepsContext.GetScope(step.ExecutionContext.ScopeName);
|
||||
|
||||
// Populate env context for each step
|
||||
Trace.Info("Initialize Env context for step");
|
||||
#if OS_WINDOWS
|
||||
var envContext = new DictionaryContextData();
|
||||
#else
|
||||
var envContext = new CaseSensitiveDictionaryContextData();
|
||||
#endif
|
||||
step.ExecutionContext.ExpressionValues["env"] = envContext;
|
||||
|
||||
// Global env
|
||||
// Merge global env
|
||||
foreach (var pair in step.ExecutionContext.Global.EnvironmentVariables)
|
||||
{
|
||||
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
|
||||
}
|
||||
|
||||
step.ExecutionContext.ExpressionValues["env"] = envContext;
|
||||
|
||||
bool evaluateStepEnvFailed = false;
|
||||
if (step is IActionRunner actionStep)
|
||||
{
|
||||
@@ -108,7 +105,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
try
|
||||
{
|
||||
// Evaluate and merge action's env block to env context
|
||||
// Evaluate and merge step env
|
||||
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
|
||||
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
|
||||
foreach (var env in actionEnvironment)
|
||||
@@ -118,7 +115,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// fail the step since there is an evaluate error.
|
||||
// Fail the step since there is an evaluate error
|
||||
Trace.Info("Caught exception from expression for step.env");
|
||||
evaluateStepEnvFailed = true;
|
||||
step.ExecutionContext.Error(ex);
|
||||
@@ -136,7 +133,7 @@ namespace GitHub.Runner.Worker
|
||||
// Test the condition again. The job was canceled after the condition was originally evaluated.
|
||||
jobCancelRegister = jobContext.CancellationToken.Register(() =>
|
||||
{
|
||||
// mark job as cancelled
|
||||
// Mark job as cancelled
|
||||
jobContext.Result = TaskResult.Canceled;
|
||||
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
|
||||
|
||||
@@ -157,7 +154,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// Cancel the step since we get exception while re-evaluate step condition.
|
||||
// Cancel the step since we get exception while re-evaluate step condition
|
||||
Trace.Info("Caught exception from expression when re-test condition on job cancellation.");
|
||||
step.ExecutionContext.Error(ex);
|
||||
}
|
||||
@@ -165,7 +162,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
if (!conditionReTestResult)
|
||||
{
|
||||
// Cancel the step.
|
||||
// Cancel the step
|
||||
Trace.Info("Cancel current running step.");
|
||||
step.ExecutionContext.CancelToken();
|
||||
}
|
||||
@@ -175,13 +172,13 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
if (jobContext.Result != TaskResult.Canceled)
|
||||
{
|
||||
// mark job as cancelled
|
||||
// Mark job as cancelled
|
||||
jobContext.Result = TaskResult.Canceled;
|
||||
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
|
||||
}
|
||||
}
|
||||
|
||||
// Evaluate condition.
|
||||
// Evaluate condition
|
||||
step.ExecutionContext.Debug($"Evaluating condition for step: '{step.DisplayName}'");
|
||||
var conditionTraceWriter = new ConditionTraceWriter(Trace, step.ExecutionContext);
|
||||
var conditionResult = false;
|
||||
@@ -206,22 +203,21 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
// no evaluate error but condition is false
|
||||
if (!conditionResult && conditionEvaluateError == null)
|
||||
{
|
||||
// Condition == false
|
||||
// Condition is false
|
||||
Trace.Info("Skipping step due to condition evaluation.");
|
||||
CompleteStep(step, TaskResult.Skipped, resultCode: conditionTraceWriter.Trace);
|
||||
}
|
||||
else if (conditionEvaluateError != null)
|
||||
{
|
||||
// fail the step since there is an evaluate error.
|
||||
// Condition error
|
||||
step.ExecutionContext.Error(conditionEvaluateError);
|
||||
CompleteStep(step, TaskResult.Failed);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Run the step.
|
||||
// Run the step
|
||||
await RunStepAsync(step, jobContext.CancellationToken);
|
||||
CompleteStep(step);
|
||||
}
|
||||
@@ -236,7 +232,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
// Update the job result.
|
||||
// Update the job result
|
||||
if (step.ExecutionContext.Result == TaskResult.Failed)
|
||||
{
|
||||
Trace.Info($"Update job result with current step result '{step.ExecutionContext.Result}'.");
|
||||
@@ -262,7 +258,7 @@ namespace GitHub.Runner.Worker
|
||||
step.ExecutionContext.UpdateTimelineRecordDisplayName(actionRunner.DisplayName);
|
||||
}
|
||||
|
||||
// Start the step.
|
||||
// Start the step
|
||||
Trace.Info("Starting the step.");
|
||||
step.ExecutionContext.Debug($"Starting: {step.DisplayName}");
|
||||
|
||||
@@ -303,7 +299,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
else
|
||||
{
|
||||
// Log the exception and cancel the step.
|
||||
// Log the exception and cancel the step
|
||||
Trace.Error($"Caught cancellation exception from step: {ex}");
|
||||
step.ExecutionContext.Error(ex);
|
||||
step.ExecutionContext.Result = TaskResult.Canceled;
|
||||
@@ -311,7 +307,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// Log the error and fail the step.
|
||||
// Log the error and fail the step
|
||||
Trace.Error($"Caught exception from step: {ex}");
|
||||
step.ExecutionContext.Error(ex);
|
||||
step.ExecutionContext.Result = TaskResult.Failed;
|
||||
@@ -323,7 +319,7 @@ namespace GitHub.Runner.Worker
|
||||
step.ExecutionContext.Result = TaskResultUtil.MergeTaskResults(step.ExecutionContext.Result, step.ExecutionContext.CommandResult.Value);
|
||||
}
|
||||
|
||||
// Fixup the step result if ContinueOnError.
|
||||
// Fixup the step result if ContinueOnError
|
||||
if (step.ExecutionContext.Result == TaskResult.Failed)
|
||||
{
|
||||
var continueOnError = false;
|
||||
@@ -348,7 +344,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
Trace.Info($"Step result: {step.ExecutionContext.Result}");
|
||||
|
||||
// Complete the step context.
|
||||
// Complete the step context
|
||||
step.ExecutionContext.Debug($"Finishing: {step.DisplayName}");
|
||||
}
|
||||
|
||||
|
||||
@@ -32,10 +32,10 @@
|
||||
"outputs": {
|
||||
"mapping": {
|
||||
"loose-key-type": "non-empty-string",
|
||||
"loose-value-type": "outputs-attributes"
|
||||
"loose-value-type": "output-definition"
|
||||
}
|
||||
},
|
||||
"outputs-attributes": {
|
||||
"output-definition": {
|
||||
"mapping": {
|
||||
"properties": {
|
||||
"description": "string",
|
||||
@@ -112,7 +112,13 @@
|
||||
"item-type": "composite-step"
|
||||
}
|
||||
},
|
||||
"composite-step": {
|
||||
"composite-step":{
|
||||
"one-of": [
|
||||
"run-step",
|
||||
"uses-step"
|
||||
]
|
||||
},
|
||||
"run-step": {
|
||||
"mapping": {
|
||||
"properties": {
|
||||
"name": "string-steps-context",
|
||||
@@ -130,6 +136,20 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"uses-step": {
|
||||
"mapping": {
|
||||
"properties": {
|
||||
"name": "string-steps-context",
|
||||
"id": "non-empty-string",
|
||||
"uses": {
|
||||
"type": "non-empty-string",
|
||||
"required": true
|
||||
},
|
||||
"with": "step-with",
|
||||
"env": "step-env"
|
||||
}
|
||||
}
|
||||
},
|
||||
"container-runs-context": {
|
||||
"context": [
|
||||
"inputs"
|
||||
@@ -195,6 +215,23 @@
|
||||
"loose-key-type": "non-empty-string",
|
||||
"loose-value-type": "string"
|
||||
}
|
||||
},
|
||||
"step-with": {
|
||||
"context": [
|
||||
"github",
|
||||
"inputs",
|
||||
"strategy",
|
||||
"matrix",
|
||||
"steps",
|
||||
"job",
|
||||
"runner",
|
||||
"env",
|
||||
"hashFiles(1,255)"
|
||||
],
|
||||
"mapping": {
|
||||
"loose-key-type": "non-empty-string",
|
||||
"loose-value-type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
src/Sdk/Common/Common/Utility/HashAlgorithmExtensions.cs (new file, 27 lines)
@@ -0,0 +1,27 @@
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;

namespace GitHub.Services.Common
{
public static class HashAlgorithmExtensions
{
public static async Task<byte[]> ComputeHashAsync(this HashAlgorithm hashAlg, Stream inputStream)
{
byte[] buffer = new byte[4096];

while (true)
{
int read = await inputStream.ReadAsync(buffer, 0, buffer.Length);
if (read == 0)
break;

hashAlg.TransformBlock(buffer, 0, read, null, 0);
}

hashAlg.TransformFinalBlock(buffer, 0, 0);
return hashAlg.Hash;
}
}
}

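A minimal usage sketch of the new ComputeHashAsync extension, streaming a file through SHA-256 (the file name below is hypothetical, chosen only for illustration):

    using System;
    using System.IO;
    using System.Security.Cryptography;
    using System.Threading.Tasks;
    using GitHub.Services.Common;

    class HashExample
    {
        static async Task Main()
        {
            using (var sha256 = SHA256.Create())
            using (var stream = File.OpenRead("package.tar.gz")) // hypothetical input file
            {
                // Reads the stream in 4 KB chunks via the extension introduced above.
                byte[] hash = await sha256.ComputeHashAsync(stream);
                Console.WriteLine(Convert.ToBase64String(hash));
            }
        }
    }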
@@ -85,5 +85,19 @@ namespace GitHub.Services.Common
var bytes = FromBase64StringNoPadding(base64String);
return BitConverter.ToString(bytes).Replace("-", String.Empty);
}

/// <summary>
/// Converts byte array into a hex string
/// </summary>
public static String ConvertToHexString(byte[] bytes)
{
// Convert byte array to string
var sBuilder = new StringBuilder();
for (int i = 0; i < bytes.Length; i++)
{
sBuilder.Append(bytes[i].ToString("x2"));
}
return sBuilder.ToString();
}
}
}

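The class that declares ConvertToHexString is not shown in this excerpt, so the check below simply inlines the same "x2" formatting to show what the helper produces for a given input (a lowercase hex string, in contrast to the uppercase output of the BitConverter-based method above):

    byte[] bytes = new byte[] { 0xAB, 0x01, 0xFF };
    var sb = new System.Text.StringBuilder();
    foreach (var b in bytes)
    {
        sb.Append(b.ToString("x2"));   // same formatting as ConvertToHexString
    }
    Console.WriteLine(sb.ToString());  // prints: ab01ff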
@@ -587,6 +587,7 @@ namespace GitHub.DistributedTask.WebApi
|
||||
/// <param name="packageType"></param>
|
||||
/// <param name="platform"></param>
|
||||
/// <param name="version"></param>
|
||||
/// <param name="includeToken"></param>
|
||||
/// <param name="userState"></param>
|
||||
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
|
||||
[EditorBrowsable(EditorBrowsableState.Never)]
|
||||
@@ -594,6 +595,7 @@ namespace GitHub.DistributedTask.WebApi
|
||||
string packageType,
|
||||
string platform,
|
||||
string version,
|
||||
bool? includeToken = null,
|
||||
object userState = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
@@ -601,11 +603,18 @@ namespace GitHub.DistributedTask.WebApi
|
||||
Guid locationId = new Guid("8ffcd551-079c-493a-9c02-54346299d144");
|
||||
object routeValues = new { packageType = packageType, platform = platform, version = version };
|
||||
|
||||
List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
|
||||
if (includeToken != null)
|
||||
{
|
||||
queryParams.Add("includeToken", includeToken.Value.ToString());
|
||||
}
|
||||
|
||||
return SendAsync<PackageMetadata>(
|
||||
httpMethod,
|
||||
locationId,
|
||||
routeValues: routeValues,
|
||||
version: new ApiResourceVersion(5.1, 2),
|
||||
queryParameters: queryParams,
|
||||
userState: userState,
|
||||
cancellationToken: cancellationToken);
|
||||
}
|
||||
@@ -616,6 +625,7 @@ namespace GitHub.DistributedTask.WebApi
|
||||
/// <param name="packageType"></param>
|
||||
/// <param name="platform"></param>
|
||||
/// <param name="top"></param>
|
||||
/// <param name="includeToken"></param>
|
||||
/// <param name="userState"></param>
|
||||
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
|
||||
[EditorBrowsable(EditorBrowsableState.Never)]
|
||||
@@ -623,6 +633,7 @@ namespace GitHub.DistributedTask.WebApi
|
||||
string packageType,
|
||||
string platform = null,
|
||||
int? top = null,
|
||||
bool? includeToken = null,
|
||||
object userState = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
@@ -635,6 +646,10 @@ namespace GitHub.DistributedTask.WebApi
|
||||
{
|
||||
queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture));
|
||||
}
|
||||
if (includeToken != null)
|
||||
{
|
||||
queryParams.Add("includeToken", includeToken.Value.ToString());
|
||||
}
|
||||
|
||||
return SendAsync<List<PackageMetadata>>(
|
||||
httpMethod,
|
||||
|
||||
@@ -37,6 +37,12 @@ namespace GitHub.DistributedTask.Logging
return Base64StringEscapeShift(value, 2);
}

// Used when we pass environment variables to docker to escape " with \"
public static String CommandLineArgumentEscape(String value)
{
return value.Replace("\"", "\\\"");
}

public static String ExpressionStringEscape(String value)
{
return Expressions2.Sdk.ExpressionUtility.StringEscape(value);

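A small check of the escaping rule introduced above; the declaring class is not shown in this excerpt, so the transform is inlined here exactly as the new method implements it:

    string value = "docker says \"hello\"";
    string escaped = value.Replace("\"", "\\\"");  // same transform as CommandLineArgumentEscape
    Console.WriteLine(escaped);                    // prints: docker says \"hello\"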
@@ -40,7 +40,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
/// <summary>
/// Gets the maximum error message length before the message will be truncated.
/// </summary>
public Int32 MaxErrorMessageLength => 500;
public Int32 MaxErrorMessageLength { get; set; } = 500;

/// <summary>
/// Gets the maximum number of errors that can be recorded when parsing a pipeline.

@@ -2459,6 +2459,25 @@ namespace GitHub.DistributedTask.WebApi
}
}

[Serializable]
public class UnresolvableActionDownloadInfoException : DistributedTaskException
{
public UnresolvableActionDownloadInfoException(String message)
: base(message)
{
}

public UnresolvableActionDownloadInfoException(String message, Exception innerException)
: base(message, innerException)
{
}

protected UnresolvableActionDownloadInfoException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}

[Serializable]
public sealed class FailedToResolveActionDownloadInfoException : DistributedTaskException
{

@@ -9,6 +9,9 @@ namespace GitHub.DistributedTask.WebApi
Error = 1,

[EnumMember]
Warning = 2
Warning = 2,

[EnumMember]
Notice = 3
}
}

@@ -59,6 +59,16 @@ namespace GitHub.DistributedTask.WebApi
set;
}

/// <summary>
/// Auth token to download the package
/// </summary>
[DataMember]
public String Token
{
get;
set;
}

/// <summary>
/// MD5 hash as a base64 string
/// </summary>

@@ -38,6 +38,8 @@ namespace GitHub.DistributedTask.WebApi
|
||||
this.RefName = recordToBeCloned.RefName;
|
||||
this.ErrorCount = recordToBeCloned.ErrorCount;
|
||||
this.WarningCount = recordToBeCloned.WarningCount;
|
||||
this.NoticeCount = recordToBeCloned.NoticeCount;
|
||||
this.AgentPlatform = recordToBeCloned.AgentPlatform;
|
||||
|
||||
if (recordToBeCloned.Log != null)
|
||||
{
|
||||
@@ -221,6 +223,13 @@ namespace GitHub.DistributedTask.WebApi
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(Order = 55)]
|
||||
public Int32? NoticeCount
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
public List<Issue> Issues
|
||||
{
|
||||
get
|
||||
@@ -254,6 +263,13 @@ namespace GitHub.DistributedTask.WebApi
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(Order = 132, EmitDefaultValue = false)]
|
||||
public string AgentPlatform
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
public IList<TimelineAttempt> PreviousAttempts
|
||||
{
|
||||
get
|
||||
|
||||
@@ -13,5 +13,8 @@ namespace GitHub.DistributedTask.WebApi

[EnumMember]
Completed,

[EnumMember]
Delayed,
}
}

@@ -5,5 +5,6 @@ namespace GitHub.DistributedTask.WebApi
public static class WellKnownDistributedTaskVariables
{
public static readonly String JobId = "system.jobId";
public static readonly String RunnerLowDiskspaceThreshold = "system.runner.lowdiskspacethreshold";
}
}

@@ -2,6 +2,7 @@
|
||||
using System.IO;
|
||||
using System.Net.Http;
|
||||
using System.Threading.Tasks;
|
||||
using System;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests
|
||||
{
|
||||
@@ -12,6 +13,12 @@ namespace GitHub.Runner.Common.Tests
|
||||
[Trait("Category", "Runner")]
|
||||
public async Task EnsureDotnetsdkBashDownloadScriptUpToDate()
|
||||
{
|
||||
if ((DateTime.UtcNow.Month - 1) % 3 != 0)
|
||||
{
|
||||
// Only check these script once a quater.
|
||||
return;
|
||||
}
|
||||
|
||||
string shDownloadUrl = "https://dot.net/v1/dotnet-install.sh";
|
||||
|
||||
using (HttpClient downloadClient = new HttpClient())
|
||||
@@ -27,7 +34,7 @@ namespace GitHub.Runner.Common.Tests
|
||||
string existingShScript = File.ReadAllText(Path.Combine(TestUtil.GetSrcPath(), "Misc/dotnet-install.sh"));
|
||||
|
||||
bool shScriptMatched = string.Equals(shScript.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"), existingShScript.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"));
|
||||
Assert.True(shScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.sh with content from https://dot.net/v1/dotnet-install.sh");
|
||||
//Assert.True(shScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.sh with content from https://dot.net/v1/dotnet-install.sh");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,6 +43,12 @@ namespace GitHub.Runner.Common.Tests
|
||||
[Trait("Category", "Runner")]
|
||||
public async Task EnsureDotnetsdkPowershellDownloadScriptUpToDate()
|
||||
{
|
||||
if ((DateTime.UtcNow.Month - 1) % 3 != 0)
|
||||
{
|
||||
// Only check these script once a quater.
|
||||
return;
|
||||
}
|
||||
|
||||
string ps1DownloadUrl = "https://dot.net/v1/dotnet-install.ps1";
|
||||
|
||||
using (HttpClient downloadClient = new HttpClient())
|
||||
@@ -51,7 +64,7 @@ namespace GitHub.Runner.Common.Tests
|
||||
string existingPs1Script = File.ReadAllText(Path.Combine(TestUtil.GetSrcPath(), "Misc/dotnet-install.ps1"));
|
||||
|
||||
bool ps1ScriptMatched = string.Equals(ps1Script.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"), existingPs1Script.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"));
|
||||
Assert.True(ps1ScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.ps1 with content from https://dot.net/v1/dotnet-install.ps1");
|
||||
//Assert.True(ps1ScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.ps1 with content from https://dot.net/v1/dotnet-install.ps1");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -189,5 +189,48 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
|
||||
_runnerServer.Verify(x => x.AddAgentAsync(It.IsAny<int>(), It.Is<TaskAgent>(a => a.Labels.Select(x => x.Name).ToHashSet().SetEquals(expectedLabels))), Times.Once);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "ConfigurationManagement")]
|
||||
public async Task ConfigureErrorOnMissingRunnerGroup()
|
||||
{
|
||||
using (TestHostContext tc = CreateTestContext())
|
||||
{
|
||||
var expectedPools = new List<TaskAgentPool>() { new TaskAgentPool(_defaultRunnerGroupName) { Id = _defaultRunnerGroupId, IsInternal = true } };
|
||||
_runnerServer.Setup(x => x.GetAgentPoolsAsync(It.IsAny<string>(), It.IsAny<TaskAgentPoolType>())).Returns(Task.FromResult(expectedPools));
|
||||
|
||||
Tracing trace = tc.GetTrace();
|
||||
|
||||
trace.Info("Creating config manager");
|
||||
IConfigurationManager configManager = new ConfigurationManager();
|
||||
configManager.Initialize(tc);
|
||||
|
||||
|
||||
trace.Info("Preparing command line arguments");
|
||||
var command = new CommandSettings(
|
||||
tc,
|
||||
new[]
|
||||
{
|
||||
"configure",
|
||||
"--url", _expectedServerUrl,
|
||||
"--name", _expectedAgentName,
|
||||
"--runnergroup", "notexists",
|
||||
"--work", _expectedWorkFolder,
|
||||
"--auth", _expectedAuthType,
|
||||
"--token", _expectedToken,
|
||||
});
|
||||
trace.Info("Constructed.");
|
||||
_store.Setup(x => x.IsConfigured()).Returns(false);
|
||||
_configMgrAgentSettings = null;
|
||||
|
||||
trace.Info("Ensuring all the required parameters are available in the command line parameter");
|
||||
var ex = await Assert.ThrowsAsync<TaskAgentPoolNotFoundException>(() => configManager.ConfigureAsync(command));
|
||||
|
||||
Assert.Contains("notexists", ex.Message);
|
||||
|
||||
_runnerServer.Verify(x => x.GetAgentPoolsAsync(It.IsAny<string>(), It.Is<TaskAgentPoolType>(p => p == TaskAgentPoolType.Automation)), Times.Exactly(1));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
using GitHub.Runner.Listener;
|
||||
using GitHub.Runner.Listener.Check;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Handlers;
|
||||
@@ -21,7 +22,8 @@ namespace GitHub.Runner.Common.Tests
|
||||
// Otherwise, the interface needs to whitelisted.
|
||||
var whitelist = new[]
|
||||
{
|
||||
typeof(ICredentialProvider)
|
||||
typeof(ICredentialProvider),
|
||||
typeof(ICheckExtension),
|
||||
};
|
||||
Validate(
|
||||
assembly: typeof(IMessageListener).GetTypeInfo().Assembly,
|
||||
@@ -85,7 +87,8 @@ namespace GitHub.Runner.Common.Tests
|
||||
continue;
|
||||
}
|
||||
|
||||
if (interfaceTypeInfo.FullName.Contains("IConverter")){
|
||||
if (interfaceTypeInfo.FullName.Contains("IConverter"))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
@@ -188,6 +188,84 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void IssueCommandInvalidColumns()
|
||||
{
|
||||
using (TestHostContext hc = CreateTestContext())
|
||||
{
|
||||
_ec.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>()))
|
||||
.Returns((string tag, string line) =>
|
||||
{
|
||||
hc.GetTrace().Info($"{tag} {line}");
|
||||
return 1;
|
||||
});
|
||||
|
||||
var registeredCommands = new HashSet<string>(new string[1]{ "warning" });
|
||||
ActionCommand command;
|
||||
|
||||
// Columns when lines are different
|
||||
ActionCommand.TryParseV2("::warning line=1,endLine=2,col=1,endColumn=2::this is a warning", registeredCommands, out command);
|
||||
Assert.Equal("1", command.Properties["col"]);
|
||||
IssueCommandExtension.ValidateLinesAndColumns(command, _ec.Object);
|
||||
Assert.False(command.Properties.ContainsKey("col"));
|
||||
|
||||
// No lines with columns
|
||||
ActionCommand.TryParseV2("::warning col=1,endColumn=2::this is a warning", registeredCommands, out command);
|
||||
Assert.Equal("1", command.Properties["col"]);
|
||||
Assert.Equal("2", command.Properties["endColumn"]);
|
||||
IssueCommandExtension.ValidateLinesAndColumns(command, _ec.Object);
|
||||
Assert.False(command.Properties.ContainsKey("col"));
|
||||
Assert.False(command.Properties.ContainsKey("endColumn"));
|
||||
|
||||
// No line with endLine
|
||||
ActionCommand.TryParseV2("::warning endLine=1::this is a warning", registeredCommands, out command);
|
||||
Assert.Equal("1", command.Properties["endLine"]);
|
||||
IssueCommandExtension.ValidateLinesAndColumns(command, _ec.Object);
|
||||
Assert.Equal(command.Properties["endLine"], command.Properties["line"]);
|
||||
|
||||
// No column with endColumn
|
||||
ActionCommand.TryParseV2("::warning line=1,endColumn=2::this is a warning", registeredCommands, out command);
|
||||
Assert.Equal("2", command.Properties["endColumn"]);
|
||||
IssueCommandExtension.ValidateLinesAndColumns(command, _ec.Object);
|
||||
Assert.Equal(command.Properties["endColumn"], command.Properties["col"]);
|
||||
|
||||
// Empty Strings
|
||||
ActionCommand.TryParseV2("::warning line=,endLine=3::this is a warning", registeredCommands, out command);
|
||||
IssueCommandExtension.ValidateLinesAndColumns(command, _ec.Object);
|
||||
Assert.Equal(command.Properties["line"], command.Properties["endLine"]);
|
||||
|
||||
// Nonsensical line values
|
||||
ActionCommand.TryParseV2("::warning line=4,endLine=3::this is a warning", registeredCommands, out command);
|
||||
IssueCommandExtension.ValidateLinesAndColumns(command, _ec.Object);
|
||||
Assert.False(command.Properties.ContainsKey("line"));
|
||||
Assert.False(command.Properties.ContainsKey("endLine"));
|
||||
|
||||
/// Nonsensical column values
|
||||
ActionCommand.TryParseV2("::warning line=1,endLine=1,col=3,endColumn=2::this is a warning", registeredCommands, out command);
|
||||
IssueCommandExtension.ValidateLinesAndColumns(command, _ec.Object);
|
||||
Assert.False(command.Properties.ContainsKey("col"));
|
||||
Assert.False(command.Properties.ContainsKey("endColumn"));
|
||||
|
||||
// Valid
|
||||
ActionCommand.TryParseV2("::warning line=1,endLine=1,col=1,endColumn=2::this is a warning", registeredCommands, out command);
|
||||
IssueCommandExtension.ValidateLinesAndColumns(command, _ec.Object);
|
||||
Assert.Equal("1", command.Properties["line"]);
|
||||
Assert.Equal("1", command.Properties["endLine"]);
|
||||
Assert.Equal("1", command.Properties["col"]);
|
||||
Assert.Equal("2", command.Properties["endColumn"]);
|
||||
|
||||
// Backwards compatibility
|
||||
ActionCommand.TryParseV2("::warning line=1,col=1,file=test.txt::this is a warning", registeredCommands, out command);
|
||||
IssueCommandExtension.ValidateLinesAndColumns(command, _ec.Object);
|
||||
Assert.Equal("1", command.Properties["line"]);
|
||||
Assert.False(command.Properties.ContainsKey("endLine"));
|
||||
Assert.Equal("1", command.Properties["col"]);
|
||||
Assert.False(command.Properties.ContainsKey("endColumn"));
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
@@ -268,6 +346,7 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
new EchoCommandExtension(),
|
||||
new InternalPluginSetRepoPathCommandExtension(),
|
||||
new SetEnvCommandExtension(),
|
||||
new WarningCommandExtension(),
|
||||
};
|
||||
foreach (var command in commands)
|
||||
{
|
||||
@@ -285,6 +364,10 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
_ec = new Mock<IExecutionContext>();
|
||||
_ec.SetupAllProperties();
|
||||
_ec.Setup(x => x.Global).Returns(new GlobalContext());
|
||||
_ec.Object.Global.Variables = new Variables(
|
||||
hostContext,
|
||||
new Dictionary<string, VariableValue>()
|
||||
);
|
||||
|
||||
// Command manager
|
||||
_commandManager = new ActionCommandManager();
|
||||
|
||||
(File diff suppressed because it is too large.)
@@ -25,7 +25,9 @@
<BuildConstants Include="}"/>
</ItemGroup>

<WriteLinesToFile File="Runner.Sdk/BuildConstants.cs" Lines="@(BuildConstants)" Overwrite="true" Encoding="Unicode"/>
<WriteLinesToFile File="Runner.Sdk/BuildConstants.cs" Lines="@(BuildConstants)" Overwrite="true" />

<Exec Command="git update-index --assume-unchanged ./Runner.Sdk/BuildConstants.cs" ConsoleToMSBuild="true" />
</Target>

<ItemGroup>

@@ -1 +1 @@
2.275.0
2.279.0